diff --git a/.ci/init.gradle b/.ci/init.gradle index 62e6c48b8121f..ec16b49bfabd4 100644 --- a/.ci/init.gradle +++ b/.ci/init.gradle @@ -6,6 +6,7 @@ if (System.env.ELASTIC_ARTIFACTORY_USERNAME == null || System.env.ELASTIC_ARTIFA settings.pluginManagement { repositories { maven { + name "artifactory-gradle-plugins" url "https://artifactory.elstc.co/artifactory/gradle-plugins" credentials { username System.env.ELASTIC_ARTIFACTORY_USERNAME @@ -21,6 +22,7 @@ if (System.env.ELASTIC_ARTIFACTORY_USERNAME == null || System.env.ELASTIC_ARTIFA buildscript { repositories { maven { + name "artifactory-gradle-release" url "https://artifactory.elstc.co/artifactory/gradle-release/" credentials { username System.env.ELASTIC_ARTIFACTORY_USERNAME @@ -31,6 +33,7 @@ if (System.env.ELASTIC_ARTIFACTORY_USERNAME == null || System.env.ELASTIC_ARTIFA } repositories { maven { + name "artifactory-gradle-release" url "https://artifactory.elstc.co/artifactory/gradle-release/" credentials { username System.env.ELASTIC_ARTIFACTORY_USERNAME diff --git a/.ci/matrix-runtime-javas.yml b/.ci/matrix-runtime-javas.yml index d4f390012be97..583acce078055 100644 --- a/.ci/matrix-runtime-javas.yml +++ b/.ci/matrix-runtime-javas.yml @@ -12,3 +12,4 @@ ES_RUNTIME_JAVA: - openjdk13 - zulu11 - zulu12 + - corretto11 diff --git a/Vagrantfile b/Vagrantfile index ef64eaa0071fc..c8eba2bca4a6d 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -67,7 +67,10 @@ Vagrant.configure(2) do |config| 'debian-8'.tap do |box| config.vm.define box, define_opts do |config| config.vm.box = 'elastic/debian-8-x86_64' - deb_common config, box + deb_common config, box, extra: <<-SHELL + # this sometimes gets a bad ip, and doesn't appear to be needed + rm -f /etc/apt/sources.list.d/http_debian_net_debian.list + SHELL end end 'debian-9'.tap do |box| @@ -158,13 +161,17 @@ def deb_common(config, name, extra: '') s.privileged = false s.inline = "sudo sed -i '/tty/!s/mesg n/tty -s \\&\\& mesg n/' /root/.profile" end + extra_with_lintian = <<-SHELL + #{extra} + install lintian + SHELL linux_common( config, name, update_command: 'apt-get update', update_tracking_file: '/var/cache/apt/archives/last_update', install_command: 'apt-get install -y', - extra: extra + extra: extra_with_lintian ) end @@ -249,10 +256,6 @@ def linux_common(config, touch /is_vagrant_vm # for consistency between linux and windows SHELL - config.vm.provision 'jdk-11', type: 'shell', inline: <<-SHELL - curl -sSL https://download.oracle.com/java/GA/jdk11/9/GPL/openjdk-11.0.2_linux-x64_bin.tar.gz | tar xz -C /opt/ - SHELL - # This prevents leftovers from previous tests using the # same VM from messing up the current test config.vm.provision 'clean es installs in tmp', run: 'always', type: 'shell', inline: <<-SHELL @@ -348,11 +351,10 @@ def sh_install_deps(config, return 1 } cat \<\ /etc/profile.d/java_home.sh -if [ -z "\\\$JAVA_HOME" ]; then - export JAVA_HOME=/opt/jdk-11.0.2 +if [ ! 
-z "\\\$JAVA_HOME" ]; then + export SYSTEM_JAVA_HOME=\\\$JAVA_HOME + unset JAVA_HOME fi -export SYSTEM_JAVA_HOME=\\\$JAVA_HOME -unset JAVA_HOME JAVA ensure tar ensure curl @@ -409,16 +411,9 @@ def windows_common(config, name) $ps_prompt | Out-File $PsHome/Microsoft.PowerShell_profile.ps1 SHELL - config.vm.provision 'windows-jdk-11', type: 'shell', inline: <<-SHELL - New-Item -ItemType Directory -Force -Path "C:/java" - Invoke-WebRequest "https://download.oracle.com/java/GA/jdk11/9/GPL/openjdk-11.0.2_windows-x64_bin.zip" -OutFile "C:/java/jdk-11.zip" - Expand-Archive -Path "C:/java/jdk-11.zip" -DestinationPath "C:/java/" - SHELL - config.vm.provision 'set env variables', type: 'shell', inline: <<-SHELL $ErrorActionPreference = "Stop" [Environment]::SetEnvironmentVariable("PACKAGING_ARCHIVES", "C:/project/build/packaging/archives", "Machine") - [Environment]::SetEnvironmentVariable("SYSTEM_JAVA_HOME", "C:\java\jdk-11.0.2", "Machine") [Environment]::SetEnvironmentVariable("PACKAGING_TESTS", "C:/project/build/packaging/tests", "Machine") [Environment]::SetEnvironmentVariable("JAVA_HOME", $null, "Machine") SHELL diff --git a/build.gradle b/build.gradle index 583569ef7cd45..8794a1f930523 100644 --- a/build.gradle +++ b/build.gradle @@ -88,7 +88,7 @@ subprojects { } repositories { maven { - name = 'localTest' + name = 'test' url = "${rootProject.buildDir}/local-test-repo" } } @@ -619,21 +619,6 @@ allprojects { } } -subprojects { - // Common config when running with a FIPS-140 runtime JVM - if (project.ext.has("inFipsJvm") && project.ext.inFipsJvm) { - tasks.withType(Test) { - systemProperty 'javax.net.ssl.trustStorePassword', 'password' - systemProperty 'javax.net.ssl.keyStorePassword', 'password' - } - project.pluginManager.withPlugin("elasticsearch.testclusters") { - project.testClusters.all { - systemProperty 'javax.net.ssl.trustStorePassword', 'password' - systemProperty 'javax.net.ssl.keyStorePassword', 'password' - } - } - } -} diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index c2f0351c2f174..737bbca4cafb9 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -124,6 +124,7 @@ dependencies { compile 'com.avast.gradle:gradle-docker-compose-plugin:0.8.12' testCompile "junit:junit:${props.getProperty('junit')}" testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}" + testCompile 'com.github.tomakehurst:wiremock-jre8-standalone:2.23.2' } /***************************************************************************** @@ -189,7 +190,7 @@ if (project != rootProject) { task integTest(type: Test) { // integration test requires the local testing repo for example plugin builds dependsOn project.rootProject.allprojects.collect { - it.tasks.matching { it.name == 'publishNebulaPublicationToLocalTestRepository'} + it.tasks.matching { it.name == 'publishNebulaPublicationToTestRepository'} } dependsOn setupLocalDownloads exclude "**/*Tests.class" diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 3db5627390548..f2cbe82a93196 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -39,6 +39,7 @@ import org.gradle.api.artifacts.ModuleVersionIdentifier import org.gradle.api.artifacts.ProjectDependency import org.gradle.api.artifacts.ResolvedArtifact import org.gradle.api.artifacts.dsl.RepositoryHandler +import 
org.gradle.api.artifacts.repositories.ArtifactRepository import org.gradle.api.artifacts.repositories.IvyArtifactRepository import org.gradle.api.artifacts.repositories.MavenArtifactRepository import org.gradle.api.credentials.HttpHeaderCredentials @@ -115,6 +116,22 @@ class BuildPlugin implements Plugin { configureTestTasks(project) configurePrecommit(project) configureDependenciesInfo(project) + + // Common config when running with a FIPS-140 runtime JVM + // Need to do it here to support external plugins + if (project.ext.inFipsJvm) { + project.tasks.withType(Test) { + systemProperty 'javax.net.ssl.trustStorePassword', 'password' + systemProperty 'javax.net.ssl.keyStorePassword', 'password' + } + project.pluginManager.withPlugin("elasticsearch.testclusters") { + project.testClusters.all { + systemProperty 'javax.net.ssl.trustStorePassword', 'password' + systemProperty 'javax.net.ssl.keyStorePassword', 'password' + } + } + } + } @@ -589,11 +606,11 @@ class BuildPlugin implements Plugin { project.getRepositories().all { repository -> if (repository instanceof MavenArtifactRepository) { final MavenArtifactRepository maven = (MavenArtifactRepository) repository - assertRepositoryURIUsesHttps(project, maven.getUrl()) + assertRepositoryURIUsesHttps(maven, project, maven.getUrl()) repository.getArtifactUrls().each { uri -> assertRepositoryURIUsesHttps(project, uri) } } else if (repository instanceof IvyArtifactRepository) { final IvyArtifactRepository ivy = (IvyArtifactRepository) repository - assertRepositoryURIUsesHttps(project, ivy.getUrl()) + assertRepositoryURIUsesHttps(ivy, project, ivy.getUrl()) } } RepositoryHandler repos = project.repositories @@ -605,6 +622,7 @@ class BuildPlugin implements Plugin { } repos.jcenter() repos.ivy { + name "elasticsearch" url "https://artifacts.elastic.co/downloads" patternLayout { artifact "elasticsearch/[module]-[revision](-[classifier]).[ext]" @@ -633,9 +651,9 @@ class BuildPlugin implements Plugin { } } - private static void assertRepositoryURIUsesHttps(final Project project, final URI uri) { + private static void assertRepositoryURIUsesHttps(final ArtifactRepository repository, final Project project, final URI uri) { if (uri != null && uri.toURL().getProtocol().equals("http")) { - throw new GradleException("repository on project with path [${project.path}] is using http for artifacts on [${uri.toURL()}]") + throw new GradleException("repository [${repository.name}] on project with path [${project.path}] is using http for artifacts on [${uri.toURL()}]") } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index 0316acef922c4..e14a8f97ba81d 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -34,6 +34,9 @@ import org.gradle.api.plugins.quality.Checkstyle class PrecommitTasks { /** Adds a precommit task, which depends on non-test verification tasks. 
*/ + + public static final String CHECKSTYLE_VERSION = '8.20' + public static Task create(Project project, boolean includeDependencyLicenses) { project.configurations.create("forbiddenApisCliJar") project.dependencies { @@ -213,7 +216,7 @@ class PrecommitTasks { configProperties = [ suppressions: checkstyleSuppressions ] - toolVersion = '8.10.1' + toolVersion = CHECKSTYLE_VERSION } project.tasks.withType(Checkstyle) { task -> diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index eec46f9a522a7..ef784b6f901d1 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -70,7 +70,7 @@ class RestIntegTestTask extends DefaultTask { project.testClusters { "$name" { distribution = 'INTEG_TEST' - version = project.version + version = VersionProperties.elasticsearch javaHome = project.file(project.ext.runtimeJavaHome) } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy index 110f2fc7e8461..af5d328dc0cad 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy @@ -27,15 +27,15 @@ import org.gradle.api.tasks.Input public class BatsOverVagrantTask extends VagrantCommandTask { @Input - String remoteCommand + Object remoteCommand BatsOverVagrantTask() { command = 'ssh' } - void setRemoteCommand(String remoteCommand) { + void setRemoteCommand(Object remoteCommand) { this.remoteCommand = Objects.requireNonNull(remoteCommand) - setArgs(['--command', remoteCommand]) + setArgs((Iterable) ['--command', remoteCommand]) } @Override diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index f1d7609aff996..71c9d53467502 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -1,11 +1,18 @@ package org.elasticsearch.gradle.vagrant import org.apache.tools.ant.taskdefs.condition.Os +import org.elasticsearch.gradle.BwcVersions import org.elasticsearch.gradle.FileContentsTask +import org.elasticsearch.gradle.Jdk +import org.elasticsearch.gradle.JdkDownloadPlugin import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.BwcVersions -import org.gradle.api.* +import org.gradle.api.GradleException +import org.gradle.api.InvalidUserDataException +import org.gradle.api.NamedDomainObjectContainer +import org.gradle.api.Plugin +import org.gradle.api.Project +import org.gradle.api.Task import org.gradle.api.artifacts.dsl.RepositoryHandler import org.gradle.api.execution.TaskExecutionAdapter import org.gradle.api.internal.artifacts.dependencies.DefaultProjectDependency @@ -15,6 +22,8 @@ import org.gradle.api.tasks.Exec import org.gradle.api.tasks.StopExecutionException import org.gradle.api.tasks.TaskState +import java.nio.file.Paths + import static java.util.Collections.unmodifiableList class VagrantTestPlugin implements Plugin { @@ -30,7 +39,7 @@ class VagrantTestPlugin implements Plugin { 'oel-6', 'oel-7', 
'opensuse-42', - 'rhel-8', + /* TODO: need a real RHEL license now that it is out of beta 'rhel-8',*/ 'sles-12', 'ubuntu-1604', 'ubuntu-1804' @@ -85,8 +94,33 @@ class VagrantTestPlugin implements Plugin { /** extra env vars to pass to vagrant for box configuration **/ Map vagrantBoxEnvVars = [:] + private static final String SYSTEM_JDK_VERSION = "11.0.2+9" + private static final String GRADLE_JDK_VERSION = "12.0.1+12@69cfe15208a647278a19ef0990eea691" + private Jdk linuxSystemJdk; + private Jdk linuxGradleJdk; + private Jdk windowsSystemJdk; + private Jdk windowsGradleJdk; + @Override void apply(Project project) { + project.pluginManager.apply(JdkDownloadPlugin.class) + NamedDomainObjectContainer jdksContainer = (NamedDomainObjectContainer) project.getExtensions().getByName("jdks"); + linuxSystemJdk = jdksContainer.create("linux_system") { + version = SYSTEM_JDK_VERSION + platform = "linux" + } + linuxGradleJdk = jdksContainer.create("linux_gradle") { + version = GRADLE_JDK_VERSION + platform = "linux" + } + windowsSystemJdk = jdksContainer.create("windows_system") { + version = SYSTEM_JDK_VERSION + platform = "windows" + } + windowsGradleJdk = jdksContainer.create("windows_gradle") { + version = GRADLE_JDK_VERSION + platform = "windows" + } collectAvailableBoxes(project) @@ -174,6 +208,7 @@ class VagrantTestPlugin implements Plugin { which should work for 5.0.0+. This isn't a real ivy repository but gradle is fine with that */ repos.ivy { + name "elasticsearch" artifactPattern "https://artifacts.elastic.co/downloads/elasticsearch/[module]-[revision].[ext]" } } @@ -263,7 +298,7 @@ class VagrantTestPlugin implements Plugin { } } - private static void createPrepareVagrantTestEnvTask(Project project) { + private void createPrepareVagrantTestEnvTask(Project project) { File packagingDir = new File(project.buildDir, PACKAGING_CONFIGURATION) File archivesDir = new File(packagingDir, 'archives') @@ -279,7 +314,7 @@ class VagrantTestPlugin implements Plugin { } Task createLinuxRunnerScript = project.tasks.create('createLinuxRunnerScript', FileContentsTask) { - dependsOn copyPackagingTests + dependsOn copyPackagingTests, linuxGradleJdk, linuxSystemJdk file "${testsDir}/run-tests.sh" contents """\ if [ "\$#" -eq 0 ]; then @@ -287,11 +322,15 @@ class VagrantTestPlugin implements Plugin { else test_args=( "\$@" ) fi - "\$SYSTEM_JAVA_HOME"/bin/java -cp "\$PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner "\${test_args[@]}" + + if [ -z "\$SYSTEM_JAVA_HOME" ]; then + export SYSTEM_JAVA_HOME="${-> convertPath(project, linuxSystemJdk.toString()) }" + fi + "${-> convertPath(project, linuxGradleJdk.toString()) }"/bin/java -cp "\$PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner "\${test_args[@]}" """ } Task createWindowsRunnerScript = project.tasks.create('createWindowsRunnerScript', FileContentsTask) { - dependsOn copyPackagingTests + dependsOn copyPackagingTests, windowsGradleJdk, windowsSystemJdk file "${testsDir}/run-tests.ps1" // the use of $args rather than param() here is deliberate because the syntax for array (multivalued) parameters is likely // a little trappy for those unfamiliar with powershell @@ -301,7 +340,8 @@ class VagrantTestPlugin implements Plugin { } else { \$testArgs = \$args } - & "\$Env:SYSTEM_JAVA_HOME"/bin/java -cp "\$Env:PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner @testArgs + \$Env:SYSTEM_JAVA_HOME = "${-> convertPath(project, windowsSystemJdk.toString()) }" + & "${-> convertPath(project, windowsGradleJdk.toString()) }"/bin/java -cp 
"\$Env:PACKAGING_TESTS/*" org.elasticsearch.packaging.VMTestRunner @testArgs exit \$LASTEXITCODE """ } @@ -538,10 +578,10 @@ class VagrantTestPlugin implements Plugin { if (LINUX_BOXES.contains(box)) { Task batsPackagingTest = project.tasks.create("vagrant${boxTask}#batsPackagingTest", BatsOverVagrantTask) { - remoteCommand BATS_TEST_COMMAND + remoteCommand "export SYSTEM_JAVA_HOME=\"${-> convertPath(project, linuxSystemJdk.toString())}\"; " + BATS_TEST_COMMAND boxName box environmentVars vagrantEnvVars - dependsOn up, setupPackagingTest + dependsOn up, setupPackagingTest, linuxSystemJdk finalizedBy halt } @@ -616,4 +656,9 @@ class VagrantTestPlugin implements Plugin { } } } + + // convert the given path from an elasticsearch repo path to a VM path + private String convertPath(Project project, String path) { + return "/elasticsearch/" + project.rootDir.toPath().relativize(Paths.get(path)); + } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/Jdk.java b/buildSrc/src/main/java/org/elasticsearch/gradle/Jdk.java new file mode 100644 index 0000000000000..aa26f398e8be9 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/Jdk.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.gradle;
+
+import org.gradle.api.Buildable;
+import org.gradle.api.Project;
+import org.gradle.api.artifacts.Configuration;
+import org.gradle.api.provider.Property;
+import org.gradle.api.tasks.TaskDependency;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.regex.Pattern;
+
+public class Jdk implements Buildable, Iterable<File> {
+
+    static final Pattern VERSION_PATTERN = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+)(@([a-f0-9]{32}))?");
+    private static final List<String> ALLOWED_PLATFORMS = Collections.unmodifiableList(Arrays.asList("linux", "windows", "darwin"));
+
+    private final String name;
+    private final Configuration configuration;
+
+    private final Property<String> version;
+    private final Property<String> platform;
+
+
+    Jdk(String name, Project project) {
+        this.name = name;
+        this.configuration = project.getConfigurations().create("jdk_" + name);
+        this.version = project.getObjects().property(String.class);
+        this.platform = project.getObjects().property(String.class);
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public String getVersion() {
+        return version.get();
+    }
+
+    public void setVersion(String version) {
+        if (VERSION_PATTERN.matcher(version).matches() == false) {
+            throw new IllegalArgumentException("malformed version [" + version + "] for jdk [" + name + "]");
+        }
+        this.version.set(version);
+    }
+
+    public String getPlatform() {
+        return platform.get();
+    }
+
+    public void setPlatform(String platform) {
+        if (ALLOWED_PLATFORMS.contains(platform) == false) {
+            throw new IllegalArgumentException(
+                "unknown platform [" + platform + "] for jdk [" + name + "], must be one of " + ALLOWED_PLATFORMS);
+        }
+        this.platform.set(platform);
+    }
+
+    // pkg private, for internal use
+    Configuration getConfiguration() {
+        return configuration;
+    }
+
+    @Override
+    public String toString() {
+        return configuration.getSingleFile().toString();
+    }
+
+    @Override
+    public TaskDependency getBuildDependencies() {
+        return configuration.getBuildDependencies();
+    }
+
+    // internal, make this jdks configuration unmodifiable
+    void finalizeValues() {
+        if (version.isPresent() == false) {
+            throw new IllegalArgumentException("version not specified for jdk [" + name + "]");
+        }
+        if (platform.isPresent() == false) {
+            throw new IllegalArgumentException("platform not specified for jdk [" + name + "]");
+        }
+        version.finalizeValue();
+        platform.finalizeValue();
+    }
+
+    @Override
+    public Iterator<File> iterator() {
+        return configuration.iterator();
+    }
+}
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java
new file mode 100644
index 0000000000000..a408b66ec817d
--- /dev/null
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java
@@ -0,0 +1,184 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.gradle;
+
+import org.gradle.api.Action;
+import org.gradle.api.NamedDomainObjectContainer;
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+import org.gradle.api.UnknownTaskException;
+import org.gradle.api.artifacts.Configuration;
+import org.gradle.api.artifacts.ConfigurationContainer;
+import org.gradle.api.artifacts.dsl.DependencyHandler;
+import org.gradle.api.artifacts.dsl.RepositoryHandler;
+import org.gradle.api.artifacts.repositories.IvyArtifactRepository;
+import org.gradle.api.file.CopySpec;
+import org.gradle.api.file.FileTree;
+import org.gradle.api.file.RelativePath;
+import org.gradle.api.tasks.Copy;
+import org.gradle.api.tasks.TaskProvider;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.function.Supplier;
+import java.util.regex.Matcher;
+
+public class JdkDownloadPlugin implements Plugin<Project> {
+
+    private static final String REPO_NAME_PREFIX = "jdk_repo_";
+
+    @Override
+    public void apply(Project project) {
+        NamedDomainObjectContainer<Jdk> jdksContainer = project.container(Jdk.class, name ->
+            new Jdk(name, project)
+        );
+        project.getExtensions().add("jdks", jdksContainer);
+
+        project.afterEvaluate(p -> {
+            for (Jdk jdk : jdksContainer) {
+                jdk.finalizeValues();
+                String version = jdk.getVersion();
+                String platform = jdk.getPlatform();
+
+                // depend on the jdk directory "artifact" from the root project
+                DependencyHandler dependencies = project.getDependencies();
+                Map<String, Object> depConfig = new HashMap<>();
+                depConfig.put("path", ":"); // root project
+                depConfig.put("configuration", configName("extracted_jdk", version, platform));
+                dependencies.add(jdk.getConfiguration().getName(), dependencies.project(depConfig));
+
+                // ensure a root level jdk download task exists
+                setupRootJdkDownload(project.getRootProject(), platform, version);
+            }
+        });
+
+        // all other repos should ignore the special jdk artifacts
+        project.getRootProject().getRepositories().all(repo -> {
+            if (repo.getName().startsWith(REPO_NAME_PREFIX) == false) {
+                repo.content(content -> content.excludeGroup("jdk"));
+            }
+        });
+    }
+
+    private static void setupRootJdkDownload(Project rootProject, String platform, String version) {
+        String extractTaskName = "extract" + capitalize(platform) + "Jdk" + version;
+        // NOTE: this is *horrendous*, but seems to be the only way to check for the existence of a registered task
+        try {
+            rootProject.getTasks().named(extractTaskName);
+            // already setup this version
+            return;
+        } catch (UnknownTaskException e) {
+            // fall through: register the task
+        }
+
+        // decompose the bundled jdk version, broken into elements as: [feature, interim, update, build]
+        // Note the "patch" version is not yet handled here, as it has not yet been used by java.
+ Matcher jdkVersionMatcher = Jdk.VERSION_PATTERN.matcher(version); + if (jdkVersionMatcher.matches() == false) { + throw new IllegalArgumentException("Malformed jdk version [" + version + "]"); + } + String jdkVersion = jdkVersionMatcher.group(1) + (jdkVersionMatcher.group(2) != null ? (jdkVersionMatcher.group(2)) : ""); + String jdkMajor = jdkVersionMatcher.group(1); + String jdkBuild = jdkVersionMatcher.group(3); + String hash = jdkVersionMatcher.group(5); + + // add fake ivy repo for jdk url + String repoName = REPO_NAME_PREFIX + version; + RepositoryHandler repositories = rootProject.getRepositories(); + if (rootProject.getRepositories().findByName(repoName) == null) { + if (hash != null) { + // current pattern since 12.0.1 + repositories.ivy(ivyRepo -> { + ivyRepo.setName(repoName); + ivyRepo.setUrl("https://download.oracle.com"); + ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact); + ivyRepo.patternLayout(layout -> layout.artifact( + "java/GA/jdk" + jdkVersion + "/" + hash + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]")); + ivyRepo.content(content -> content.includeGroup("jdk")); + }); + } else { + // simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back + repositories.ivy(ivyRepo -> { + ivyRepo.setName(repoName); + ivyRepo.setUrl("https://download.oracle.com"); + ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact); + ivyRepo.patternLayout(layout -> + layout.artifact("java/GA/jdk" + jdkMajor + "/" + jdkBuild + "/GPL/openjdk-[revision]_[module]-x64_bin.[ext]")); + ivyRepo.content(content -> content.includeGroup("jdk")); + }); + } + } + + // add the jdk as a "dependency" + final ConfigurationContainer configurations = rootProject.getConfigurations(); + String remoteConfigName = configName("openjdk", version, platform); + String localConfigName = configName("extracted_jdk", version, platform); + Configuration jdkConfig = configurations.findByName(remoteConfigName); + if (jdkConfig == null) { + jdkConfig = configurations.create(remoteConfigName); + configurations.create(localConfigName); + } + String extension = platform.equals("windows") ? "zip" : "tar.gz"; + String jdkDep = "jdk:" + (platform.equals("darwin") ? "osx" : platform) + ":" + jdkVersion + "@" + extension; + rootProject.getDependencies().add(configName("openjdk", version, platform), jdkDep); + + // add task for extraction + // TODO: look into doing this as an artifact transform, which are cacheable starting in gradle 5.3 + int rootNdx = platform.equals("darwin") ? 
2 : 1;
+        Action<CopySpec> removeRootDir = copy -> {
+            // remove extra unnecessary directory levels
+            copy.eachFile(details -> {
+                String[] pathSegments = details.getRelativePath().getSegments();
+                String[] newPathSegments = Arrays.copyOfRange(pathSegments, rootNdx, pathSegments.length);
+                details.setRelativePath(new RelativePath(true, newPathSegments));
+            });
+            copy.setIncludeEmptyDirs(false);
+        };
+        // delay resolving jdkConfig until runtime
+        Supplier<File> jdkArchiveGetter = jdkConfig::getSingleFile;
+        final Callable<FileTree> fileGetter;
+        if (extension.equals("zip")) {
+            fileGetter = () -> rootProject.zipTree(jdkArchiveGetter.get());
+        } else {
+            fileGetter = () -> rootProject.tarTree(rootProject.getResources().gzip(jdkArchiveGetter.get()));
+        }
+        String extractDir = rootProject.getBuildDir().toPath().resolve("jdks/openjdk-" + jdkVersion + "_" + platform).toString();
+        TaskProvider<Copy> extractTask = rootProject.getTasks().register(extractTaskName, Copy.class, copyTask -> {
+            copyTask.doFirst(t -> rootProject.delete(extractDir));
+            copyTask.into(extractDir);
+            copyTask.from(fileGetter, removeRootDir);
+        });
+        rootProject.getArtifacts().add(localConfigName,
+            rootProject.getLayout().getProjectDirectory().dir(extractDir),
+            artifact -> artifact.builtBy(extractTask));
+    }
+
+    private static String configName(String prefix, String version, String platform) {
+        return prefix + "_" + version + "_" + platform;
+    }
+
+    private static String capitalize(String s) {
+        return s.substring(0, 1).toUpperCase(Locale.ROOT) + s.substring(1);
+    }
+}
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/http/WaitForHttpResource.java b/buildSrc/src/main/java/org/elasticsearch/gradle/http/WaitForHttpResource.java
index a8680ef13dda0..3ac4a53910c26 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/http/WaitForHttpResource.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/http/WaitForHttpResource.java
@@ -123,8 +123,7 @@ public boolean wait(int durationInMs) throws GeneralSecurityException, Interrupt
             if (System.nanoTime() < waitUntil) {
                 Thread.sleep(sleep);
             } else {
-                logger.error("Failed to access url [{}]", url, failure);
-                return false;
+                throw failure;
             }
         }
     }
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java
index 8ec979420c0e4..e73a9d1e585e3 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.java
@@ -28,11 +28,13 @@
 import org.gradle.api.file.FileTree;
 import org.gradle.api.specs.Spec;
 import org.gradle.api.tasks.CacheableTask;
+import org.gradle.api.tasks.Classpath;
 import org.gradle.api.tasks.Input;
 import org.gradle.api.tasks.InputFile;
 import org.gradle.api.tasks.InputFiles;
+import org.gradle.api.tasks.Internal;
 import org.gradle.api.tasks.Optional;
-import org.gradle.api.tasks.OutputDirectory;
+import org.gradle.api.tasks.OutputFile;
 import org.gradle.api.tasks.PathSensitive;
 import org.gradle.api.tasks.PathSensitivity;
 import org.gradle.api.tasks.SkipWhenEmpty;
@@ -45,6 +47,7 @@
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Set;
@@ -113,7 +116,7 @@ public void setJavaHome(String javaHome) {
         this.javaHome = javaHome;
     }
 
-    @OutputDirectory
+    @Internal
     public File getJarExpandDir() {
         return new
File( new File(getProject().getBuildDir(), "precommit/thirdPartyAudit"), @@ -121,6 +124,11 @@ public File getJarExpandDir() { ); } + @OutputFile + public File getSuccessMarker() { + return new File(getProject().getBuildDir(), "markers/" + getName()); + } + public void ignoreMissingClasses(String... classesOrPackages) { if (classesOrPackages.length == 0) { missingClassExcludes = null; @@ -157,8 +165,7 @@ public Set getMissingClassExcludes() { return missingClassExcludes; } - @InputFiles - @PathSensitive(PathSensitivity.NAME_ONLY) + @Classpath @SkipWhenEmpty public Set getJarsToScan() { // These are SelfResolvingDependency, and some of them backed by file collections, like the Gradle API files, @@ -241,6 +248,10 @@ public void runThirdPartyAudit() throws IOException { } assertNoJarHell(jdkJarHellClasses); + + // Mark successful third party audit check + getSuccessMarker().getParentFile().mkdirs(); + Files.write(getSuccessMarker().toPath(), new byte[]{}); } private void logForbiddenAPIsOutput(String forbiddenApisOutput) { diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java index 35b3bbd7481b6..0cb7ee0c10fc7 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java @@ -22,23 +22,22 @@ import org.elasticsearch.gradle.Distribution; import org.elasticsearch.gradle.FileSupplier; import org.elasticsearch.gradle.Version; +import org.elasticsearch.gradle.http.WaitForHttpResource; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Project; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; -import java.io.BufferedReader; import java.io.File; import java.io.IOException; -import java.io.InputStreamReader; import java.io.UncheckedIOException; -import java.net.HttpURLConnection; import java.net.URI; -import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.Files; +import java.security.GeneralSecurityException; import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -75,6 +74,8 @@ public ElasticsearchCluster(String path, String clusterName, Project project, Fi services, artifactsExtractDir, workingDirBase ) ); + + addWaitForClusterHealth(); } public void setNumberOfNodes(int numberOfNodes) { @@ -219,6 +220,11 @@ public void extraConfigFile(String destination, File from) { nodes.all(node -> node.extraConfigFile(destination, from)); } + @Override + public void user(Map userSpec) { + nodes.all(node -> node.user(userSpec)); + } + private void writeUnicastHostsFiles() { String unicastUris = nodes.stream().flatMap(node -> node.getAllTransportPortURI().stream()).collect(Collectors.joining("\n")); nodes.forEach(node -> { @@ -262,9 +268,6 @@ public void waitForAllConditions() { writeUnicastHostsFiles(); LOGGER.info("Starting to wait for cluster to form"); - addWaitForUri( - "cluster health yellow", "/_cluster/health?wait_for_nodes=>=" + nodes.size() + "&wait_for_status=yellow" - ); waitForConditions(waitConditions, startedAt, CLUSTER_UP_TIMEOUT, CLUSTER_UP_TIMEOUT_UNIT, this); } @@ -293,21 +296,25 @@ public ElasticsearchNode singleNode() { return getFirstNode(); } - private void addWaitForUri(String description, String uri) { - 
waitConditions.put(description, (node) -> {
+    private void addWaitForClusterHealth() {
+        waitConditions.put("cluster health yellow", (node) -> {
             try {
-                URL url = new URL("http://" + getFirstNode().getHttpSocketURI() + uri);
-                HttpURLConnection con = (HttpURLConnection) url.openConnection();
-                con.setRequestMethod("GET");
-                con.setConnectTimeout(500);
-                con.setReadTimeout(500);
-                try (BufferedReader reader = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
-                    String response = reader.lines().collect(Collectors.joining("\n"));
-                    LOGGER.info("{} -> {} ->\n{}", this, uri, response);
+                WaitForHttpResource wait = new WaitForHttpResource(
+                    "http", getFirstNode().getHttpSocketURI(), nodes.size()
+                );
+                List<Map<String, String>> credentials = getFirstNode().getCredentials();
+                if (getFirstNode().getCredentials().isEmpty() == false) {
+                    wait.setUsername(credentials.get(0).get("useradd"));
+                    wait.setPassword(credentials.get(0).get("-p"));
                 }
-                return true;
+                return wait.wait(500);
             } catch (IOException e) {
                 throw new IllegalStateException("Connection attempt to " + this + " failed", e);
+            } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
+                throw new TestClustersException("Interrupted while waiting for " + this, e);
+            } catch (GeneralSecurityException e) {
+                throw new RuntimeException("security exception", e);
             }
         });
     }
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
index 596e205480cb0..3bb1fb2ddb6e3 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
@@ -38,6 +38,7 @@
 import java.nio.file.StandardCopyOption;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
@@ -86,6 +87,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
     private final Map<String, Supplier<CharSequence>> environment = new LinkedHashMap<>();
     private final Map<String, File> extraConfigFiles = new HashMap<>();
     final LinkedHashMap<String, String> defaultConfig = new LinkedHashMap<>();
+    private final List<Map<String, String>> credentials = new ArrayList<>();
 
     private final Path confPathRepo;
     private final Path configFile;
@@ -117,8 +119,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
         esStdoutFile = confPathLogs.resolve("es.stdout.log");
         esStderrFile = confPathLogs.resolve("es.stderr.log");
         tmpDir = workingDir.resolve("tmp");
-        waitConditions.put("http ports file", node -> Files.exists(((ElasticsearchNode) node).httpPortsFile));
-        waitConditions.put("transport ports file", node -> Files.exists(((ElasticsearchNode)node).transportPortFile));
+        waitConditions.put("ports files", this::checkPortsFilesExistWithDelay);
     }
 
     public String getName() {
@@ -319,9 +320,25 @@ public synchronized void start() {
 
         copyExtraConfigFiles();
 
+        if (isSettingMissingOrTrue("xpack.security.enabled")) {
+            if (credentials.isEmpty()) {
+                user(Collections.emptyMap());
+            }
+            credentials.forEach(paramMap -> runElaticsearchBinScript(
+                "elasticsearch-users",
+                paramMap.entrySet().stream()
+                    .flatMap(entry -> Stream.of(entry.getKey(), entry.getValue()))
+                    .toArray(String[]::new)
+            ));
+        }
+
         startElasticsearchProcess();
     }
 
+    private boolean isSettingMissingOrTrue(String name) {
+        return Boolean.valueOf(settings.getOrDefault(name, () -> "false").get().toString());
+    }
+
     private void copyExtraConfigFiles() {
         extraConfigFiles.forEach((destination, from) -> {
             if (Files.exists(from.toPath()) == false) {
@@ -375,6 +392,22 @@ public void extraConfigFile(String destination, File from) {
         extraConfigFiles.put(destination, from);
     }
 
+    @Override
+    public void user(Map<String, String> userSpec) {
+        Set<String> keys = new HashSet<>(userSpec.keySet());
+        keys.remove("username");
+        keys.remove("password");
+        keys.remove("role");
+        if (keys.isEmpty() == false) {
+            throw new TestClustersException("Unknown keys in user definition " + keys + " for " + this);
+        }
+        Map<String, String> cred = new LinkedHashMap<>();
+        cred.put("useradd", userSpec.getOrDefault("username","test_user"));
+        cred.put("-p", userSpec.getOrDefault("password","x-pack-test-password"));
+        cred.put("-r", userSpec.getOrDefault("role", "superuser"));
+        credentials.add(cred);
+    }
+
     private void runElaticsearchBinScriptWithInput(String input, String tool, String... args) {
         try (InputStream byteArrayInputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8))) {
             services.loggedExec(spec -> {
@@ -752,4 +785,21 @@ public int hashCode() {
     public String toString() {
         return "node{" + path + ":" + name + "}";
     }
+
+    List<Map<String, String>> getCredentials() {
+        return credentials;
+    }
+
+    private boolean checkPortsFilesExistWithDelay(TestClusterConfiguration node) {
+        if (Files.exists(httpPortsFile) && Files.exists(transportPortFile)) {
+            return true;
+        }
+        try {
+            Thread.sleep(500);
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new TestClustersException("Interrupted while waiting for ports files", e);
+        }
+        return Files.exists(httpPortsFile) && Files.exists(transportPortFile);
+    }
 }
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java
index 39f9683ff4863..628dadcbb9d37 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java
@@ -27,6 +27,7 @@
 import java.net.URI;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Predicate;
 import java.util.function.Supplier;
@@ -72,6 +73,8 @@ public interface TestClusterConfiguration {
 
     void extraConfigFile(String destination, File from);
 
+    void user(Map<String, String> userSpec);
+
     String getHttpSocketURI();
 
     String getTransportPortURI();
@@ -108,7 +111,7 @@ default void waitForConditions(
                     break;
                 }
             } catch (TestClustersException e) {
-                throw new TestClustersException(e);
+                throw e;
             } catch (Exception e) {
                 if (lastException == null) {
                     lastException = e;
@@ -116,12 +119,6 @@ default void waitForConditions(
                     lastException = e;
                 }
             }
-            try {
-                Thread.sleep(500);
-            }
-            catch (InterruptedException e) {
-                Thread.currentThread().interrupt();
-            }
         }
         if (conditionMet == false) {
             String message = "`" + context + "` failed to wait for " + description + " after " +
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
index c1ed6b770f04c..daca1f5ebb191 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
@@ -182,8 +182,9 @@ private void configureClaimClustersHook(Project project) {
                 claimsInventory.put(elasticsearchCluster,
claimsInventory.getOrDefault(elasticsearchCluster, 0) + 1); } })); - - logger.info("Claims inventory: {}", claimsInventory); + if (claimsInventory.isEmpty() == false) { + logger.info("Claims inventory: {}", claimsInventory); + } }); } @@ -279,8 +280,14 @@ private static void autoConfigureClusterDependencies( // the clusters will look for artifacts there based on the naming conventions. // Tasks that use a cluster will add this as a dependency automatically so it's guaranteed to run early in // the build. - Task sync = Boilerplate.maybeCreate(rootProject.getTasks(), SYNC_ARTIFACTS_TASK_NAME, onCreate -> { + Boilerplate.maybeCreate(rootProject.getTasks(), SYNC_ARTIFACTS_TASK_NAME, onCreate -> { onCreate.getOutputs().dir(getExtractDir(rootProject)); + onCreate.getInputs().files( + project.getRootProject().getConfigurations().matching(conf -> conf.getName().startsWith(HELPER_CONFIGURATION_PREFIX)) + ); + onCreate.dependsOn(project.getRootProject().getConfigurations() + .matching(conf -> conf.getName().startsWith(HELPER_CONFIGURATION_PREFIX)) + ); // NOTE: Gradle doesn't allow a lambda here ( fails at runtime ) onCreate.doFirst(new Action() { @Override @@ -290,6 +297,31 @@ public void execute(Task task) { project.delete(getExtractDir(rootProject)); } }); + onCreate.doLast(new Action() { + @Override + public void execute(Task task) { + project.getRootProject().getConfigurations() + .matching(config -> config.getName().startsWith(HELPER_CONFIGURATION_PREFIX)) + .forEach(config -> project.copy(spec -> + config.getResolvedConfiguration() + .getResolvedArtifacts() + .forEach(resolvedArtifact -> { + final FileTree files; + File file = resolvedArtifact.getFile(); + if (file.getName().endsWith(".zip")) { + files = project.zipTree(file); + } else if (file.getName().endsWith("tar.gz")) { + files = project.tarTree(file); + } else { + throw new IllegalArgumentException("Can't extract " + file + " unknown file extension"); + } + logger.info("Extracting {}@{}", resolvedArtifact, config); + spec.from(files, s -> s.into(resolvedArtifact.getModuleVersion().getId().getGroup())); + spec.into(getExtractDir(project)); + })) + ); + } + }); }); // When the project evaluated we know of all tasks that use clusters. 
@@ -347,29 +379,6 @@ public void execute(Task task) { distribution.getFileExtension()); } - - sync.getInputs().files(helperConfiguration); - // NOTE: Gradle doesn't allow a lambda here ( fails at runtime ) - sync.doLast(new Action() { - @Override - public void execute(Task task) { - project.copy(spec -> - helperConfiguration.getResolvedConfiguration().getResolvedArtifacts().forEach(resolvedArtifact -> { - final FileTree files; - File file = resolvedArtifact.getFile(); - if (file.getName().endsWith(".zip")) { - files = project.zipTree(file); - } else if (file.getName().endsWith("tar.gz")) { - files = project.tarTree(file); - } else { - throw new IllegalArgumentException("Can't extract " + file + " unknown file extension"); - } - - spec.from(files, s -> s.into(resolvedArtifact.getModuleVersion().getId().getGroup())); - spec.into(getExtractDir(project)); - })); - } - }); }))); } diff --git a/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/LazyFileOutputStream.java b/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/LazyFileOutputStream.java new file mode 100644 index 0000000000000..d3101868e84b6 --- /dev/null +++ b/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/LazyFileOutputStream.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; + +/** + * An outputstream to a File that is lazily opened on the first write. 
+ */ +class LazyFileOutputStream extends OutputStream { + private OutputStream delegate; + + LazyFileOutputStream(File file) { + // use an initial dummy delegate to avoid doing a conditional on every write + this.delegate = new OutputStream() { + private void bootstrap() throws IOException { + file.getParentFile().mkdirs(); + delegate = new FileOutputStream(file); + } + @Override + public void write(int b) throws IOException { + bootstrap(); + delegate.write(b); + } + @Override + public void write(byte b[], int off, int len) throws IOException { + bootstrap(); + delegate.write(b, off, len); + } + }; + } + + @Override + public void write(int b) throws IOException { + delegate.write(b); + } + + @Override + public void write(byte b[], int off, int len) throws IOException { + delegate.write(b, off, len); + } + + @Override + public void close() throws IOException { + delegate.close(); + } +} diff --git a/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/LoggedExec.java b/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/LoggedExec.java index 8dd59170039eb..c71b7ba183562 100644 --- a/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/LoggedExec.java +++ b/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/LoggedExec.java @@ -3,14 +3,22 @@ import org.gradle.api.Action; import org.gradle.api.GradleException; import org.gradle.api.Project; +import org.gradle.api.logging.Logger; import org.gradle.api.tasks.Exec; +import org.gradle.api.tasks.Internal; import org.gradle.process.BaseExecSpec; import org.gradle.process.ExecResult; import org.gradle.process.ExecSpec; import org.gradle.process.JavaExecSpec; import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.io.OutputStream; import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.function.Consumer; import java.util.function.Function; /** @@ -19,35 +27,56 @@ @SuppressWarnings("unchecked") public class LoggedExec extends Exec { + private Consumer outputLogger; + public LoggedExec() { - ByteArrayOutputStream output = new ByteArrayOutputStream(); - ByteArrayOutputStream error = new ByteArrayOutputStream(); + if (getLogger().isInfoEnabled() == false) { - setStandardOutput(output); - setErrorOutput(error); setIgnoreExitValue(true); - doLast((unused) -> { - if (getExecResult().getExitValue() != 0) { - try { - getLogger().error("Standard output:"); - getLogger().error(output.toString("UTF-8")); - getLogger().error("Standard error:"); - getLogger().error(error.toString("UTF-8")); - } catch (UnsupportedEncodingException e) { - throw new GradleException("Failed to read exec output", e); - } - throw new GradleException( - String.format( - "Process '%s %s' finished with non-zero exit value %d", - getExecutable(), - getArgs(), - getExecResult().getExitValue() - ) - ); + setSpoolOutput(false); + doLast(task -> { + if (getExecResult().getExitValue() != 0) { + try { + getLogger().error("Output for " + getExecutable() + ":"); + outputLogger.accept(getLogger()); + } catch (Exception e) { + throw new GradleException("Failed to read exec output", e); + } + throw new GradleException( + String.format( + "Process '%s %s' finished with non-zero exit value %d", + getExecutable(), + getArgs(), + getExecResult().getExitValue() + ) + ); + } + }); + } + } + + @Internal + public void setSpoolOutput(boolean spoolOutput) { + final OutputStream out; + if (spoolOutput) { + File spoolFile = new File(getProject().getBuildDir() + 
"/buffered-output/" + this.getName()); + out = new LazyFileOutputStream(spoolFile); + outputLogger = logger -> { + try { + // the file may not exist if the command never output anything + if (Files.exists(spoolFile.toPath())) { + Files.lines(spoolFile.toPath()).forEach(logger::error); } + } catch (IOException e) { + throw new RuntimeException("could not log", e); } - ); + }; + } else { + out = new ByteArrayOutputStream(); + outputLogger = logger -> logger.error(((ByteArrayOutputStream) out).toString(StandardCharsets.UTF_8)); } + setStandardOutput(out); + setErrorOutput(out); } public static ExecResult exec(Project project, Action action) { diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.jdk-download.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.jdk-download.properties new file mode 100644 index 0000000000000..7568724a32a0b --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.jdk-download.properties @@ -0,0 +1 @@ +implementation-class=org.elasticsearch.gradle.JdkDownloadPlugin \ No newline at end of file diff --git a/buildSrc/src/main/resources/minimumGradleVersion b/buildSrc/src/main/resources/minimumGradleVersion index 11aa145248e68..04edabda285a6 100644 --- a/buildSrc/src/main/resources/minimumGradleVersion +++ b/buildSrc/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -5.3 \ No newline at end of file +5.4.1 \ No newline at end of file diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/BuildExamplePluginsIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/BuildExamplePluginsIT.java index ee8e94c0e1ea1..bf982fa3aa2d2 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/BuildExamplePluginsIT.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/BuildExamplePluginsIT.java @@ -75,7 +75,7 @@ public static Iterable parameters() { } public void testCurrentExamplePlugin() throws IOException { - FileUtils.copyDirectory(examplePlugin, tmpDir.getRoot()); + FileUtils.copyDirectory(examplePlugin, tmpDir.getRoot(), pathname -> pathname.getPath().contains("/build/") == false); adaptBuildScriptForTest(); @@ -99,6 +99,7 @@ private void adaptBuildScriptForTest() throws IOException { "buildscript {\n" + " repositories {\n" + " maven {\n" + + " name = \"test\"\n" + " url = '" + getLocalTestRepoPath() + "'\n" + " }\n" + " }\n" + @@ -117,12 +118,14 @@ private void adaptBuildScriptForTest() throws IOException { String luceneSnapshotRevision = System.getProperty("test.lucene-snapshot-revision"); if (luceneSnapshotRepo != null) { luceneSnapshotRepo = " maven {\n" + + " name \"lucene-snapshots\"\n" + " url \"https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision + "\"\n" + " }\n"; } writeBuildScript("\n" + "repositories {\n" + " maven {\n" + + " name \"test\"\n" + " url \"" + getLocalTestRepoPath() + "\"\n" + " }\n" + " flatDir {\n" + @@ -153,5 +156,4 @@ private Path writeBuildScript(String script) { throw new RuntimeException(e); } } - } diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTaskIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTaskIT.java index 99afd0bcbe0ae..7968f4f57cf90 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTaskIT.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTaskIT.java @@ -21,7 +21,6 @@ import 
org.elasticsearch.gradle.test.GradleIntegrationTestCase; import org.gradle.testkit.runner.BuildResult; -import org.gradle.testkit.runner.GradleRunner; public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTestCase { @@ -29,25 +28,19 @@ public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTe public static final String PROJECT_NAME = "elasticsearch-build-resources"; public void testUpToDateWithSourcesConfigured() { - GradleRunner.create() - .withProjectDir(getProjectDir(PROJECT_NAME)) + getGradleRunner(PROJECT_NAME) .withArguments("clean", "-s") - .withPluginClasspath() .build(); - BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir(PROJECT_NAME)) + BuildResult result = getGradleRunner(PROJECT_NAME) .withArguments("buildResources", "-s", "-i") - .withPluginClasspath() .build(); assertTaskSuccessful(result, ":buildResources"); assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml"); assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml"); - result = GradleRunner.create() - .withProjectDir(getProjectDir(PROJECT_NAME)) + result = getGradleRunner(PROJECT_NAME) .withArguments("buildResources", "-s", "-i") - .withPluginClasspath() .build(); assertTaskUpToDate(result, ":buildResources"); assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml"); @@ -55,10 +48,8 @@ public void testUpToDateWithSourcesConfigured() { } public void testImplicitTaskDependencyCopy() { - BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir(PROJECT_NAME)) + BuildResult result = getGradleRunner(PROJECT_NAME) .withArguments("clean", "sampleCopyAll", "-s", "-i") - .withPluginClasspath() .build(); assertTaskSuccessful(result, ":buildResources"); @@ -69,10 +60,8 @@ public void testImplicitTaskDependencyCopy() { } public void testImplicitTaskDependencyInputFileOfOther() { - BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir(PROJECT_NAME)) + BuildResult result = getGradleRunner(PROJECT_NAME) .withArguments("clean", "sample", "-s", "-i") - .withPluginClasspath() .build(); assertTaskSuccessful(result, ":sample"); @@ -81,11 +70,12 @@ public void testImplicitTaskDependencyInputFileOfOther() { } public void testIncorrectUsage() { - BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir(PROJECT_NAME)) - .withArguments("noConfigAfterExecution", "-s", "-i") - .withPluginClasspath() - .buildAndFail(); - assertOutputContains("buildResources can't be configured after the task ran"); + assertOutputContains( + getGradleRunner(PROJECT_NAME) + .withArguments("noConfigAfterExecution", "-s", "-i") + .buildAndFail() + .getOutput(), + "buildResources can't be configured after the task ran" + ); } } diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/JdkDownloadPluginIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/JdkDownloadPluginIT.java new file mode 100644 index 0000000000000..9d612da610aca --- /dev/null +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/JdkDownloadPluginIT.java @@ -0,0 +1,125 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle; + +import com.github.tomakehurst.wiremock.WireMockServer; +import org.elasticsearch.gradle.test.GradleIntegrationTestCase; +import org.gradle.testkit.runner.BuildResult; +import org.gradle.testkit.runner.GradleRunner; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.function.Consumer; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.head; +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static org.hamcrest.CoreMatchers.equalTo; + +public class JdkDownloadPluginIT extends GradleIntegrationTestCase { + + private static final String OLD_JDK_VERSION = "1+99"; + private static final String JDK_VERSION = "12.0.1+99@123456789123456789123456789abcde"; + private static final Pattern JDK_HOME_LOGLINE = Pattern.compile("JDK HOME: (.*)"); + private static final Pattern NUM_CONFIGS_LOGLINE = Pattern.compile("NUM CONFIGS: (.*)"); + + public void testLinuxExtraction() throws IOException { + assertExtraction("getLinuxJdk", "linux", "bin/java", JDK_VERSION); + } + + public void testDarwinExtraction() throws IOException { + assertExtraction("getDarwinJdk", "osx", "Contents/Home/bin/java", JDK_VERSION); + } + + public void testWindowsExtraction() throws IOException { + assertExtraction("getWindowsJdk", "windows", "bin/java", JDK_VERSION); + } + + public void testLinuxExtractionOldVersion() throws IOException { + assertExtraction("getLinuxJdk", "linux", "bin/java", OLD_JDK_VERSION); + } + + public void testDarwinExtractionOldVersion() throws IOException { + assertExtraction("getDarwinJdk", "osx", "Contents/Home/bin/java", OLD_JDK_VERSION); + } + + public void testWindowsExtractionOldVersion() throws IOException { + assertExtraction("getWindowsJdk", "windows", "bin/java", OLD_JDK_VERSION); + } + + public void testCrossProjectReuse() throws IOException { + runBuild("numConfigurations", "linux", result -> { + Matcher matcher = NUM_CONFIGS_LOGLINE.matcher(result.getOutput()); + assertTrue("could not find num configs in output: " + result.getOutput(), matcher.find()); + assertThat(Integer.parseInt(matcher.group(1)), equalTo(6)); // 3 import configs, 3 export configs + }, JDK_VERSION); + } + + public void assertExtraction(String taskname, String platform, String javaBin, String version) throws IOException { + runBuild(taskname, platform, result -> { + Matcher matcher = JDK_HOME_LOGLINE.matcher(result.getOutput()); + assertTrue("could not find jdk home in output: " + result.getOutput(), matcher.find()); + String jdkHome = matcher.group(1); + Path javaPath = Paths.get(jdkHome, javaBin); + assertTrue(javaPath.toString(), Files.exists(javaPath)); + }, version); + } + + private void runBuild(String taskname, String platform, Consumer assertions, String version) throws IOException { + 
WireMockServer wireMock = new WireMockServer(0); + try { + String extension = platform.equals("windows") ? "zip" : "tar.gz"; + boolean isOld = version.equals(OLD_JDK_VERSION); + String filename = "openjdk-" + (isOld ? "1" : "12.0.1") + "_" + platform + "-x64_bin." + extension; + final byte[] filebytes; + try (InputStream stream = JdkDownloadPluginIT.class.getResourceAsStream("fake_openjdk_" + platform + "." + extension)) { + filebytes = stream.readAllBytes(); + } + String versionPath = isOld ? "jdk1/99" : "jdk12.0.1/123456789123456789123456789abcde/99"; + String urlPath = "/java/GA/" + versionPath + "/GPL/" + filename; + wireMock.stubFor(head(urlEqualTo(urlPath)).willReturn(aResponse().withStatus(200))); + wireMock.stubFor(get(urlEqualTo(urlPath)).willReturn(aResponse().withStatus(200).withBody(filebytes))); + wireMock.start(); + + GradleRunner runner = GradleRunner.create().withProjectDir(getProjectDir("jdk-download")) + .withArguments(taskname, + "-Dlocal.repo.path=" + getLocalTestRepoPath(), + "-Dtests.jdk_version=" + version, + "-Dtests.jdk_repo=" + wireMock.baseUrl(), + "-i") + .withPluginClasspath(); + + BuildResult result = runner.build(); + assertions.accept(result); + } catch (Exception e) { + // for debugging + System.err.println("missed requests: " + wireMock.findUnmatchedRequests().getRequests()); + throw e; + } finally { + wireMock.stop(); + } + } +} diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/JdkDownloadPluginTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/JdkDownloadPluginTests.java new file mode 100644 index 0000000000000..c6ca817e759fa --- /dev/null +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/JdkDownloadPluginTests.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.gradle; + +import org.elasticsearch.gradle.test.GradleUnitTestCase; +import org.gradle.api.NamedDomainObjectContainer; +import org.gradle.api.Project; +import org.gradle.testfixtures.ProjectBuilder; +import org.junit.BeforeClass; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class JdkDownloadPluginTests extends GradleUnitTestCase { + private static Project rootProject; + + @BeforeClass + public static void setupRoot() { + rootProject = ProjectBuilder.builder().build(); + } + + public void testMissingVersion() { + assertJdkError(createProject(), "testjdk", null, "linux", "version not specified for jdk [testjdk]"); + } + + public void testMissingPlatform() { + assertJdkError(createProject(), "testjdk", "11.0.2+33", null, "platform not specified for jdk [testjdk]"); + } + + public void testUnknownPlatform() { + assertJdkError(createProject(), "testjdk", "11.0.2+33", "unknown", + "unknown platform [unknown] for jdk [testjdk], must be one of [linux, windows, darwin]"); + } + + public void testBadVersionFormat() { + assertJdkError(createProject(), "testjdk", "badversion", "linux", "malformed version [badversion] for jdk [testjdk]"); + } + + private void assertJdkError(Project project, String name, String version, String platform, String message) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createJdk(project, name, version, platform)); + assertThat(e.getMessage(), equalTo(message)); + } + + private void createJdk(Project project, String name, String version, String platform) { + @SuppressWarnings("unchecked") + NamedDomainObjectContainer jdks = (NamedDomainObjectContainer) project.getExtensions().getByName("jdks"); + jdks.create(name, jdk -> { + if (version != null) { + jdk.setVersion(version); + } + if (platform != null) { + jdk.setPlatform(platform); + } + }).finalizeValues(); + } + + private Project createProject() { + Project project = ProjectBuilder.builder().withParent(rootProject).build(); + project.getPlugins().apply("elasticsearch.jdk-download"); + return project; + } +} diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/JarHellTaskIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/JarHellTaskIT.java index e5624a15d92df..d45028d844542 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/JarHellTaskIT.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/precommit/JarHellTaskIT.java @@ -2,7 +2,6 @@ import org.elasticsearch.gradle.test.GradleIntegrationTestCase; import org.gradle.testkit.runner.BuildResult; -import org.gradle.testkit.runner.GradleRunner; /* * Licensed to Elasticsearch under one or more contributor @@ -25,10 +24,8 @@ public class JarHellTaskIT extends GradleIntegrationTestCase { public void testJarHellDetected() { - BuildResult result = GradleRunner.create() - .withProjectDir(getProjectDir("jarHell")) + BuildResult result = getGradleRunner("jarHell") .withArguments("clean", "precommit", "-s", "-Dlocal.repo.path=" + getLocalTestRepoPath()) - .withPluginClasspath() .buildAndFail(); assertTaskFailed(result, ":jarHell"); diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/test/BaseTestCase.java b/buildSrc/src/test/java/org/elasticsearch/gradle/test/BaseTestCase.java index c3262ee1e26e6..0fc26f0284c44 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/test/BaseTestCase.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/test/BaseTestCase.java @@ -22,6 +22,7 @@ import 
com.carrotsearch.randomizedtesting.RandomizedRunner; import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; +import junit.framework.AssertionFailedError; import org.junit.Assert; import org.junit.runner.RunWith; @@ -32,4 +33,24 @@ }) @ThreadLeakLingering(linger = 5000) // wait for "Connection worker" to die public abstract class BaseTestCase extends Assert { + + // add expectThrows from junit 5 + @FunctionalInterface + public interface ThrowingRunnable { + void run() throws Throwable; + } + public static T expectThrows(Class expectedType, ThrowingRunnable runnable) { + try { + runnable.run(); + } catch (Throwable e) { + if (expectedType.isInstance(e)) { + return expectedType.cast(e); + } + AssertionFailedError assertion = + new AssertionFailedError("Unexpected exception type, expected " + expectedType.getSimpleName() + " but got " + e); + assertion.initCause(e); + throw assertion; + } + throw new AssertionFailedError("Expected exception "+ expectedType.getSimpleName() + " but no exception was thrown"); + } } diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java index f7a0382cec775..46a9194780c2a 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java @@ -4,8 +4,12 @@ import org.gradle.testkit.runner.BuildTask; import org.gradle.testkit.runner.GradleRunner; import org.gradle.testkit.runner.TaskOutcome; +import org.junit.Rule; +import org.junit.rules.TemporaryFolder; import java.io.File; +import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.List; @@ -16,6 +20,9 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase { + @Rule + public TemporaryFolder testkitTmpDir = new TemporaryFolder(); + protected File getProjectDir(String name) { File root = new File("src/testKit/"); if (root.exists() == false) { @@ -26,9 +33,16 @@ protected File getProjectDir(String name) { } protected GradleRunner getGradleRunner(String sampleProject) { + File testkit; + try { + testkit = testkitTmpDir.newFolder(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } return GradleRunner.create() .withProjectDir(getProjectDir(sampleProject)) - .withPluginClasspath(); + .withPluginClasspath() + .withTestKitDir(testkit); } protected File getBuildDir(String name) { diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java index 84b13340c35cf..c9086d1459afd 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java @@ -21,12 +21,21 @@ import org.elasticsearch.gradle.test.GradleIntegrationTestCase; import org.gradle.testkit.runner.BuildResult; import org.gradle.testkit.runner.GradleRunner; +import org.junit.Before; import org.junit.Ignore; + import java.util.Arrays; public class TestClustersPluginIT extends GradleIntegrationTestCase { + private GradleRunner runner; + + @Before + public void setUp() throws Exception { + runner = getGradleRunner("testclusters"); + } + public void testListClusters() 
{ BuildResult result = getTestClustersRunner("listTestClusters").build(); @@ -190,10 +199,7 @@ private GradleRunner getTestClustersRunner(String... tasks) { arguments[tasks.length] = "-s"; arguments[tasks.length + 1] = "-i"; arguments[tasks.length + 2] = "-Dlocal.repo.path=" + getLocalTestRepoPath(); - return GradleRunner.create() - .withProjectDir(getProjectDir("testclusters")) - .withArguments(arguments) - .withPluginClasspath(); + return runner.withArguments(arguments); } private void assertStartedAndStoppedOnce(BuildResult result, String nodeName) { diff --git a/buildSrc/src/test/resources/org/elasticsearch/gradle/fake_openjdk_linux.tar.gz b/buildSrc/src/test/resources/org/elasticsearch/gradle/fake_openjdk_linux.tar.gz new file mode 100644 index 0000000000000..d38b03a4c2a48 Binary files /dev/null and b/buildSrc/src/test/resources/org/elasticsearch/gradle/fake_openjdk_linux.tar.gz differ diff --git a/buildSrc/src/test/resources/org/elasticsearch/gradle/fake_openjdk_osx.tar.gz b/buildSrc/src/test/resources/org/elasticsearch/gradle/fake_openjdk_osx.tar.gz new file mode 100644 index 0000000000000..9ac1da5e18146 Binary files /dev/null and b/buildSrc/src/test/resources/org/elasticsearch/gradle/fake_openjdk_osx.tar.gz differ diff --git a/buildSrc/src/test/resources/org/elasticsearch/gradle/fake_openjdk_windows.zip b/buildSrc/src/test/resources/org/elasticsearch/gradle/fake_openjdk_windows.zip new file mode 100644 index 0000000000000..61b6b867397d6 Binary files /dev/null and b/buildSrc/src/test/resources/org/elasticsearch/gradle/fake_openjdk_windows.zip differ diff --git a/buildSrc/src/testKit/elasticsearch.build/build.gradle b/buildSrc/src/testKit/elasticsearch.build/build.gradle index 8020935f67e80..7a68fe59baab6 100644 --- a/buildSrc/src/testKit/elasticsearch.build/build.gradle +++ b/buildSrc/src/testKit/elasticsearch.build/build.gradle @@ -16,6 +16,7 @@ repositories { jcenter() repositories { maven { + name "local-repo" url System.getProperty("local.repo.path") } } diff --git a/buildSrc/src/testKit/jarHell/build.gradle b/buildSrc/src/testKit/jarHell/build.gradle index cd423c9f99f81..cb12ce03f5191 100644 --- a/buildSrc/src/testKit/jarHell/build.gradle +++ b/buildSrc/src/testKit/jarHell/build.gradle @@ -15,6 +15,7 @@ repositories { jcenter() repositories { maven { + name "local" url System.getProperty("local.repo.path") } } diff --git a/buildSrc/src/testKit/jdk-download/build.gradle b/buildSrc/src/testKit/jdk-download/build.gradle new file mode 100644 index 0000000000000..eb2aa0260a331 --- /dev/null +++ b/buildSrc/src/testKit/jdk-download/build.gradle @@ -0,0 +1,15 @@ + +project.gradle.projectsEvaluated { + // wire the jdk repo to wiremock + String fakeJdkRepo = Objects.requireNonNull(System.getProperty('tests.jdk_repo')) + String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version')) + println rootProject.repositories.asMap.keySet() + IvyArtifactRepository repository = (IvyArtifactRepository) rootProject.repositories.getByName("jdk_repo_${fakeJdkVersion}") + repository.setUrl(fakeJdkRepo) +} + +task numConfigurations { + doLast { + println "NUM CONFIGS: ${project.configurations.size()}" + } +} \ No newline at end of file diff --git a/buildSrc/src/testKit/jdk-download/reuse/build.gradle b/buildSrc/src/testKit/jdk-download/reuse/build.gradle new file mode 100644 index 0000000000000..8a26a8121e9e5 --- /dev/null +++ b/buildSrc/src/testKit/jdk-download/reuse/build.gradle @@ -0,0 +1,9 @@ +evaluationDependsOn ':subproj' + +String fakeJdkVersion = 
Objects.requireNonNull(System.getProperty('tests.jdk_version')) +jdks { + linux_jdk { + version = fakeJdkVersion + platform = "linux" + } +} \ No newline at end of file diff --git a/buildSrc/src/testKit/jdk-download/settings.gradle b/buildSrc/src/testKit/jdk-download/settings.gradle new file mode 100644 index 0000000000000..028de479afe30 --- /dev/null +++ b/buildSrc/src/testKit/jdk-download/settings.gradle @@ -0,0 +1 @@ +include 'subproj' \ No newline at end of file diff --git a/buildSrc/src/testKit/jdk-download/subproj/build.gradle b/buildSrc/src/testKit/jdk-download/subproj/build.gradle new file mode 100644 index 0000000000000..8e8b5435b4a67 --- /dev/null +++ b/buildSrc/src/testKit/jdk-download/subproj/build.gradle @@ -0,0 +1,41 @@ +plugins { + id 'elasticsearch.jdk-download' +} + + +String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version')) +jdks { + linux { + version = fakeJdkVersion + platform = "linux" + } + darwin { + version = fakeJdkVersion + platform = "darwin" + } + windows { + version = fakeJdkVersion + platform = "windows" + } +} + +task getLinuxJdk { + dependsOn jdks.linux + doLast { + println "JDK HOME: " + jdks.linux + } +} + +task getDarwinJdk { + dependsOn jdks.darwin + doLast { + println "JDK HOME: " + jdks.darwin + } +} + +task getWindowsJdk { + dependsOn jdks.windows + doLast { + println "JDK HOME: " + jdks.windows + } +} \ No newline at end of file diff --git a/buildSrc/src/testKit/testclusters/build.gradle b/buildSrc/src/testKit/testclusters/build.gradle index d0a2eb4c3056c..e4f912a3d7a63 100644 --- a/buildSrc/src/testKit/testclusters/build.gradle +++ b/buildSrc/src/testKit/testclusters/build.gradle @@ -9,11 +9,13 @@ allprojects { all -> dir System.getProperty("test.local-test-downloads-path") } maven { + name "local" url System.getProperty("local.repo.path") } String luceneSnapshotRevision = System.getProperty("test.lucene-snapshot-revision") if (luceneSnapshotRevision != null) { maven { + name "lucene-snapshots" url "https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision } } diff --git a/buildSrc/src/testKit/thirdPartyAudit/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/build.gradle index 42e0a22cceaa5..725be970fd952 100644 --- a/buildSrc/src/testKit/thirdPartyAudit/build.gradle +++ b/buildSrc/src/testKit/thirdPartyAudit/build.gradle @@ -14,6 +14,7 @@ repositories { * - version 0.0.2 has the same class and one extra file just to make the jar different */ maven { + name = "local-test" url = file("sample_jars/build/testrepo") } jcenter() diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 8f7911574979d..471cb3a705cf5 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -21,7 +21,7 @@ slf4j = 1.6.2 jna = 4.5.1 netty = 4.1.35.Final -joda = 2.10.1 +joda = 2.10.2 # when updating this version, you need to ensure compatibility with: # - plugins/ingest-attachment (transitive dependency, check the upstream POM) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java index e7e515594a55d..9b390e1ffddbc 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java @@ 
-20,17 +20,23 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.io.stream.Writeable; public class NoopSearchAction extends Action { public static final NoopSearchAction INSTANCE = new NoopSearchAction(); public static final String NAME = "mock:data/read/search"; - public NoopSearchAction() { + private NoopSearchAction() { super(NAME); } @Override public SearchResponse newResponse() { - return new SearchResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return SearchResponse::new; } } diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index a5035a70bcee2..c9a3fc486f1da 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -63,9 +63,13 @@ dependencies { testCompile "junit:junit:${versions.junit}" //this is needed to make RestHighLevelClientTests#testApiNamingConventions work from IDEs testCompile "org.elasticsearch:rest-api-spec:${version}" - // Needed for serialization tests: + // Needed for serialization tests: // (In order to serialize a server side class to a client side class or the other way around) - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + if (isEclipse == false || project.path == ":client:rest-high-level-tests") { + testCompile("org.elasticsearch.plugin:x-pack-core:${version}") { + exclude group: 'org.elasticsearch', module: 'elasticsearch-rest-high-level-client' + } + } restSpec "org.elasticsearch:rest-api-spec:${version}" } @@ -92,6 +96,20 @@ forbiddenApisMain { addSignatureFiles 'http-signatures' signaturesFiles += files('src/main/resources/forbidden/rest-high-level-signatures.txt') } + +if (isEclipse) { + // in eclipse the project is under a fake root, we need to change around the source sets + sourceSets { + if (project.path == ":client:rest-high-level") { + main.java.srcDirs = ['java'] + main.resources.srcDirs = ['resources'] + } else { + test.java.srcDirs = ['java'] + test.resources.srcDirs = ['resources'] + } + } +} + File nodeCert = file("./testnode.crt") File nodeTrustStore = file("./testnode.jks") diff --git a/client/rest-high-level/src/main/eclipse-build.gradle b/client/rest-high-level/src/main/eclipse-build.gradle new file mode 100644 index 0000000000000..6bc7562f7fded --- /dev/null +++ b/client/rest-high-level/src/main/eclipse-build.gradle @@ -0,0 +1,2 @@ +// this is just shell gradle file for eclipse to have separate projects for core src and tests +apply from: '../../build.gradle' diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java index 309a37fedf8cd..0e887994da7bf 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java @@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest; import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest; import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsRequest; @@ -57,11 +58,11 @@ 
static Request getDataFrameTransform(GetDataFrameTransformRequest getRequest) { .addPathPart(Strings.collectionToCommaDelimitedString(getRequest.getId())) .build(); Request request = new Request(HttpGet.METHOD_NAME, endpoint); - if (getRequest.getFrom() != null) { - request.addParameter("from", getRequest.getFrom().toString()); + if (getRequest.getPageParams() != null && getRequest.getPageParams().getFrom() != null) { + request.addParameter(PageParams.FROM.getPreferredName(), getRequest.getPageParams().getFrom().toString()); } - if (getRequest.getSize() != null) { - request.addParameter("size", getRequest.getSize().toString()); + if (getRequest.getPageParams() != null && getRequest.getPageParams().getSize() != null) { + request.addParameter(PageParams.SIZE.getPreferredName(), getRequest.getPageParams().getSize().toString()); } return request; } @@ -120,6 +121,13 @@ static Request getDataFrameTransformStats(GetDataFrameTransformStatsRequest stat .addPathPart(statsRequest.getId()) .addPathPartAsIs("_stats") .build(); - return new Request(HttpGet.METHOD_NAME, endpoint); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + if (statsRequest.getPageParams() != null && statsRequest.getPageParams().getFrom() != null) { + request.addParameter(PageParams.FROM.getPreferredName(), statsRequest.getPageParams().getFrom().toString()); + } + if (statsRequest.getPageParams() != null && statsRequest.getPageParams().getSize() != null) { + request.addParameter(PageParams.SIZE.getPreferredName(), statsRequest.getPageParams().getSize().toString()); + } + return request; } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index 073b92f84a3a3..99884ee49c868 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -27,6 +27,7 @@ import org.apache.http.nio.entity.NByteArrayEntity; import org.apache.lucene.util.BytesRef; import org.elasticsearch.client.RequestConverters.EndpointBuilder; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.DeleteCalendarEventRequest; import org.elasticsearch.client.ml.DeleteCalendarJobRequest; @@ -71,7 +72,6 @@ import org.elasticsearch.client.ml.UpdateFilterRequest; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.UpdateModelSnapshotRequest; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java index 730ea7e95de12..6d7d801bcb4d2 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ValidationException.java @@ -21,12 +21,36 @@ import org.elasticsearch.common.Nullable; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; /** * Encapsulates an accumulation of validation errors */ public class ValidationException extends IllegalArgumentException { + + /** + * Creates {@link ValidationException} instance 
initialized with given error messages. + * @param error the errors to add + * @return {@link ValidationException} instance + */ + public static ValidationException withError(String... error) { + return withErrors(Arrays.asList(error)); + } + + /** + * Creates {@link ValidationException} instance initialized with given error messages. + * @param errors the list of errors to add + * @return {@link ValidationException} instance + */ + public static ValidationException withErrors(List errors) { + ValidationException e = new ValidationException(); + for (String error : errors) { + e.addValidationError(error); + } + return e; + } + private final List validationErrors = new ArrayList<>(); /** diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/util/PageParams.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/PageParams.java similarity index 96% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/util/PageParams.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/core/PageParams.java index 52d54188f7007..64b42c4ef7b75 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/util/PageParams.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/PageParams.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.client.ml.job.util; +package org.elasticsearch.client.core; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -57,11 +57,11 @@ public PageParams(@Nullable Integer from, @Nullable Integer size) { this.size = size; } - public int getFrom() { + public Integer getFrom() { return from; } - public int getSize() { + public Integer getSize() { return size; } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformRequest.java index 9577a0f5c72bf..c50f37a27c885 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformRequest.java @@ -21,6 +21,7 @@ import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ValidationException; +import org.elasticsearch.client.core.PageParams; import java.util.Arrays; import java.util.List; @@ -29,10 +30,6 @@ public class GetDataFrameTransformRequest implements Validatable { - private final List ids; - private Integer from; - private Integer size; - /** * Helper method to create a request that will get ALL Data Frame Transforms * @return new {@link GetDataFrameTransformRequest} object for the id "_all" @@ -41,6 +38,9 @@ public static GetDataFrameTransformRequest getAllDataFrameTransformsRequest() { return new GetDataFrameTransformRequest("_all"); } + private final List ids; + private PageParams pageParams; + public GetDataFrameTransformRequest(String... 
ids) { this.ids = Arrays.asList(ids); } @@ -49,20 +49,12 @@ public List getId() { return ids; } - public Integer getFrom() { - return from; - } - - public void setFrom(Integer from) { - this.from = from; - } - - public Integer getSize() { - return size; + public PageParams getPageParams() { + return pageParams; } - public void setSize(Integer size) { - this.size = size; + public void setPageParams(PageParams pageParams) { + this.pageParams = pageParams; } @Override @@ -78,7 +70,7 @@ public Optional validate() { @Override public int hashCode() { - return Objects.hash(ids); + return Objects.hash(ids, pageParams); } @Override @@ -91,6 +83,6 @@ public boolean equals(Object obj) { return false; } GetDataFrameTransformRequest other = (GetDataFrameTransformRequest) obj; - return Objects.equals(ids, other.ids); + return Objects.equals(ids, other.ids) && Objects.equals(pageParams, other.pageParams); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformStatsRequest.java index e90c8a1e276d2..4a105f7b40c7e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformStatsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/GetDataFrameTransformStatsRequest.java @@ -21,12 +21,14 @@ import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ValidationException; +import org.elasticsearch.client.core.PageParams; import java.util.Objects; import java.util.Optional; public class GetDataFrameTransformStatsRequest implements Validatable { private final String id; + private PageParams pageParams; public GetDataFrameTransformStatsRequest(String id) { this.id = id; @@ -36,6 +38,14 @@ public String getId() { return id; } + public PageParams getPageParams() { + return pageParams; + } + + public void setPageParams(PageParams pageParams) { + this.pageParams = pageParams; + } + @Override public Optional validate() { if (id == null) { @@ -49,7 +59,7 @@ public Optional validate() { @Override public int hashCode() { - return Objects.hash(id); + return Objects.hash(id, pageParams); } @Override @@ -62,6 +72,6 @@ public boolean equals(Object obj) { return false; } GetDataFrameTransformStatsRequest other = (GetDataFrameTransformStatsRequest) obj; - return Objects.equals(id, other.id); + return Objects.equals(id, other.id) && Objects.equals(pageParams, other.pageParams); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/pivot/PivotConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/pivot/PivotConfig.java index 0c3a6e3ea890b..6fdbeb8a43a20 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/pivot/PivotConfig.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/dataframe/transforms/pivot/PivotConfig.java @@ -39,25 +39,29 @@ public class PivotConfig implements ToXContentObject { private static final ParseField GROUP_BY = new ParseField("group_by"); private static final ParseField AGGREGATIONS = new ParseField("aggregations"); + private static final ParseField MAX_PAGE_SEARCH_SIZE = new ParseField("max_page_search_size"); private final GroupConfig groups; private final AggregationConfig aggregationConfig; + private final Integer maxPageSearchSize; private static final ConstructingObjectParser PARSER = new 
ConstructingObjectParser<>("pivot_config", true, - args -> new PivotConfig((GroupConfig) args[0], (AggregationConfig) args[1])); + args -> new PivotConfig((GroupConfig) args[0], (AggregationConfig) args[1], (Integer) args[2])); static { PARSER.declareObject(constructorArg(), (p, c) -> (GroupConfig.fromXContent(p)), GROUP_BY); PARSER.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p), AGGREGATIONS); + PARSER.declareInt(optionalConstructorArg(), MAX_PAGE_SEARCH_SIZE); } public static PivotConfig fromXContent(final XContentParser parser) { return PARSER.apply(parser, null); } - PivotConfig(GroupConfig groups, final AggregationConfig aggregationConfig) { + PivotConfig(GroupConfig groups, final AggregationConfig aggregationConfig, Integer maxPageSearchSize) { this.groups = groups; this.aggregationConfig = aggregationConfig; + this.maxPageSearchSize = maxPageSearchSize; } @Override @@ -65,6 +69,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field(GROUP_BY.getPreferredName(), groups); builder.field(AGGREGATIONS.getPreferredName(), aggregationConfig); + if (maxPageSearchSize != null) { + builder.field(MAX_PAGE_SEARCH_SIZE.getPreferredName(), maxPageSearchSize); + } builder.endObject(); return builder; } @@ -77,6 +84,10 @@ public GroupConfig getGroupConfig() { return groups; } + public Integer getMaxPageSearchSize() { + return maxPageSearchSize; + } + @Override public boolean equals(Object other) { if (this == other) { @@ -89,12 +100,14 @@ public boolean equals(Object other) { final PivotConfig that = (PivotConfig) other; - return Objects.equals(this.groups, that.groups) && Objects.equals(this.aggregationConfig, that.aggregationConfig); + return Objects.equals(this.groups, that.groups) + && Objects.equals(this.aggregationConfig, that.aggregationConfig) + && Objects.equals(this.maxPageSearchSize, that.maxPageSearchSize); } @Override public int hashCode() { - return Objects.hash(groups, aggregationConfig); + return Objects.hash(groups, aggregationConfig, maxPageSearchSize); } public static Builder builder() { @@ -104,6 +117,7 @@ public static Builder builder() { public static class Builder { private GroupConfig groups; private AggregationConfig aggregationConfig; + private Integer maxPageSearchSize; /** * Set how to group the source data @@ -135,8 +149,22 @@ public Builder setAggregations(AggregatorFactories.Builder aggregations) { return this; } + /** + * Sets the paging maximum paging maxPageSearchSize that date frame transform can use when + * pulling the data from the source index. + * + * If OOM is triggered, the paging maxPageSearchSize is dynamically reduced so that the transform can continue to gather data. + * + * @param maxPageSearchSize Integer value between 10 and 10_000 + * @return the {@link Builder} with the paging maxPageSearchSize set. 
+ */ + public Builder setMaxPageSearchSize(Integer maxPageSearchSize) { + this.maxPageSearchSize = maxPageSearchSize; + return this; + } + public PivotConfig build() { - return new PivotConfig(groups, aggregationConfig); + return new PivotConfig(groups, aggregationConfig, maxPageSearchSize); } } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java index 5b4438fa2a1a4..655ecb2b9750f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java @@ -20,9 +20,9 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.client.ml.job.results.Result; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java index 5730e132df1a1..05bc234178ebd 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java @@ -21,9 +21,9 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java index 322efc19927dd..bcbae91693b93 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java @@ -21,13 +21,12 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.client.ml.job.util.PageParams; - import java.io.IOException; import java.util.Objects; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java index 4fc68793f0060..b1000c3e4eb4d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java +++ 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java @@ -20,8 +20,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java index 54bacdae1083b..a1808af23fcac 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java @@ -20,8 +20,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java index cdcd40a41f853..8e5a45e7ba386 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java @@ -20,8 +20,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java index 8743f3043e589..acb138ac442f0 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java @@ -20,8 +20,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java index f70459ab9b165..7aaa80ca34526 100644 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java @@ -19,8 +19,8 @@ package org.elasticsearch.client.ml; import org.elasticsearch.client.Validatable; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java index c9a34fe5c98d9..6ea3cede0e3f1 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java @@ -47,6 +47,8 @@ public class ModelSizeStats implements ToXContentObject { * Field Names */ public static final ParseField MODEL_BYTES_FIELD = new ParseField("model_bytes"); + public static final ParseField MODEL_BYTES_EXCEEDED_FIELD = new ParseField("model_bytes_exceeded"); + public static final ParseField MODEL_BYTES_MEMORY_LIMIT_FIELD = new ParseField("model_bytes_memory_limit"); public static final ParseField TOTAL_BY_FIELD_COUNT_FIELD = new ParseField("total_by_field_count"); public static final ParseField TOTAL_OVER_FIELD_COUNT_FIELD = new ParseField("total_over_field_count"); public static final ParseField TOTAL_PARTITION_FIELD_COUNT_FIELD = new ParseField("total_partition_field_count"); @@ -61,6 +63,8 @@ public class ModelSizeStats implements ToXContentObject { static { PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); PARSER.declareLong(Builder::setModelBytes, MODEL_BYTES_FIELD); + PARSER.declareLong(Builder::setModelBytesExceeded, MODEL_BYTES_EXCEEDED_FIELD); + PARSER.declareLong(Builder::setModelBytesMemoryLimit, MODEL_BYTES_MEMORY_LIMIT_FIELD); PARSER.declareLong(Builder::setBucketAllocationFailuresCount, BUCKET_ALLOCATION_FAILURES_COUNT_FIELD); PARSER.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD); PARSER.declareLong(Builder::setTotalOverFieldCount, TOTAL_OVER_FIELD_COUNT_FIELD); @@ -97,6 +101,8 @@ public String toString() { private final String jobId; private final long modelBytes; + private final Long modelBytesExceeded; + private final Long modelBytesMemoryLimit; private final long totalByFieldCount; private final long totalOverFieldCount; private final long totalPartitionFieldCount; @@ -105,11 +111,13 @@ public String toString() { private final Date timestamp; private final Date logTime; - private ModelSizeStats(String jobId, long modelBytes, long totalByFieldCount, long totalOverFieldCount, - long totalPartitionFieldCount, long bucketAllocationFailuresCount, MemoryStatus memoryStatus, - Date timestamp, Date logTime) { + private ModelSizeStats(String jobId, long modelBytes, Long modelBytesExceeded, Long modelBytesMemoryLimit, long totalByFieldCount, + long totalOverFieldCount, long totalPartitionFieldCount, long bucketAllocationFailuresCount, + MemoryStatus memoryStatus, Date timestamp, Date logTime) { this.jobId = jobId; this.modelBytes = modelBytes; + this.modelBytesExceeded = modelBytesExceeded; + this.modelBytesMemoryLimit = modelBytesMemoryLimit; this.totalByFieldCount = 
totalByFieldCount; this.totalOverFieldCount = totalOverFieldCount; this.totalPartitionFieldCount = totalPartitionFieldCount; @@ -126,6 +134,12 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Job.ID.getPreferredName(), jobId); builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); builder.field(MODEL_BYTES_FIELD.getPreferredName(), modelBytes); + if (modelBytesExceeded != null) { + builder.field(MODEL_BYTES_EXCEEDED_FIELD.getPreferredName(), modelBytesExceeded); + } + if (modelBytesMemoryLimit != null) { + builder.field(MODEL_BYTES_MEMORY_LIMIT_FIELD.getPreferredName(), modelBytesMemoryLimit); + } builder.field(TOTAL_BY_FIELD_COUNT_FIELD.getPreferredName(), totalByFieldCount); builder.field(TOTAL_OVER_FIELD_COUNT_FIELD.getPreferredName(), totalOverFieldCount); builder.field(TOTAL_PARTITION_FIELD_COUNT_FIELD.getPreferredName(), totalPartitionFieldCount); @@ -148,6 +162,14 @@ public long getModelBytes() { return modelBytes; } + public Long getModelBytesExceeded() { + return modelBytesExceeded; + } + + public Long getModelBytesMemoryLimit() { + return modelBytesMemoryLimit; + } + public long getTotalByFieldCount() { return totalByFieldCount; } @@ -188,8 +210,8 @@ public Date getLogTime() { @Override public int hashCode() { - return Objects.hash(jobId, modelBytes, totalByFieldCount, totalOverFieldCount, totalPartitionFieldCount, - this.bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); + return Objects.hash(jobId, modelBytes, modelBytesExceeded, modelBytesMemoryLimit, totalByFieldCount, totalOverFieldCount, + totalPartitionFieldCount, this.bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); } /** @@ -207,7 +229,8 @@ public boolean equals(Object other) { ModelSizeStats that = (ModelSizeStats) other; - return this.modelBytes == that.modelBytes && this.totalByFieldCount == that.totalByFieldCount + return this.modelBytes == that.modelBytes && Objects.equals(this.modelBytesExceeded, that.modelBytesExceeded) + && Objects.equals(this.modelBytesMemoryLimit, that.modelBytesMemoryLimit) && this.totalByFieldCount == that.totalByFieldCount && this.totalOverFieldCount == that.totalOverFieldCount && this.totalPartitionFieldCount == that.totalPartitionFieldCount && this.bucketAllocationFailuresCount == that.bucketAllocationFailuresCount && Objects.equals(this.memoryStatus, that.memoryStatus) && Objects.equals(this.timestamp, that.timestamp) @@ -219,6 +242,8 @@ public static class Builder { private final String jobId; private long modelBytes; + private Long modelBytesExceeded; + private Long modelBytesMemoryLimit; private long totalByFieldCount; private long totalOverFieldCount; private long totalPartitionFieldCount; @@ -236,6 +261,8 @@ public Builder(String jobId) { public Builder(ModelSizeStats modelSizeStats) { this.jobId = modelSizeStats.jobId; this.modelBytes = modelSizeStats.modelBytes; + this.modelBytesExceeded = modelSizeStats.modelBytesExceeded; + this.modelBytesMemoryLimit = modelSizeStats.modelBytesMemoryLimit; this.totalByFieldCount = modelSizeStats.totalByFieldCount; this.totalOverFieldCount = modelSizeStats.totalOverFieldCount; this.totalPartitionFieldCount = modelSizeStats.totalPartitionFieldCount; @@ -250,6 +277,16 @@ public Builder setModelBytes(long modelBytes) { return this; } + public Builder setModelBytesExceeded(long modelBytesExceeded) { + this.modelBytesExceeded = modelBytesExceeded; + return this; + } + + public Builder setModelBytesMemoryLimit(long modelBytesMemoryLimit) { + 
this.modelBytesMemoryLimit = modelBytesMemoryLimit; + return this; + } + public Builder setTotalByFieldCount(long totalByFieldCount) { this.totalByFieldCount = totalByFieldCount; return this; @@ -287,8 +324,8 @@ public Builder setLogTime(Date logTime) { } public ModelSizeStats build() { - return new ModelSizeStats(jobId, modelBytes, totalByFieldCount, totalOverFieldCount, totalPartitionFieldCount, - bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); + return new ModelSizeStats(jobId, modelBytes, modelBytesExceeded, modelBytesMemoryLimit, totalByFieldCount, totalOverFieldCount, + totalPartitionFieldCount, bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); } } } diff --git a/client/rest-high-level/src/test/eclipse-build.gradle b/client/rest-high-level/src/test/eclipse-build.gradle new file mode 100644 index 0000000000000..56d355658879c --- /dev/null +++ b/client/rest-high-level/src/test/eclipse-build.gradle @@ -0,0 +1,6 @@ +// this is just shell gradle file for eclipse to have separate projects for core src and tests +apply from: '../../build.gradle' + +dependencies { + testCompile project(':client:rest-high-level') +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java index 8c6b1c6045855..7a1e5e2389316 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java @@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest; import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest; import org.elasticsearch.client.dataframe.GetDataFrameTransformStatsRequest; @@ -43,7 +44,9 @@ import java.io.IOException; import java.util.Collections; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; public class DataFrameRequestConvertersTests extends ESTestCase { @@ -147,6 +150,23 @@ public void testGetDataFrameTransformStats() { assertEquals(HttpGet.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo/_stats")); + + assertFalse(request.getParameters().containsKey("from")); + assertFalse(request.getParameters().containsKey("size")); + + getStatsRequest.setPageParams(new PageParams(0, null)); + request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest); + assertThat(request.getParameters(), hasEntry("from", "0")); + assertEquals(null, request.getParameters().get("size")); + + getStatsRequest.setPageParams(new PageParams(null, 50)); + request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest); + assertEquals(null, request.getParameters().get("from")); + assertThat(request.getParameters(), hasEntry("size", "50")); + + getStatsRequest.setPageParams(new PageParams(0, 10)); + request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest); + assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10"))); } public void testGetDataFrameTransform() { @@ -159,11 +179,19 @@ public void testGetDataFrameTransform() { 
assertFalse(request.getParameters().containsKey("from")); assertFalse(request.getParameters().containsKey("size")); - getRequest.setFrom(0); - getRequest.setSize(10); + getRequest.setPageParams(new PageParams(0, null)); + request = DataFrameRequestConverters.getDataFrameTransform(getRequest); + assertThat(request.getParameters(), hasEntry("from", "0")); + assertEquals(null, request.getParameters().get("size")); + + getRequest.setPageParams(new PageParams(null, 50)); + request = DataFrameRequestConverters.getDataFrameTransform(getRequest); + assertEquals(null, request.getParameters().get("from")); + assertThat(request.getParameters(), hasEntry("size", "50")); + + getRequest.setPageParams(new PageParams(0, 10)); request = DataFrameRequestConverters.getDataFrameTransform(getRequest); - assertEquals("0", request.getParameters().get("from")); - assertEquals("10", request.getParameters().get("size")); + assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10"))); } public void testGetDataFrameTransform_givenMulitpleIds() { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java index bb1924b8b4957..40cd6f454cdab 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.core.AcknowledgedResponse; import org.elasticsearch.client.core.IndexerState; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest; import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest; import org.elasticsearch.client.dataframe.GetDataFrameTransformResponse; @@ -71,6 +72,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.oneOf; public class DataFrameTransformIT extends ESRestHighLevelClientTestCase { @@ -140,7 +142,8 @@ private void indexData(String indexName) throws IOException { @After public void cleanUpTransforms() throws IOException { for (String transformId : transformsToClean) { - highLevelClient().dataFrame().stopDataFrameTransform(new StopDataFrameTransformRequest(transformId), RequestOptions.DEFAULT); + highLevelClient().dataFrame().stopDataFrameTransform( + new StopDataFrameTransformRequest(transformId, Boolean.TRUE, null), RequestOptions.DEFAULT); } for (String transformId : transformsToClean) { @@ -217,8 +220,7 @@ public void testGetAllAndPageTransforms() throws IOException { assertThat(getResponse.getTransformConfigurations(), hasSize(2)); assertEquals(transform, getResponse.getTransformConfigurations().get(1)); - getRequest.setFrom(0); - getRequest.setSize(1); + getRequest.setPageParams(new PageParams(0,1)); getResponse = execute(getRequest, client::getDataFrameTransform, client::getDataFrameTransformAsync); assertNull(getResponse.getInvalidTransforms()); @@ -263,9 +265,10 @@ public void testStartStop() throws IOException { GetDataFrameTransformStatsResponse statsResponse = execute(new GetDataFrameTransformStatsRequest(id), client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync); assertThat(statsResponse.getTransformsStateAndStats(), hasSize(1)); - assertEquals(IndexerState.STARTED, 
statsResponse.getTransformsStateAndStats().get(0).getTransformState().getIndexerState()); + IndexerState indexerState = statsResponse.getTransformsStateAndStats().get(0).getTransformState().getIndexerState(); + assertThat(indexerState, is(oneOf(IndexerState.STARTED, IndexerState.INDEXING))); - StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id); + StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, Boolean.TRUE, null); StopDataFrameTransformResponse stopResponse = execute(stopRequest, client::stopDataFrameTransform, client::stopDataFrameTransformAsync); assertTrue(stopResponse.isStopped()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index 11faaf879729d..fd867a12204d0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.DeleteCalendarEventRequest; import org.elasticsearch.client.ml.DeleteCalendarJobRequest; @@ -82,7 +83,6 @@ import org.elasticsearch.client.ml.job.config.JobUpdateTests; import org.elasticsearch.client.ml.job.config.MlFilter; import org.elasticsearch.client.ml.job.config.MlFilterTests; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.Strings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java index 9b364975c773a..34ca5cd2aa448 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetBucketsResponse; import org.elasticsearch.client.ml.GetCategoriesRequest; @@ -43,7 +44,6 @@ import org.elasticsearch.client.ml.job.results.Bucket; import org.elasticsearch.client.ml.job.results.Influencer; import org.elasticsearch.client.ml.job.results.OverallBucket; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; import org.junit.After; @@ -150,11 +150,15 @@ private void addCategoriesIndexRequests(BulkRequest bulkRequest) { private void addModelSnapshotIndexRequests(BulkRequest bulkRequest) { { + // Index a number of model snapshots, one of which contains the new model_size_stats fields + // 'model_bytes_exceeded' and 'model_bytes_memory_limit' that were introduced in 7.2.0. + // We want to verify that we can parse the snapshots whether or not these fields are present. 
IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX); indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"timestamp\":1541587919000, " + "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", \"snapshot_id\":\"1541587919\"," + "\"snapshot_doc_count\":1, \"model_size_stats\":{\"job_id\":\"" + JOB_ID + "\", \"result_type\":\"model_size_stats\"," + - "\"model_bytes\":51722, \"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," + + "\"model_bytes\":51722, \"model_bytes_exceeded\":10762, \"model_bytes_memory_limit\":40960, \"total_by_field_count\":3, " + + "\"total_over_field_count\":0, \"total_partition_field_count\":2," + "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000," + " \"timestamp\":1519930800000},\"latest_record_time_stamp\":1519931700000, \"latest_result_time_stamp\":1519930800000," + " \"retain\":false }", XContentType.JSON); @@ -223,6 +227,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(0).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(0).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesExceeded(), equalTo(10762L)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(40960L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -241,6 +247,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(1).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(1).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytesExceeded(), equalTo(null)); + assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(null)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -259,6 +267,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(2).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(2).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(2).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(2).getModelSizeStats().getModelBytesExceeded(), equalTo(null)); + assertThat(response.snapshots().get(2).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(null)); assertThat(response.snapshots().get(2).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(2).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); 
assertThat(response.snapshots().get(2).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -288,6 +298,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(2).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(2).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(2).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(2).getModelSizeStats().getModelBytesExceeded(), equalTo(10762L)); + assertThat(response.snapshots().get(2).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(40960L)); assertThat(response.snapshots().get(2).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(2).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); assertThat(response.snapshots().get(2).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -306,6 +318,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(1).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(1).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytesExceeded(), equalTo(null)); + assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(null)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -324,6 +338,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(0).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(0).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesExceeded(), equalTo(null)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(null)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -353,6 +369,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(0).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(0).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesExceeded(), equalTo(10762L)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(40960L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); 
assertThat(response.snapshots().get(0).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -383,6 +401,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(0).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(0).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesExceeded(), equalTo(null)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(null)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -402,6 +422,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(1).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(1).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytesExceeded(), equalTo(null)); + assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(null)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -430,6 +452,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(0).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(0).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesExceeded(), equalTo(null)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(null)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -470,6 +494,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(0).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(0).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesExceeded(), equalTo(10762L)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(40960L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); 
assertThat(response.snapshots().get(0).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -488,6 +514,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(1).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(1).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytesExceeded(), equalTo(null)); + assertThat(response.snapshots().get(1).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(null)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); assertThat(response.snapshots().get(1).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); @@ -517,6 +545,8 @@ public void testGetModelSnapshots() throws IOException { assertThat(response.snapshots().get(0).getLatestResultTimeStamp(), equalTo(new Date(1519930800000L))); assertThat(response.snapshots().get(0).getModelSizeStats().getJobId(), equalTo(JOB_ID)); assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytes(), equalTo(51722L)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesExceeded(), equalTo(null)); + assertThat(response.snapshots().get(0).getModelSizeStats().getModelBytesMemoryLimit(), equalTo(null)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalByFieldCount(), equalTo(3L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalOverFieldCount(), equalTo(0L)); assertThat(response.snapshots().get(0).getModelSizeStats().getTotalPartitionFieldCount(), equalTo(2L)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index f7b7b148f660b..25e610d67814f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.CloseJobResponse; @@ -112,7 +113,6 @@ import org.elasticsearch.client.ml.job.config.MlFilter; import org.elasticsearch.client.ml.job.process.ModelSnapshot; import org.elasticsearch.client.ml.job.stats.JobStats; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ValidationExceptionTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ValidationExceptionTests.java new file mode 100644 index 0000000000000..cde360b029839 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ValidationExceptionTests.java @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.hasSize; + +public class ValidationExceptionTests extends ESTestCase { + + private static final String ERROR = "some-error"; + private static final String OTHER_ERROR = "some-other-error"; + + public void testWithError() { + ValidationException e = ValidationException.withError(ERROR, OTHER_ERROR); + assertThat(e.validationErrors(), hasSize(2)); + assertThat(e.validationErrors(), contains(ERROR, OTHER_ERROR)); + } + + public void testWithErrors() { + ValidationException e = ValidationException.withErrors(Arrays.asList(ERROR, OTHER_ERROR)); + assertThat(e.validationErrors(), hasSize(2)); + assertThat(e.validationErrors(), contains(ERROR, OTHER_ERROR)); + } + + public void testAddValidationError() { + ValidationException e = new ValidationException(); + assertThat(e.validationErrors(), hasSize(0)); + e.addValidationError(ERROR); + assertThat(e.validationErrors(), hasSize(1)); + assertThat(e.validationErrors(), contains(ERROR)); + e.addValidationError(OTHER_ERROR); + assertThat(e.validationErrors(), hasSize(2)); + assertThat(e.validationErrors(), contains(ERROR, OTHER_ERROR)); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/core/MainResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/core/MainResponseTests.java index 24925e819a443..4a5cd2056655a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/core/MainResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/core/MainResponseTests.java @@ -38,7 +38,7 @@ protected org.elasticsearch.action.main.MainResponse createServerTestInstance() ClusterName clusterName = new ClusterName(randomAlphaOfLength(10)); String nodeName = randomAlphaOfLength(10); final String date = new Date(randomNonNegativeLong()).toString(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_1, Version.CURRENT); + Version version = VersionUtils.randomIndexCompatibleVersion(random()); Build build = new Build( Build.Flavor.UNKNOWN, Build.Type.UNKNOWN, randomAlphaOfLength(8), date, randomBoolean(), version.toString() diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/PivotConfigTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/PivotConfigTests.java index d2e036d9f1ad2..5cafcb9f419b5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/PivotConfigTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/dataframe/transforms/pivot/PivotConfigTests.java @@ -32,7 +32,9 @@ 
public class PivotConfigTests extends AbstractXContentTestCase { public static PivotConfig randomPivotConfig() { - return new PivotConfig(GroupConfigTests.randomGroupConfig(), AggregationConfigTests.randomAggregationConfig()); + return new PivotConfig(GroupConfigTests.randomGroupConfig(), + AggregationConfigTests.randomAggregationConfig(), + randomBoolean() ? null : randomIntBetween(10, 10_000)); } @Override diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java index 60dd2cb32eaab..6f7832cbf3cff 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.core.AcknowledgedResponse; import org.elasticsearch.client.core.IndexerState; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.dataframe.DeleteDataFrameTransformRequest; import org.elasticsearch.client.dataframe.GetDataFrameTransformRequest; import org.elasticsearch.client.dataframe.GetDataFrameTransformResponse; @@ -75,7 +76,8 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest @After public void cleanUpTransforms() throws IOException { for (String transformId : transformsToClean) { - highLevelClient().dataFrame().stopDataFrameTransform(new StopDataFrameTransformRequest(transformId), RequestOptions.DEFAULT); + highLevelClient().dataFrame().stopDataFrameTransform( + new StopDataFrameTransformRequest(transformId, Boolean.TRUE, TimeValue.timeValueSeconds(20)), RequestOptions.DEFAULT); } for (String transformId : transformsToClean) { @@ -136,8 +138,9 @@ public void testPutDataFrameTransform() throws IOException, InterruptedException // end::put-data-frame-transform-agg-config // tag::put-data-frame-transform-pivot-config PivotConfig pivotConfig = PivotConfig.builder() - .setGroups(groupConfig) - .setAggregationConfig(aggConfig) + .setGroups(groupConfig) // <1> + .setAggregationConfig(aggConfig) // <2> + .setMaxPageSearchSize(1000) // <3> .build(); // end::put-data-frame-transform-pivot-config // tag::put-data-frame-transform-config @@ -554,7 +557,7 @@ public void onFailure(Exception e) { public void testGetDataFrameTransform() throws IOException, InterruptedException { createIndex("source-data"); - + QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder()); GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build()).build(); @@ -585,8 +588,7 @@ public void testGetDataFrameTransform() throws IOException, InterruptedException // end::get-data-frame-transform-request // tag::get-data-frame-transform-request-options - request.setFrom(0); // <1> - request.setSize(100); // <2> + request.setPageParams(new PageParams(0, 100)); // <1> // end::get-data-frame-transform-request-options // tag::get-data-frame-transform-execute diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index f4e3f86196f29..fe7d04a4e0a8d 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.client.MlTestStateCleaner; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.CloseJobResponse; @@ -137,7 +138,6 @@ import org.elasticsearch.client.ml.job.results.Influencer; import org.elasticsearch.client.ml.job.results.OverallBucket; import org.elasticsearch.client.ml.job.stats.JobStats; -import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetBucketsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetBucketsRequestTests.java index d637988691226..d8012247dc502 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetBucketsRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetBucketsRequestTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.client.ml.job.util.PageParams; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractXContentTestCase; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarEventsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarEventsRequestTests.java index a85eda1ac74aa..8bc9d808ee303 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarEventsRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarEventsRequestTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.client.ml.job.util.PageParams; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractXContentTestCase; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsRequestTests.java index b7ca44fd5faf1..4ec6c72b83d54 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsRequestTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.client.ml.job.util.PageParams; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractXContentTestCase; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesRequestTests.java index 7d9fe2b238f75..bcc697fdb209e 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesRequestTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.client.ml.job.util.PageParams; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractXContentTestCase; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetInfluencersRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetInfluencersRequestTests.java index 94937cd78155f..5006a5f7d33d9 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetInfluencersRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetInfluencersRequestTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.client.ml.job.util.PageParams; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractXContentTestCase; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetModelSnapshotsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetModelSnapshotsRequestTests.java index 80a08f8c765ac..a15a80ac3d2f2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetModelSnapshotsRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetModelSnapshotsRequestTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.client.ml.job.util.PageParams; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractXContentTestCase; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetRecordsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetRecordsRequestTests.java index f6f4b49889a47..9b1c8699f4c9f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetRecordsRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetRecordsRequestTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.client.ml.job.util.PageParams; +import org.elasticsearch.client.core.PageParams; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractXContentTestCase; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/process/ModelSizeStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/process/ModelSizeStatsTests.java index 4a12a75f2b17d..8c43feb545a26 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/process/ModelSizeStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/process/ModelSizeStatsTests.java @@ -31,6 +31,8 @@ public class ModelSizeStatsTests extends AbstractXContentTestCase) () -> System.getProperty("java.version")); + String[] components = version.split("\\."); + if (components.length > 0) { + final int major = Integer.valueOf(components[0]); + if (major > 12) { + return "TLS"; + } else if (major == 12 && components.length > 2) { + final int minor = Integer.valueOf(components[1]); 
+ if (minor > 0) { + return "TLS"; + } else { + String patch = components[2]; + final int index = patch.indexOf("_"); + if (index > -1) { + patch = patch.substring(0, index); + } + + if (Integer.valueOf(patch) >= 1) { + return "TLS"; + } + } + } + } + return "TLSv1.2"; + } } diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index c7c58ad4ebdc5..14a7e566587c4 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -61,7 +61,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla } if (jdk) { into('jdk') { - with jdkFiles(platform) + with jdkFiles(project, platform) } } into('') { @@ -295,6 +295,10 @@ subprojects { } } +subprojects { + group = "org.elasticsearch.distribution.${name.startsWith("oss-") ? "oss" : "default"}" +} + /***************************************************************************** * Rest test config * *****************************************************************************/ @@ -302,6 +306,8 @@ configure(subprojects.findAll { it.name == 'integ-test-zip' }) { apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' + group = "org.elasticsearch.distribution.integ-test-zip" + integTest { includePackaged = true } @@ -321,23 +327,14 @@ configure(subprojects.findAll { it.name == 'integ-test-zip' }) { inputs.properties(project(':distribution').restTestExpansions) MavenFilteringHack.filter(it, project(':distribution').restTestExpansions) } -} -/***************************************************************************** - * Maven config * - *****************************************************************************/ -configure(subprojects.findAll { it.name.contains('zip') }) { - // only zip distributions go to maven + + // The integ-test-distribution is published to maven BuildPlugin.configurePomGeneration(project) apply plugin: 'nebula.info-scm' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' - // note: the group must be correct before applying the nexus plugin, or - // it will capture the wrong value... - String subgroup = project.name == 'integ-test-zip' ? 'integ-test-zip' : 'zip' - project.group = "org.elasticsearch.distribution.${subgroup}" - // make the pom file name use elasticsearch instead of the project name archivesBaseName = "elasticsearch${it.name.contains('oss') ? '-oss' : ''}" @@ -378,3 +375,4 @@ configure(subprojects.findAll { it.name.contains('zip') }) { } } } + diff --git a/distribution/build.gradle b/distribution/build.gradle index 6ba302a733a08..9c98101da5be3 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -17,18 +17,16 @@ * under the License. 
*/ + +import org.apache.tools.ant.filters.FixCrLfFilter import org.elasticsearch.gradle.ConcatFilesTask import org.elasticsearch.gradle.MavenFilteringHack import org.elasticsearch.gradle.NoticeTask import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.test.RunTask -import org.apache.tools.ant.filters.FixCrLfFilter import java.nio.file.Files -import java.nio.file.Path -import java.util.regex.Matcher -import java.util.regex.Pattern - +import java.nio.file.Path /***************************************************************************** * Third party dependencies report * *****************************************************************************/ @@ -219,70 +217,6 @@ xpack.subprojects.findAll { it.parent == xpack }.each { Project xpackModule -> copyLog4jProperties(buildDefaultLog4jConfig, xpackModule) } -/***************************************************************************** - * JDKs * - *****************************************************************************/ -// extract the bundled jdk version, broken into elements as: [feature, interim, update, build] -// Note the "patch" version is not yet handled here, as it has not yet been used by java. -Pattern JDK_VERSION = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+)@([a-f0-9]{32})?") -Matcher jdkVersionMatcher = JDK_VERSION.matcher(VersionProperties.bundledJdk) -if (jdkVersionMatcher.matches() == false) { - throw new IllegalArgumentException("Malformed jdk version [" + VersionProperties.bundledJdk + "]") -} -String jdkVersion = jdkVersionMatcher.group(1) + (jdkVersionMatcher.group(2) != null ? (jdkVersionMatcher.group(2)) : "") -String jdkMajor = jdkVersionMatcher.group(1) -String jdkBuild = jdkVersionMatcher.group(3) -String hash = jdkVersionMatcher.group(4) - -repositories { - // simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back - ivy { - url "https://download.oracle.com" - metadataSources { - artifact() - } - patternLayout { - artifact "java/GA/jdk${jdkMajor}/${jdkBuild}/GPL/openjdk-[revision]_[module]-x64_bin.[ext]" - } - } - // current pattern since 12.0.1 - ivy { - url "https://download.oracle.com" - metadataSources { - artifact() - } - patternLayout { - artifact "java/GA/jdk${jdkVersion}/${hash}/${jdkBuild}/GPL/openjdk-[revision]_[module]-x64_bin.[ext]" - } - } -} -for (String platform : ['linux', 'darwin', 'windows']) { - String jdkConfigName = "jdk_${platform}" - Configuration jdkConfig = configurations.create(jdkConfigName) - String extension = platform.equals('windows') ? 'zip' : 'tar.gz' - dependencies.add(jdkConfigName, "jdk:${platform.equals('darwin') ? 'osx' : platform}:${jdkVersion}@${extension}") - - int rootNdx = platform.equals('darwin') ? 
2 : 1 - Closure removeRootDir = { - it.eachFile { FileCopyDetails details -> - details.relativePath = new RelativePath(true, details.relativePath.segments[rootNdx..-1] as String[]) - } - it.includeEmptyDirs false - } - String extractDir = "${buildDir}/jdks/openjdk-${jdkVersion}_${platform}" - project.task("extract${platform.capitalize()}Jdk", type: Copy) { - doFirst { - project.delete(extractDir) - } - into extractDir - if (extension.equals('zip')) { - from({ zipTree(jdkConfig.singleFile) }, removeRootDir) - } else { - from({ tarTree(resources.gzip(jdkConfig.singleFile)) }, removeRootDir) - } - } -} - // make sure we have a clean task since we aren't a java project, but we have tasks that // put stuff in the build dir task clean(type: Delete) { @@ -290,6 +224,9 @@ task clean(type: Delete) { } configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { + + apply plugin: 'elasticsearch.jdk-download' + // TODO: the map needs to be an input of the tasks, so that when it changes, the task will re-run... /***************************************************************************** * Properties to expand when copying packaging files * @@ -431,9 +368,15 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } } - jdkFiles = { platform -> - copySpec { - from project(':distribution').tasks.getByName("extract${platform.capitalize()}Jdk") + jdkFiles = { project, platform -> + project.jdks { + "bundled_${platform}" { + it.platform = platform + it.version = VersionProperties.bundledJdk + } + } + return copySpec { + from project.jdks."bundled_${platform}" eachFile { FileCopyDetails details -> if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') { details.mode = 0755 diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index 8285d8dae2b0d..87644fb7f6785 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -121,8 +121,9 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased } Closure createRunBwcGradleTask = { name, extraConfig -> - return tasks.create(name: "$name", type: Exec) { + return tasks.create(name: "$name", type: LoggedExec) { dependsOn checkoutBwcBranch, writeBuildMetadata + spoolOutput = true workingDir = checkoutDir doFirst { // Execution time so that the checkouts are available diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index af1479d360cd6..136803a5d83ea 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -53,6 +53,7 @@ import java.util.regex.Pattern buildscript { repositories { maven { + name "gradle-plugins" url "https://plugins.gradle.org/m2/" } } @@ -142,7 +143,7 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) { } if (jdk) { into('jdk') { - with jdkFiles('linux') + with jdkFiles(project, 'linux') } } // we need to specify every intermediate directory in these paths so the package managers know they are explicitly @@ -269,13 +270,13 @@ apply plugin: 'nebula.ospackage-base' // this is package indepdendent configuration ospackage { maintainer 'Elasticsearch Team ' - summary ''' - Elasticsearch is a distributed RESTful search engine built for the cloud. 
+ summary 'Distributed RESTful search engine built for the cloud' + packageDescription ''' Reference documentation can be found at https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html and the 'Elasticsearch: The Definitive Guide' book can be found at https://www.elastic.co/guide/en/elasticsearch/guide/current/index.html - '''.stripIndent().replace('\n', ' ').trim() + '''.stripIndent().trim() url 'https://www.elastic.co/' // signing setup @@ -287,7 +288,8 @@ ospackage { new File(new File(System.getProperty('user.home'), '.gnupg'), 'secring.gpg') } - requires('coreutils') + // version found on oldest supported distro, centos-6 + requires('coreutils', '8.4', GREATER | EQUAL) fileMode 0644 dirMode 0755 @@ -311,12 +313,18 @@ Closure commonDebConfig(boolean oss, boolean jdk) { version = project.version.replace('-', '~') packageGroup 'web' - requires 'bash' + + // versions found on oldest supported distro, centos-6 + requires('bash', '4.1', GREATER | EQUAL) + requires('lsb-base', '4', GREATER | EQUAL) requires 'libc6' requires 'adduser' into('/usr/share/lintian/overrides') { from('src/deb/lintian/elasticsearch') + if (oss) { + rename('elasticsearch', 'elasticsearch-oss') + } } } } diff --git a/distribution/packages/src/common/scripts/postrm b/distribution/packages/src/common/scripts/postrm index a3cb5e1208fe7..c54df43450af4 100644 --- a/distribution/packages/src/common/scripts/postrm +++ b/distribution/packages/src/common/scripts/postrm @@ -8,7 +8,6 @@ # On RedHat, # $1=0 : indicates a removal # $1=1 : indicates an upgrade - REMOVE_DIRS=false REMOVE_USER_AND_GROUP=false @@ -55,6 +54,13 @@ if [ "$REMOVE_DIRS" = "true" ]; then echo " OK" fi + # plugins may have contained bin files + if [ -d /usr/share/elasticsearch/bin ]; then + echo -n "Deleting plugin bin directories..." + rm -rf /usr/share/elasticsearch/bin + echo " OK" + fi + if [ -d /var/run/elasticsearch ]; then echo -n "Deleting PID directory..." 
rm -rf /var/run/elasticsearch diff --git a/distribution/packages/src/deb/lintian/elasticsearch b/distribution/packages/src/deb/lintian/elasticsearch index 1ca52eaed2863..98038177a003b 100644 --- a/distribution/packages/src/deb/lintian/elasticsearch +++ b/distribution/packages/src/deb/lintian/elasticsearch @@ -1,8 +1,48 @@ -# Ignore arch dependent warnings, we chose the right libs on start -elasticsearch binary: arch-independent-package-contains-binary-or-object -# Not stripping external libraries -elasticsearch binary: unstripped-binary-or-object -# Ignore arch dependent warnings, we chose the right libs on start -elasticsearch binary: arch-dependent-file-in-usr-share -# Please check our changelog at http://www.elastic.co/downloads/elasticsearch -elasticsearch binary: changelog-file-missing-in-native-package +# we don't have a changelog, but we put our copyright file +# under /usr/share/doc/elasticsearch, which triggers this warning +changelog-file-missing-in-native-package + +# we intentionally copy our copyright file for all deb packages +copyright-file-contains-full-apache-2-license +copyright-should-refer-to-common-license-file-for-apache-2 +copyright-without-copyright-notice + +# we still put all our files under /usr/share/elasticsearch even after transition to platform dependent packages +arch-dependent-file-in-usr-share + +# we have a bundled jdk, so don't use jarwrapper +missing-dep-on-jarwrapper + +# we prefer to not make our config and log files world readable +non-standard-file-perm etc/default/elasticsearch 0660 != 0644 +non-standard-dir-perm etc/elasticsearch/ 2750 != 0755 +non-standard-file-perm etc/elasticsearch/* +non-standard-dir-perm var/lib/elasticsearch/ 2750 != 0755 +non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755 +executable-is-not-world-readable etc/init.d/elasticsearch 0750 +non-standard-file-permissions-for-etc-init.d-script etc/init.d/elasticsearch 0750 != 0755 + +# this lintian tag is simply wrong; contrary to the explanation, debian systemd +# does actually look at /usr/lib/systemd/system +systemd-service-file-outside-lib usr/lib/systemd/system/elasticsearch.service + +# we do not automatically enable the service in init.d or systemd +script-in-etc-init.d-not-registered-via-update-rc.d etc/init.d/elasticsearch + +# the package scripts handle init.d/systemd directly and don't need to use deb helpers +maintainer-script-calls-systemctl +prerm-calls-updaterc.d elasticsearch + +# bundled JDK +embedded-library +arch-dependent-file-in-usr-share usr/share/elasticsearch/jdk/* +unstripped-binary-or-object usr/share/elasticsearch/jdk/* +extra-license-file usr/share/elasticsearch/jdk/legal/* +hardening-no-pie usr/share/elasticsearch/jdk/bin/* +hardening-no-pie usr/share/elasticsearch/jdk/lib/* + +# the system java version that lintian assumes is far behind what elasticsearch uses +unknown-java-class-version + +# elastic licensed modules contain elastic license +extra-license-file usr/share/elasticsearch/modules/* diff --git a/distribution/src/bin/elasticsearch b/distribution/src/bin/elasticsearch index 8bdea4950cb75..b7ed2b648b76f 100755 --- a/distribution/src/bin/elasticsearch +++ b/distribution/src/bin/elasticsearch @@ -16,9 +16,13 @@ source "`dirname "$0"`"/elasticsearch-env +if [ -z "$ES_TMPDIR" ]; then + ES_TMPDIR=`"$JAVA" -cp "$ES_CLASSPATH" org.elasticsearch.tools.launchers.TempDirectory` +fi + ES_JVM_OPTIONS="$ES_PATH_CONF"/jvm.options JVM_OPTIONS=`"$JAVA" -cp "$ES_CLASSPATH" org.elasticsearch.tools.launchers.JvmOptionsParser "$ES_JVM_OPTIONS"` 
-ES_JAVA_OPTS="${JVM_OPTIONS//\$\{ES_TMPDIR\}/$ES_TMPDIR} $ES_JAVA_OPTS" +ES_JAVA_OPTS="${JVM_OPTIONS//\$\{ES_TMPDIR\}/$ES_TMPDIR}" # manual parsing to find out, if process should be detached if ! echo $* | grep -E '(^-d |-d$| -d |--daemonize$|--daemonize )' > /dev/null; then diff --git a/distribution/src/bin/elasticsearch-env b/distribution/src/bin/elasticsearch-env index 2a490622b34b4..78cb503ecef7c 100644 --- a/distribution/src/bin/elasticsearch-env +++ b/distribution/src/bin/elasticsearch-env @@ -84,8 +84,4 @@ ES_DISTRIBUTION_FLAVOR=${es.distribution.flavor} ES_DISTRIBUTION_TYPE=${es.distribution.type} ES_BUNDLED_JDK=${es.bundled_jdk} -if [ -z "$ES_TMPDIR" ]; then - ES_TMPDIR=`"$JAVA" -cp "$ES_CLASSPATH" org.elasticsearch.tools.launchers.TempDirectory` -fi - cd "$ES_HOME" diff --git a/distribution/src/bin/elasticsearch-env.bat b/distribution/src/bin/elasticsearch-env.bat index f1cdc2fd22457..8ac141986a4a7 100644 --- a/distribution/src/bin/elasticsearch-env.bat +++ b/distribution/src/bin/elasticsearch-env.bat @@ -64,6 +64,3 @@ if defined JAVA_OPTS ( rem check the Java version %JAVA% -cp "%ES_CLASSPATH%" "org.elasticsearch.tools.java_version_checker.JavaVersionChecker" || exit /b 1 -if not defined ES_TMPDIR ( - for /f "tokens=* usebackq" %%a in (`CALL %JAVA% -cp "!ES_CLASSPATH!" "org.elasticsearch.tools.launchers.TempDirectory"`) do set ES_TMPDIR=%%a -) diff --git a/distribution/src/bin/elasticsearch-service.bat b/distribution/src/bin/elasticsearch-service.bat index 7a0be55c4f565..2f9c280743dfb 100644 --- a/distribution/src/bin/elasticsearch-service.bat +++ b/distribution/src/bin/elasticsearch-service.bat @@ -112,7 +112,7 @@ if not "%ES_JAVA_OPTS%" == "" set ES_JAVA_OPTS=%ES_JAVA_OPTS: =;% @setlocal for /F "usebackq delims=" %%a in (`"%JAVA% -cp "!ES_CLASSPATH!" "org.elasticsearch.tools.launchers.JvmOptionsParser" "!ES_JVM_OPTIONS!" || echo jvm_options_parser_failed"`) do set JVM_OPTIONS=%%a -@endlocal & set "MAYBE_JVM_OPTIONS_PARSER_FAILED=%JVM_OPTIONS%" & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!% %ES_JAVA_OPTS% +@endlocal & set "MAYBE_JVM_OPTIONS_PARSER_FAILED=%JVM_OPTIONS%" & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!% if "%MAYBE_JVM_OPTIONS_PARSER_FAILED%" == "jvm_options_parser_failed" ( exit /b 1 diff --git a/distribution/src/bin/elasticsearch.bat b/distribution/src/bin/elasticsearch.bat index ecbbad826e797..8ef77ac4c7fe9 100644 --- a/distribution/src/bin/elasticsearch.bat +++ b/distribution/src/bin/elasticsearch.bat @@ -41,10 +41,14 @@ IF ERRORLEVEL 1 ( EXIT /B %ERRORLEVEL% ) +if not defined ES_TMPDIR ( + for /f "tokens=* usebackq" %%a in (`CALL %JAVA% -cp "!ES_CLASSPATH!" "org.elasticsearch.tools.launchers.TempDirectory"`) do set ES_TMPDIR=%%a +) + set ES_JVM_OPTIONS=%ES_PATH_CONF%\jvm.options @setlocal for /F "usebackq delims=" %%a in (`CALL %JAVA% -cp "!ES_CLASSPATH!" "org.elasticsearch.tools.launchers.JvmOptionsParser" "!ES_JVM_OPTIONS!" 
^|^| echo jvm_options_parser_failed`) do set JVM_OPTIONS=%%a -@endlocal & set "MAYBE_JVM_OPTIONS_PARSER_FAILED=%JVM_OPTIONS%" & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!% %ES_JAVA_OPTS% +@endlocal & set "MAYBE_JVM_OPTIONS_PARSER_FAILED=%JVM_OPTIONS%" & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!% if "%MAYBE_JVM_OPTIONS_PARSER_FAILED%" == "jvm_options_parser_failed" ( exit /b 1 diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java index 761cd9e1be5db..d0d5bef9cfcf4 100644 --- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmErgonomics.java @@ -19,24 +19,27 @@ package org.elasticsearch.tools.launchers; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** * Tunes Elasticsearch JVM settings based on inspection of provided JVM options. */ final class JvmErgonomics { - private static final long KB = 1024L; - - private static final long MB = 1024L * 1024L; - - private static final long GB = 1024L * 1024L * 1024L; - private JvmErgonomics() { throw new AssertionError("No instances intended"); @@ -48,48 +51,81 @@ private JvmErgonomics() { * @param userDefinedJvmOptions A list of JVM options that have been defined by the user. * @return A list of additional JVM options to set. 
      */
-    static List<String> choose(List<String> userDefinedJvmOptions) {
-        List<String> ergonomicChoices = new ArrayList<>();
-        Long heapSize = extractHeapSize(userDefinedJvmOptions);
-        Map<String, String> systemProperties = extractSystemProperties(userDefinedJvmOptions);
-        if (heapSize != null) {
-            if (systemProperties.containsKey("io.netty.allocator.type") == false) {
-                if (heapSize <= 1 * GB) {
-                    ergonomicChoices.add("-Dio.netty.allocator.type=unpooled");
-                } else {
-                    ergonomicChoices.add("-Dio.netty.allocator.type=pooled");
-                }
+    static List<String> choose(final List<String> userDefinedJvmOptions) throws InterruptedException, IOException {
+        final List<String> ergonomicChoices = new ArrayList<>();
+        final Map<String, Optional<String>> finalJvmOptions = finalJvmOptions(userDefinedJvmOptions);
+        final long heapSize = extractHeapSize(finalJvmOptions);
+        final Map<String, String> systemProperties = extractSystemProperties(userDefinedJvmOptions);
+        if (systemProperties.containsKey("io.netty.allocator.type") == false) {
+            if (heapSize <= 1 << 30) {
+                ergonomicChoices.add("-Dio.netty.allocator.type=unpooled");
+            } else {
+                ergonomicChoices.add("-Dio.netty.allocator.type=pooled");
             }
         }
+        final long maxDirectMemorySize = extractMaxDirectMemorySize(finalJvmOptions);
+        if (maxDirectMemorySize == 0) {
+            ergonomicChoices.add("-XX:MaxDirectMemorySize=" + heapSize / 2);
+        }
         return ergonomicChoices;
     }
-    private static final Pattern MAX_HEAP_SIZE = Pattern.compile("^(-Xmx|-XX:MaxHeapSize=)(?<size>\\d+)(?<unit>\\w)?$");
+    private static final Pattern OPTION =
+        Pattern.compile("^\\s*\\S+\\s+(?<flag>\\S+)\\s+:?=\\s+(?<value>\\S+)?\\s+\\{[^}]+?\\}\\s+\\{[^}]+}");
-    // package private for testing
-    static Long extractHeapSize(List<String> userDefinedJvmOptions) {
-        for (String jvmOption : userDefinedJvmOptions) {
-            final Matcher matcher = MAX_HEAP_SIZE.matcher(jvmOption);
-            if (matcher.matches()) {
-                final long size = Long.parseLong(matcher.group("size"));
-                final String unit = matcher.group("unit");
-                if (unit == null) {
-                    return size;
-                } else {
-                    switch (unit.toLowerCase(Locale.ROOT)) {
-                        case "k":
-                            return size * KB;
-                        case "m":
-                            return size * MB;
-                        case "g":
-                            return size * GB;
-                        default:
-                            throw new IllegalArgumentException("Unknown unit [" + unit + "] for max heap size in [" + jvmOption + "]");
-                    }
-                }
-            }
+    static Map<String, Optional<String>> finalJvmOptions(
+            final List<String> userDefinedJvmOptions) throws InterruptedException, IOException {
+        return flagsFinal(userDefinedJvmOptions).stream()
+            .map(OPTION::matcher).filter(Matcher::matches)
+            .collect(Collectors.toUnmodifiableMap(m -> m.group("flag"), m -> Optional.ofNullable(m.group("value"))));
+    }
+
+    private static List<String> flagsFinal(final List<String> userDefinedJvmOptions) throws InterruptedException, IOException {
+        /*
+         * To deduce the final set of JVM options that Elasticsearch is going to start with, we start a separate Java process with the JVM
+         * options that we would pass on the command line. For this Java process we will add two additional flags, -XX:+PrintFlagsFinal and
+         * -version. This causes the Java process that we start to parse the JVM options into their final values, display them on standard
+         * output, print the version to standard error, and then exit. The JVM itself never bootstraps, and therefore this process is
+         * lightweight. By doing this, we get the JVM options parsed exactly as the JVM that we are going to execute would parse them
+         * without having to implement our own JVM option parsing logic.
+         */
+        final String java = Path.of(System.getProperty("java.home"), "bin", "java").toString();
+        final List<String> command =
+            Stream.of(Stream.of(java), userDefinedJvmOptions.stream(), Stream.of("-XX:+PrintFlagsFinal"), Stream.of("-version"))
+                .reduce(Stream::concat)
+                .get()
+                .collect(Collectors.toUnmodifiableList());
+        final Process process = new ProcessBuilder().command(command).start();
+        final List<String> output = readLinesFromInputStream(process.getInputStream());
+        final List<String> error = readLinesFromInputStream(process.getErrorStream());
+        final int status = process.waitFor();
+        if (status != 0) {
+            final String message = String.format(
+                Locale.ROOT,
+                "starting java failed with [%d]\noutput:\n%s\nerror:\n%s",
+                status,
+                String.join("\n", output),
+                String.join("\n", error));
+            throw new RuntimeException(message);
+        } else {
+            return output;
         }
-        return null;
+    }
+
+    private static List<String> readLinesFromInputStream(final InputStream is) throws IOException {
+        try (InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8);
+             BufferedReader br = new BufferedReader(isr)) {
+            return br.lines().collect(Collectors.toUnmodifiableList());
+        }
+    }
+
+    // package private for testing
+    static Long extractHeapSize(final Map<String, Optional<String>> finalJvmOptions) {
+        return Long.parseLong(finalJvmOptions.get("MaxHeapSize").get());
+    }
+
+    static long extractMaxDirectMemorySize(final Map<String, Optional<String>> finalJvmOptions) {
+        return Long.parseLong(finalJvmOptions.get("MaxDirectMemorySize").get());
     }
     private static final Pattern SYSTEM_PROPERTY = Pattern.compile("^-D(?<key>[\\w+].*?)=(?<value>.*)$");
@@ -105,4 +141,5 @@ static Map<String, String> extractSystemProperties(List<String> userDefinedJvmOp
         }
         return systemProperties;
     }
+
 }
diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java
index d74f106c50ba4..7894cab72a1ee 100644
--- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java
+++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java
@@ -19,12 +19,14 @@
 package org.elasticsearch.tools.launchers;
+import org.elasticsearch.tools.java_version_checker.JavaVersion;
+
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.Reader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.ArrayList;
@@ -35,10 +37,10 @@
 import java.util.Map;
 import java.util.SortedMap;
 import java.util.TreeMap;
+import java.util.function.Predicate;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-
-import org.elasticsearch.tools.java_version_checker.JavaVersion;
+import java.util.stream.Collectors;
 /**
  * Parses JVM options from a file and prints a single line with all JVM options to standard output.
@@ -51,14 +53,14 @@ final class JvmOptionsParser { * * @param args the args to the program which should consist of a single option, the path to the JVM options */ - public static void main(final String[] args) throws IOException { + public static void main(final String[] args) throws InterruptedException, IOException { if (args.length != 1) { throw new IllegalArgumentException("expected one argument specifying path to jvm.options but was " + Arrays.toString(args)); } final List jvmOptions = new ArrayList<>(); final SortedMap invalidLines = new TreeMap<>(); try (InputStream is = Files.newInputStream(Paths.get(args[0])); - Reader reader = new InputStreamReader(is, Charset.forName("UTF-8")); + Reader reader = new InputStreamReader(is, StandardCharsets.UTF_8); BufferedReader br = new BufferedReader(reader)) { parse( JavaVersion.majorVersion(JavaVersion.CURRENT), @@ -78,7 +80,14 @@ public void accept(final int lineNumber, final String line) { } if (invalidLines.isEmpty()) { - List ergonomicJvmOptions = JvmErgonomics.choose(jvmOptions); + // now append the JVM options from ES_JAVA_OPTS + final String environmentJvmOptions = System.getenv("ES_JAVA_OPTS"); + if (environmentJvmOptions != null) { + jvmOptions.addAll(Arrays.stream(environmentJvmOptions.split("\\s+")) + .filter(Predicate.not(String::isBlank)) + .collect(Collectors.toUnmodifiableList())); + } + final List ergonomicJvmOptions = JvmErgonomics.choose(jvmOptions); jvmOptions.addAll(ergonomicJvmOptions); final String spaceDelimitedJvmOptions = spaceDelimitJvmOptions(jvmOptions); Launchers.outPrintln(spaceDelimitedJvmOptions); diff --git a/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/JvmErgonomicsTests.java b/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/JvmErgonomicsTests.java index 4b075d78b70a8..7fe5cd0cf98b0 100644 --- a/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/JvmErgonomicsTests.java +++ b/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/JvmErgonomicsTests.java @@ -19,38 +19,86 @@ package org.elasticsearch.tools.launchers; +import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.everyItem; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasToString; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class JvmErgonomicsTests extends LaunchersTestCase { - public void testExtractValidHeapSize() { - assertEquals(Long.valueOf(1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx1024"))); - assertEquals(Long.valueOf(2L * 1024 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx2g"))); - assertEquals(Long.valueOf(32 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx32M"))); - assertEquals(Long.valueOf(32 * 1024 * 1024), JvmErgonomics.extractHeapSize(Collections.singletonList("-XX:MaxHeapSize=32M"))); + + public void testExtractValidHeapSizeUsingXmx() throws InterruptedException, 
IOException { + assertThat( + JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.singletonList("-Xmx2g"))), + equalTo(2L << 30)); + } + + public void testExtractValidHeapSizeUsingMaxHeapSize() throws InterruptedException, IOException { + assertThat( + JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.singletonList("-XX:MaxHeapSize=2g"))), + equalTo(2L << 30)); + } + + public void testExtractValidHeapSizeNoOptionPresent() throws InterruptedException, IOException { + assertThat( + JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.emptyList())), + greaterThan(0L)); + } + + public void testHeapSizeInvalid() throws InterruptedException, IOException { + try { + JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.singletonList("-Xmx2Z"))); + fail("expected starting java to fail"); + } catch (final RuntimeException e) { + assertThat(e, hasToString(containsString(("starting java failed")))); + assertThat(e, hasToString(containsString(("Invalid maximum heap size: -Xmx2Z")))); + } + } + + public void testHeapSizeTooSmall() throws InterruptedException, IOException { + try { + JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.singletonList("-Xmx1024"))); + fail("expected starting java to fail"); + } catch (final RuntimeException e) { + assertThat(e, hasToString(containsString(("starting java failed")))); + assertThat(e, hasToString(containsString(("Too small maximum heap")))); + } } - public void testExtractInvalidHeapSize() { + public void testHeapSizeWithSpace() throws InterruptedException, IOException { try { - JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx2T")); - fail("Expected IllegalArgumentException to be raised"); - } catch (IllegalArgumentException expected) { - assertEquals("Unknown unit [T] for max heap size in [-Xmx2T]", expected.getMessage()); + JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.singletonList("-Xmx 1024"))); + fail("expected starting java to fail"); + } catch (final RuntimeException e) { + assertThat(e, hasToString(containsString(("starting java failed")))); + assertThat(e, hasToString(containsString(("Invalid maximum heap size: -Xmx 1024")))); } } - public void testExtractNoHeapSize() { - assertNull("No spaces allowed", JvmErgonomics.extractHeapSize(Collections.singletonList("-Xmx 1024"))); - assertNull("JVM option is not present", JvmErgonomics.extractHeapSize(Collections.singletonList(""))); - assertNull("Multiple JVM options per line", JvmErgonomics.extractHeapSize(Collections.singletonList("-Xms2g -Xmx2g"))); + public void testMaxDirectMemorySizeUnset() throws InterruptedException, IOException { + assertThat( + JvmErgonomics.extractMaxDirectMemorySize(JvmErgonomics.finalJvmOptions(Collections.singletonList("-Xmx1g"))), + equalTo(0L)); + } + + public void testMaxDirectMemorySizeSet() throws InterruptedException, IOException { + assertThat( + JvmErgonomics.extractMaxDirectMemorySize(JvmErgonomics.finalJvmOptions( + Arrays.asList("-Xmx1g", "-XX:MaxDirectMemorySize=512m"))), + equalTo(512L << 20)); } public void testExtractSystemProperties() { @@ -69,15 +117,39 @@ public void testExtractNoSystemProperties() { assertTrue(parsedSystemProperties.isEmpty()); } - public void testLittleMemoryErgonomicChoices() { - String smallHeap = randomFrom(Arrays.asList("64M", "512M", "1024M", "1G")); - List expectedChoices = Collections.singletonList("-Dio.netty.allocator.type=unpooled"); - assertEquals(expectedChoices, 
JvmErgonomics.choose(Arrays.asList("-Xms" + smallHeap, "-Xmx" + smallHeap))); + public void testPooledMemoryChoiceOnSmallHeap() throws InterruptedException, IOException { + final String smallHeap = randomFrom(Arrays.asList("64M", "512M", "1024M", "1G")); + assertThat( + JvmErgonomics.choose(Arrays.asList("-Xms" + smallHeap, "-Xmx" + smallHeap)), + hasItem("-Dio.netty.allocator.type=unpooled")); } - public void testPlentyMemoryErgonomicChoices() { - String largeHeap = randomFrom(Arrays.asList("1025M", "2048M", "2G", "8G")); - List expectedChoices = Collections.singletonList("-Dio.netty.allocator.type=pooled"); - assertEquals(expectedChoices, JvmErgonomics.choose(Arrays.asList("-Xms" + largeHeap, "-Xmx" + largeHeap))); + public void testPooledMemoryChoiceOnNotSmallHeap() throws InterruptedException, IOException { + final String largeHeap = randomFrom(Arrays.asList("1025M", "2048M", "2G", "8G")); + assertThat( + JvmErgonomics.choose(Arrays.asList("-Xms" + largeHeap, "-Xmx" + largeHeap)), + hasItem("-Dio.netty.allocator.type=pooled")); } + + public void testMaxDirectMemorySizeChoice() throws InterruptedException, IOException { + final Map heapMaxDirectMemorySize = Map.of( + "64M", Long.toString((64L << 20) / 2), + "512M", Long.toString((512L << 20) / 2), + "1024M", Long.toString((1024L << 20) / 2), + "1G", Long.toString((1L << 30) / 2), + "2048M", Long.toString((2048L << 20) / 2), + "2G", Long.toString((2L << 30) / 2), + "8G", Long.toString((8L << 30) / 2)); + final String heapSize = randomFrom(heapMaxDirectMemorySize.keySet().toArray(String[]::new)); + assertThat( + JvmErgonomics.choose(Arrays.asList("-Xms" + heapSize, "-Xmx" + heapSize)), + hasItem("-XX:MaxDirectMemorySize=" + heapMaxDirectMemorySize.get(heapSize))); + } + + public void testMaxDirectMemorySizeChoiceWhenSet() throws InterruptedException, IOException { + assertThat( + JvmErgonomics.choose(Arrays.asList("-Xms1g", "-Xmx1g", "-XX:MaxDirectMemorySize=1g")), + everyItem(not(startsWith("-XX:MaxDirectMemorySize=")))); + } + } diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index f728d76751dc3..224be0a0cee30 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,4 +1,4 @@ -:version: 8.0.0-alpha1 +:version: 8.0.0 //// bare_version never includes -alpha or -beta //// diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index 764d53d48172c..4a7fd7482d26e 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -68,6 +68,7 @@ For Gradle: ["source","groovy",subs="attributes"] -------------------------------------------------- maven { + name "lucene-snapshots" url 'https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/00142c9' } -------------------------------------------------- diff --git a/docs/java-rest/high-level/dataframe/get_data_frame.asciidoc b/docs/java-rest/high-level/dataframe/get_data_frame.asciidoc index 41fa841060b30..ec2253b2c25f4 100644 --- a/docs/java-rest/high-level/dataframe/get_data_frame.asciidoc +++ b/docs/java-rest/high-level/dataframe/get_data_frame.asciidoc @@ -13,7 +13,7 @@ The API accepts a +{request}+ object and returns a +{response}+. 
==== Get Data Frame Request A +{request}+ requires either a data frame transform id, a comma separated list of ids or -the special wildcard `_all` to get all {dataframe-transform}s +the special wildcard `_all` to get all {dataframe-transforms} ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -29,8 +29,10 @@ The following arguments are optional. -------------------------------------------------- include-tagged::{doc-tests-file}[{api}-request-options] -------------------------------------------------- -<1> Page {dataframe-transform}s starting from this value -<2> Return at most `size` {dataframe-transform}s +<1> The page parameters `from` and `size`. `from` specifies the number of +{dataframe-transforms} to skip. `size` specifies the maximum number of +{dataframe-transforms} to get. Defaults to `0` and `100` respectively. + include::../execution.asciidoc[] diff --git a/docs/java-rest/high-level/dataframe/put_data_frame.asciidoc b/docs/java-rest/high-level/dataframe/put_data_frame.asciidoc index bb1b20aaa1a52..567449c9c25b1 100644 --- a/docs/java-rest/high-level/dataframe/put_data_frame.asciidoc +++ b/docs/java-rest/high-level/dataframe/put_data_frame.asciidoc @@ -66,6 +66,11 @@ Defines the pivot function `group by` fields and the aggregation to reduce the d -------------------------------------------------- include-tagged::{doc-tests-file}[{api}-pivot-config] -------------------------------------------------- +<1> The `GroupConfig` to use in the pivot +<2> The aggregations to use +<3> The maximum paging size for the transform when pulling data +from the source. The size dynamically adjusts as the transform +is running to recover from and prevent OOM issues. ===== GroupConfig The grouping terms. 
Defines the group by and destination fields diff --git a/docs/java-rest/high-level/getting-started.asciidoc b/docs/java-rest/high-level/getting-started.asciidoc index 5ef28d7fc86d1..89912cc2a4593 100644 --- a/docs/java-rest/high-level/getting-started.asciidoc +++ b/docs/java-rest/high-level/getting-started.asciidoc @@ -104,6 +104,7 @@ For Gradle: ["source","groovy",subs="attributes"] -------------------------------------------------- maven { + name 'lucene-snapshots' url 'https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/83f9835' } -------------------------------------------------- diff --git a/docs/painless/painless-api-reference/index.asciidoc b/docs/painless/painless-api-reference/index.asciidoc index 88130f7fdfc02..652904533223e 100644 --- a/docs/painless/painless-api-reference/index.asciidoc +++ b/docs/painless/painless-api-reference/index.asciidoc @@ -10,7 +10,7 @@ |Aggs Reduce | <> | |Analysis | <> | <> |Bucket Aggregation | <> | -|Field | <> | +|Field | <> | <> |Filter | <> | |Ingest | <> | <> |Interval | <> | @@ -33,6 +33,7 @@ include::painless-api-reference-shared/index.asciidoc[] include::painless-api-reference-analysis/index.asciidoc[] +include::painless-api-reference-field/index.asciidoc[] include::painless-api-reference-ingest/index.asciidoc[] include::painless-api-reference-moving-function/index.asciidoc[] include::painless-api-reference-score/index.asciidoc[] diff --git a/docs/painless/painless-api-reference/painless-api-reference-analysis/index.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-analysis/index.asciidoc index 8dc729b31ea1f..d09af700a2fdc 100644 --- a/docs/painless/painless-api-reference/painless-api-reference-analysis/index.asciidoc +++ b/docs/painless/painless-api-reference/painless-api-reference-analysis/index.asciidoc @@ -7,6 +7,10 @@ The following specialized API is available in the Analysis context. * See the <> for further API available in all contexts. +==== Classes By Package +The following classes are available grouped by their respective packages. Click on a class to view details about the available methods and fields. + + ==== org.elasticsearch.analysis.common <> diff --git a/docs/painless/painless-api-reference/painless-api-reference-analysis/packages.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-analysis/packages.asciidoc index 106f9272df4a8..ff272cb228f6f 100644 --- a/docs/painless/painless-api-reference/painless-api-reference-analysis/packages.asciidoc +++ b/docs/painless/painless-api-reference/painless-api-reference-analysis/packages.asciidoc @@ -3,7 +3,7 @@ [role="exclude",id="painless-api-reference-analysis-org-elasticsearch-analysis-common"] === Analysis API for package org.elasticsearch.analysis.common -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-analysis-AnalysisPredicateScript-Token]] ==== AnalysisPredicateScript.Token diff --git a/docs/painless/painless-api-reference/painless-api-reference-field/index.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-field/index.asciidoc new file mode 100644 index 0000000000000..eb71e71ccf165 --- /dev/null +++ b/docs/painless/painless-api-reference/painless-api-reference-field/index.asciidoc @@ -0,0 +1,17 @@ +// This file is auto-generated. Do not edit. + +[[painless-api-reference-field]] +=== Field API + +The following specialized API is available in the Field context. 
+ +* See the <> for further API available in all contexts. + +==== Static Methods +The following methods are directly callable without a class/instance qualifier. Note parameters denoted by a (*) are treated as read-only values. + +* List domainSplit(String) +* List domainSplit(String, Map) + +include::packages.asciidoc[] + diff --git a/docs/painless/painless-api-reference/painless-api-reference-field/packages.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-field/packages.asciidoc new file mode 100644 index 0000000000000..282fcf136a65c --- /dev/null +++ b/docs/painless/painless-api-reference/painless-api-reference-field/packages.asciidoc @@ -0,0 +1,3 @@ +// This file is auto-generated. Do not edit. + + diff --git a/docs/painless/painless-api-reference/painless-api-reference-ingest/index.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-ingest/index.asciidoc index e4067c24dcea0..ff70233defb0c 100644 --- a/docs/painless/painless-api-reference/painless-api-reference-ingest/index.asciidoc +++ b/docs/painless/painless-api-reference/painless-api-reference-ingest/index.asciidoc @@ -7,6 +7,10 @@ The following specialized API is available in the Ingest context. * See the <> for further API available in all contexts. +==== Classes By Package +The following classes are available grouped by their respective packages. Click on a class to view details about the available methods and fields. + + ==== org.elasticsearch.ingest.common <> diff --git a/docs/painless/painless-api-reference/painless-api-reference-ingest/packages.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-ingest/packages.asciidoc index b6a48ee7d5d2e..a4a5a4529cc95 100644 --- a/docs/painless/painless-api-reference/painless-api-reference-ingest/packages.asciidoc +++ b/docs/painless/painless-api-reference/painless-api-reference-ingest/packages.asciidoc @@ -3,7 +3,7 @@ [role="exclude",id="painless-api-reference-ingest-org-elasticsearch-ingest-common"] === Ingest API for package org.elasticsearch.ingest.common -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-ingest-Processors]] ==== Processors diff --git a/docs/painless/painless-api-reference/painless-api-reference-moving-function/index.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-moving-function/index.asciidoc index 9d37e81a94fc7..93a88519d65ce 100644 --- a/docs/painless/painless-api-reference/painless-api-reference-moving-function/index.asciidoc +++ b/docs/painless/painless-api-reference/painless-api-reference-moving-function/index.asciidoc @@ -7,6 +7,10 @@ The following specialized API is available in the Moving Function context. * See the <> for further API available in all contexts. +==== Classes By Package +The following classes are available grouped by their respective packages. Click on a class to view details about the available methods and fields. 
+ + ==== org.elasticsearch.search.aggregations.pipeline <> diff --git a/docs/painless/painless-api-reference/painless-api-reference-moving-function/packages.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-moving-function/packages.asciidoc index 824aa23f7ebfe..bdd8b1fd73ce2 100644 --- a/docs/painless/painless-api-reference/painless-api-reference-moving-function/packages.asciidoc +++ b/docs/painless/painless-api-reference/painless-api-reference-moving-function/packages.asciidoc @@ -3,7 +3,7 @@ [role="exclude",id="painless-api-reference-moving-function-org-elasticsearch-search-aggregations-pipeline"] === Moving Function API for package org.elasticsearch.search.aggregations.pipeline -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-moving-function-MovingFunctions]] ==== MovingFunctions diff --git a/docs/painless/painless-api-reference/painless-api-reference-score/index.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-score/index.asciidoc index fe9e0e1d23505..d355a495e0625 100644 --- a/docs/painless/painless-api-reference/painless-api-reference-score/index.asciidoc +++ b/docs/painless/painless-api-reference/painless-api-reference-score/index.asciidoc @@ -7,6 +7,31 @@ The following specialized API is available in the Score context. * See the <> for further API available in all contexts. +==== Static Methods +The following methods are directly callable without a class/instance qualifier. Note parameters denoted by a (*) are treated as read-only values. + +* double cosineSimilarity(List *, VectorScriptDocValues.DenseVectorScriptDocValues) +* double cosineSimilaritySparse(Map *, VectorScriptDocValues.SparseVectorScriptDocValues) +* double decayDateExp(String *, String *, String *, double *, JodaCompatibleZonedDateTime) +* double decayDateGauss(String *, String *, String *, double *, JodaCompatibleZonedDateTime) +* double decayDateLinear(String *, String *, String *, double *, JodaCompatibleZonedDateTime) +* double decayGeoExp(String *, String *, String *, double *, GeoPoint) +* double decayGeoGauss(String *, String *, String *, double *, GeoPoint) +* double decayGeoLinear(String *, String *, String *, double *, GeoPoint) +* double decayNumericExp(double *, double *, double *, double *, double) +* double decayNumericGauss(double *, double *, double *, double *, double) +* double decayNumericLinear(double *, double *, double *, double *, double) +* double dotProduct(List, VectorScriptDocValues.DenseVectorScriptDocValues) +* double dotProductSparse(Map *, VectorScriptDocValues.SparseVectorScriptDocValues) +* double randomScore(int *) +* double randomScore(int *, String *) +* double saturation(double, double) +* double sigmoid(double, double, double) + +==== Classes By Package +The following classes are available grouped by their respective packages. Click on a class to view details about the available methods and fields. 
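For orientation, the `saturation` and `sigmoid` helpers listed above are plain scalar functions. The sketch below shows the formulas they are commonly documented with, `value / (value + k)` and `value^a / (k^a + value^a)`; it is an illustration of the math only, under that assumption, and not a copy of the Painless implementation.

[source,java]
--------------------------------------------------
// Illustrative only: the scalar math behind the saturation and sigmoid score helpers.
public final class ScoreHelpersSketch {

    // saturation(value, k) = value / (value + k)
    static double saturation(final double value, final double k) {
        return value / (value + k);
    }

    // sigmoid(value, k, a) = value^a / (k^a + value^a)
    static double sigmoid(final double value, final double k, final double a) {
        return Math.pow(value, a) / (Math.pow(k, a) + Math.pow(value, a));
    }

    public static void main(final String[] args) {
        System.out.println(saturation(8, 8)); // 0.5 when value == k
        System.out.println(sigmoid(8, 8, 2)); // also 0.5 when value == k
    }
}
--------------------------------------------------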
+ + ==== org.elasticsearch.index.query <> diff --git a/docs/painless/painless-api-reference/painless-api-reference-score/packages.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-score/packages.asciidoc index 287f7a223ca5e..10f0f1b6daeab 100644 --- a/docs/painless/painless-api-reference/painless-api-reference-score/packages.asciidoc +++ b/docs/painless/painless-api-reference/painless-api-reference-score/packages.asciidoc @@ -3,7 +3,7 @@ [role="exclude",id="painless-api-reference-score-org-elasticsearch-index-query"] === Score API for package org.elasticsearch.index.query -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-score-VectorScriptDocValues]] ==== VectorScriptDocValues diff --git a/docs/painless/painless-api-reference/painless-api-reference-shared/index.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-shared/index.asciidoc index c349602a7b580..d5452ce8fab96 100644 --- a/docs/painless/painless-api-reference/painless-api-reference-shared/index.asciidoc +++ b/docs/painless/painless-api-reference/painless-api-reference-shared/index.asciidoc @@ -5,6 +5,10 @@ The following API is available in all contexts. +==== Classes By Package +The following classes are available grouped by their respective packages. Click on a class to view details about the available methods and fields. + + ==== java.lang <> diff --git a/docs/painless/painless-api-reference/painless-api-reference-shared/packages.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-shared/packages.asciidoc index ed6e10e7b193c..75ad21ddc93f2 100644 --- a/docs/painless/painless-api-reference/painless-api-reference-shared/packages.asciidoc +++ b/docs/painless/painless-api-reference/painless-api-reference-shared/packages.asciidoc @@ -3,7 +3,7 @@ [role="exclude",id="painless-api-reference-shared-java-lang"] === Shared API for package java.lang -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-Appendable]] ==== Appendable @@ -1253,6 +1253,8 @@ See the <> for a high-level overview * String {java11-javadoc}/java.base/java/lang/String.html#replace(java.lang.CharSequence,java.lang.CharSequence)[replace](CharSequence, CharSequence) * String replaceAll(Pattern, Function) * String replaceFirst(Pattern, Function) +* String[] splitOnToken(String) +* String[] splitOnToken(String, int) * boolean {java11-javadoc}/java.base/java/lang/String.html#startsWith(java.lang.String)[startsWith](String) * boolean {java11-javadoc}/java.base/java/lang/String.html#startsWith(java.lang.String,int)[startsWith](String, int) * CharSequence {java11-javadoc}/java.base/java/lang/CharSequence.html#subSequence(int,int)[subSequence](int, int) @@ -1399,7 +1401,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-java-math"] === Shared API for package java.math -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-BigDecimal]] ==== BigDecimal @@ -1557,7 +1559,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-java-text"] === Shared API for package java.text -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. 
[[painless-api-reference-shared-Annotation]] ==== Annotation @@ -2265,7 +2267,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-java-time"] === Shared API for package java.time -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-Clock]] ==== Clock @@ -3078,7 +3080,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-java-time-chrono"] === Shared API for package java.time.chrono -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-AbstractChronology]] ==== AbstractChronology @@ -3675,7 +3677,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-java-time-format"] === Shared API for package java.time.format -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-DateTimeFormatter]] ==== DateTimeFormatter @@ -3874,7 +3876,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-java-time-temporal"] === Shared API for package java.time.temporal -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-ChronoField]] ==== ChronoField @@ -4166,7 +4168,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-java-time-zone"] === Shared API for package java.time.zone -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-ZoneOffsetTransition]] ==== ZoneOffsetTransition @@ -4265,7 +4267,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-java-util"] === Shared API for package java.util -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-AbstractCollection]] ==== AbstractCollection @@ -7194,7 +7196,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-java-util-function"] === Shared API for package java.util.function -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-BiConsumer]] ==== BiConsumer @@ -7582,7 +7584,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-java-util-regex"] === Shared API for package java.util.regex -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-Matcher]] ==== Matcher @@ -7635,7 +7637,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-java-util-stream"] === Shared API for package java.util.stream -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-BaseStream]] ==== BaseStream @@ -7957,7 +7959,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-org-apache-lucene-util"] === Shared API for package org.apache.lucene.util -See the <> for a high-level overview of all packages. 
+See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-BytesRef]] ==== BytesRef @@ -7974,7 +7976,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-org-elasticsearch-common-geo"] === Shared API for package org.elasticsearch.common.geo -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-GeoPoint]] ==== GeoPoint @@ -7987,7 +7989,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-org-elasticsearch-index-fielddata"] === Shared API for package org.elasticsearch.index.fielddata -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-ScriptDocValues-Booleans]] ==== ScriptDocValues.Booleans @@ -8386,7 +8388,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-org-elasticsearch-index-mapper"] === Shared API for package org.elasticsearch.index.mapper -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-IpFieldMapper-IpFieldType-IpScriptDocValues]] ==== IpFieldMapper.IpFieldType.IpScriptDocValues @@ -8445,7 +8447,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-org-elasticsearch-index-query"] === Shared API for package org.elasticsearch.index.query -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-IntervalFilterScript-Interval]] ==== IntervalFilterScript.Interval @@ -8459,7 +8461,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-org-elasticsearch-index-similarity"] === Shared API for package org.elasticsearch.index.similarity -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-ScriptedSimilarity-Doc]] ==== ScriptedSimilarity.Doc @@ -8499,7 +8501,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-org-elasticsearch-painless-api"] === Shared API for package org.elasticsearch.painless.api -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-Debug]] ==== Debug @@ -8511,7 +8513,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-org-elasticsearch-script"] === Shared API for package org.elasticsearch.script -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. [[painless-api-reference-shared-JodaCompatibleZonedDateTime]] ==== JodaCompatibleZonedDateTime @@ -8594,7 +8596,7 @@ See the <> for a high-level overview [role="exclude",id="painless-api-reference-shared-org-elasticsearch-search-lookup"] === Shared API for package org.elasticsearch.search.lookup -See the <> for a high-level overview of all packages. +See the <> for a high-level overview of all packages and classes. 
[[painless-api-reference-shared-FieldLookup]] ==== FieldLookup diff --git a/docs/painless/painless-contexts/painless-analysis-predicate-context.asciidoc b/docs/painless/painless-contexts/painless-analysis-predicate-context.asciidoc index 07914b671e781..3edb1080611d2 100644 --- a/docs/painless/painless-contexts/painless-analysis-predicate-context.asciidoc +++ b/docs/painless/painless-contexts/painless-analysis-predicate-context.asciidoc @@ -40,4 +40,4 @@ analysis chain matches a predicate. *API* -The standard <> is available. \ No newline at end of file +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-bucket-script-agg-context.asciidoc b/docs/painless/painless-contexts/painless-bucket-script-agg-context.asciidoc index 5a5306016945d..f6e2a4b7a5a91 100644 --- a/docs/painless/painless-contexts/painless-bucket-script-agg-context.asciidoc +++ b/docs/painless/painless-contexts/painless-bucket-script-agg-context.asciidoc @@ -18,7 +18,7 @@ numeric:: ==== API -The standard <> is available. +The standard <> is available. ==== Example diff --git a/docs/painless/painless-contexts/painless-bucket-selector-agg-context.asciidoc b/docs/painless/painless-contexts/painless-bucket-selector-agg-context.asciidoc index 69fbce1d0828f..2d854c880cdcd 100644 --- a/docs/painless/painless-contexts/painless-bucket-selector-agg-context.asciidoc +++ b/docs/painless/painless-contexts/painless-bucket-selector-agg-context.asciidoc @@ -19,6 +19,10 @@ boolean:: ==== API +The standard <> is available. + +==== Example + To run this example, first follow the steps in <>. diff --git a/docs/painless/painless-contexts/painless-field-context.asciidoc b/docs/painless/painless-contexts/painless-field-context.asciidoc index 15a9f4255232c..5a95e88c68460 100644 --- a/docs/painless/painless-contexts/painless-field-context.asciidoc +++ b/docs/painless/painless-contexts/painless-field-context.asciidoc @@ -25,7 +25,8 @@ a customized value for each document in the results of a query. *API* -The standard <> is available. +Both the standard <> and +<> are available. *Example* diff --git a/docs/painless/painless-contexts/painless-filter-context.asciidoc b/docs/painless/painless-contexts/painless-filter-context.asciidoc index bf4741cfc02fc..eea810f616291 100644 --- a/docs/painless/painless-contexts/painless-filter-context.asciidoc +++ b/docs/painless/painless-contexts/painless-filter-context.asciidoc @@ -23,7 +23,7 @@ query to include and exclude documents. *API* -The standard <> is available. +The standard <> is available. *Example* diff --git a/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc b/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc index 546057ab1a0b8..858949deb5602 100644 --- a/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc +++ b/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc @@ -38,7 +38,8 @@ void:: *API* -The standard <> is available. +Both the standard <> and +<> are available. *Example* diff --git a/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc index 5cc9ad8ecbb93..2d5edf6ab4cd8 100644 --- a/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc +++ b/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc @@ -24,4 +24,4 @@ optional as part of a full metric aggregation. *API* -The standard <> is available. 
+The standard <> is available. diff --git a/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc index 8c0fddfa33961..78ebac79c65ee 100644 --- a/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc +++ b/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc @@ -29,4 +29,4 @@ full metric aggregation. *API* -The standard <> is available. +The standard <> is available. diff --git a/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc index a34308aa93887..485d4da8439d8 100644 --- a/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc +++ b/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc @@ -44,4 +44,4 @@ part of a full metric aggregation. *API* -The standard <> is available. +The standard <> is available. diff --git a/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc index b492207ef4468..ba6b6dabdc924 100644 --- a/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc +++ b/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc @@ -25,4 +25,4 @@ specified) and is optional as part of a full metric aggregation. *API* -The standard <> is available. +The standard <> is available. diff --git a/docs/painless/painless-contexts/painless-min-should-match-context.asciidoc b/docs/painless/painless-contexts/painless-min-should-match-context.asciidoc index cd476481381a6..896e882c7837d 100644 --- a/docs/painless/painless-contexts/painless-min-should-match-context.asciidoc +++ b/docs/painless/painless-contexts/painless-min-should-match-context.asciidoc @@ -25,7 +25,7 @@ results. *API* -The standard <> is available. +The standard <> is available. *Example* diff --git a/docs/painless/painless-contexts/painless-reindex-context.asciidoc b/docs/painless/painless-contexts/painless-reindex-context.asciidoc index ae5445183a6ad..54791f2fa50db 100644 --- a/docs/painless/painless-contexts/painless-reindex-context.asciidoc +++ b/docs/painless/painless-contexts/painless-reindex-context.asciidoc @@ -65,4 +65,4 @@ reindexed into a target index. *API* -The standard <> is available. \ No newline at end of file +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-score-context.asciidoc b/docs/painless/painless-contexts/painless-score-context.asciidoc index 2bec9021c1720..e5d3c538b4512 100644 --- a/docs/painless/painless-contexts/painless-score-context.asciidoc +++ b/docs/painless/painless-contexts/painless-score-context.asciidoc @@ -26,7 +26,8 @@ score to documents returned from a query. *API* -The standard <> is available. +Both the standard <> and +<> are available. *Example* diff --git a/docs/painless/painless-contexts/painless-similarity-context.asciidoc b/docs/painless/painless-contexts/painless-similarity-context.asciidoc index 98eff19a1943e..e48da21195dd7 100644 --- a/docs/painless/painless-contexts/painless-similarity-context.asciidoc +++ b/docs/painless/painless-contexts/painless-similarity-context.asciidoc @@ -56,4 +56,4 @@ uses synonyms. *API* -The standard <> is available. \ No newline at end of file +The standard <> is available. 
\ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-sort-context.asciidoc b/docs/painless/painless-contexts/painless-sort-context.asciidoc index 64c17ad07a664..4a7743dc48800 100644 --- a/docs/painless/painless-contexts/painless-sort-context.asciidoc +++ b/docs/painless/painless-contexts/painless-sort-context.asciidoc @@ -25,7 +25,7 @@ Use a Painless script to *API* -The standard <> is available. +The standard <> is available. *Example* diff --git a/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc b/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc index ba42105f2e901..c9e72ac5b9288 100644 --- a/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc +++ b/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc @@ -51,7 +51,7 @@ result of query. *API* -The standard <> is available. +The standard <> is available. *Example* diff --git a/docs/painless/painless-contexts/painless-update-context.asciidoc b/docs/painless/painless-contexts/painless-update-context.asciidoc index 6ed8c2f7c13a3..a83bf47de1f78 100644 --- a/docs/painless/painless-contexts/painless-update-context.asciidoc +++ b/docs/painless/painless-contexts/painless-update-context.asciidoc @@ -52,7 +52,7 @@ add, modify, or delete fields within a single document. *API* -The standard <> is available. +The standard <> is available. *Example* diff --git a/docs/painless/painless-contexts/painless-watcher-condition-context.asciidoc b/docs/painless/painless-contexts/painless-watcher-condition-context.asciidoc index 91ab51561ef88..8e4924d426b0c 100644 --- a/docs/painless/painless-contexts/painless-watcher-condition-context.asciidoc +++ b/docs/painless/painless-contexts/painless-watcher-condition-context.asciidoc @@ -14,7 +14,7 @@ include::painless-watcher-context-variables.asciidoc[] *API* -The standard <> is available. +The standard <> is available. *Example* diff --git a/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc b/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc index addfd11cab92e..71009d819a42d 100644 --- a/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc +++ b/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc @@ -33,8 +33,7 @@ The following variables are available in all watcher contexts. *API* - -The standard <> is available. +The standard <> is available. To run this example, first follow the steps in <>. diff --git a/docs/painless/painless-contexts/painless-watcher-transform-context.asciidoc b/docs/painless/painless-contexts/painless-watcher-transform-context.asciidoc index 92012720aa69e..ec0ac6519a44f 100644 --- a/docs/painless/painless-contexts/painless-watcher-transform-context.asciidoc +++ b/docs/painless/painless-contexts/painless-watcher-transform-context.asciidoc @@ -14,7 +14,7 @@ include::painless-watcher-context-variables.asciidoc[] *API* -The standard <> is available. +The standard <> is available. *Example* diff --git a/docs/painless/painless-contexts/painless-weight-context.asciidoc b/docs/painless/painless-contexts/painless-weight-context.asciidoc index 9b4a47bc113b4..44438a1225ea6 100644 --- a/docs/painless/painless-contexts/painless-weight-context.asciidoc +++ b/docs/painless/painless-contexts/painless-weight-context.asciidoc @@ -39,4 +39,4 @@ Queries that contain multiple terms calculate a separate weight for each term. *API* -The standard <> is available. +The standard <> is available. 
diff --git a/docs/plugins/analysis-icu.asciidoc b/docs/plugins/analysis-icu.asciidoc index b6299139992d1..69d741fa79a90 100644 --- a/docs/plugins/analysis-icu.asciidoc +++ b/docs/plugins/analysis-icu.asciidoc @@ -29,7 +29,7 @@ include::install_remove.asciidoc[] [[analysis-icu-analyzer]] ==== ICU Analyzer -Performs basic normalization, tokenization and character folding, using the +The `icu_analyzer` analyzer performs basic normalization, tokenization and character folding, using the `icu_normalizer` char filter, `icu_tokenizer` and `icu_normalizer` token filter The following parameters are accepted: diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index 2ee40b24a8548..2ee9025b6ded8 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -10,122 +10,194 @@ that here the interval can be specified using date/time expressions. Time-based data requires special support because time-based intervals are not always a fixed length. -==== Setting intervals - -There seems to be no limit to the creativity we humans apply to setting our -clocks and calendars. We've invented leap years and leap seconds, standard and -daylight savings times, and timezone offsets of 30 or 45 minutes rather than a -full hour. While these creations help keep us in sync with the cosmos and our -environment, they can make specifying time intervals accurately a real challenge. -The only universal truth our researchers have yet to disprove is that a -millisecond is always the same duration, and a second is always 1000 milliseconds. -Beyond that, things get complicated. - -Generally speaking, when you specify a single time unit, such as 1 hour or 1 day, you -are working with a _calendar interval_, but multiples, such as 6 hours or 3 days, are -_fixed-length intervals_. - -For example, a specification of 1 day (1d) from now is a calendar interval that -means "at -this exact time tomorrow" no matter the length of the day. A change to or from -daylight savings time that results in a 23 or 25 hour day is compensated for and the -specification of "this exact time tomorrow" is maintained. But if you specify 2 or -more days, each day must be of the same fixed duration (24 hours). In this case, if -the specified interval includes the change to or from daylight savings time, the -interval will end an hour sooner or later than you expect. - -There are similar differences to consider when you specify single versus multiple -minutes or hours. Multiple time periods longer than a day are not supported. - -Here are the valid time specifications and their meanings: +==== Calendar and Fixed intervals -milliseconds (ms) :: -Fixed length interval; supports multiples. +When configuring a date histogram aggregation, the interval can be specified +in two manners: calendar-aware time intervals, and fixed time intervals. -seconds (s) :: -1000 milliseconds; fixed length interval (except for the last second of a -minute that contains a leap-second, which is 2000ms long); supports multiples. +Calendar-aware intervals understand that daylight savings changes the length +of specific days, months have different amounts of days, and leap seconds can +be tacked onto a particular year. -minutes (m) :: +Fixed intervals are, by contrast, always multiples of SI units and do not change +based on calendaring context. 
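As a quick illustration of that distinction (the request-level examples follow below), here is a hedged Java sketch using the high-level client's aggregation builders. It assumes the `calendarInterval` and `fixedInterval` builder methods that accompany the new request parameters; index and field names are placeholders.

[source,java]
--------------------------------------------------
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

public final class DateHistogramIntervalSketch {

    public static void main(final String[] args) {
        // Calendar-aware: one bucket per calendar month, compensating for month lengths and DST shifts.
        DateHistogramAggregationBuilder calendar = AggregationBuilders.dateHistogram("sales_over_time")
                .field("date")
                .calendarInterval(DateHistogramInterval.MONTH);

        // Fixed: buckets of exactly 30 * 24 hours, wherever they fall on the calendar.
        DateHistogramAggregationBuilder fixed = AggregationBuilders.dateHistogram("sales_over_time")
                .field("date")
                .fixedInterval(DateHistogramInterval.days(30));

        System.out.println(calendar);
        System.out.println(fixed);
    }
}
--------------------------------------------------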
+ +[NOTE] +.Combined `interval` field is deprecated +================================== +deprecated[7.2, `interval` field is deprecated] Historically both calendar and fixed +intervals were configured in a single `interval` field, which led to confusing +semantics. Specifying `1d` would be assumed as a calendar-aware time, +whereas `2d` would be interpreted as fixed time. To get "one day" of fixed time, +the user would need to specify the next smaller unit (in this case, `24h`). + +This combined behavior was often unknown to users, and even when knowledgeable about +the behavior it was difficult to use and confusing. + +This behavior has been deprecated in favor of two new, explicit fields: `calendar_interval` +and `fixed_interval`. + +By forcing a choice between calendar and intervals up front, the semantics of the interval +are clear to the user immediately and there is no ambiguity. The old `interval` field +will be removed in the future. +================================== + +===== Calendar Intervals + +Calendar-aware intervals are configured with the `calendar_interval` parameter. +Calendar intervals can only be specified in "singular" quantities of the unit +(`1d`, `1M`, etc). Multiples, such as `2d`, are not supported and will throw an exception. + +The accepted units for calendar intervals are: + +minute (`m`, `1m`) :: All minutes begin at 00 seconds. -* One minute (1m) is the interval between 00 seconds of the first minute and 00 +One minute is the interval between 00 seconds of the first minute and 00 seconds of the following minute in the specified timezone, compensating for any -intervening leap seconds, so that the number of minutes and seconds past the -hour is the same at the start and end. -* Multiple minutes (__n__m) are intervals of exactly 60x1000=60,000 milliseconds -each. +intervening leap seconds, so that the number of minutes and seconds past the +hour is the same at the start and end. -hours (h) :: +hours (`h`, `1h`) :: All hours begin at 00 minutes and 00 seconds. -* One hour (1h) is the interval between 00:00 minutes of the first hour and 00:00 +One hour (1h) is the interval between 00:00 minutes of the first hour and 00:00 minutes of the following hour in the specified timezone, compensating for any intervening leap seconds, so that the number of minutes and seconds past the hour -is the same at the start and end. -* Multiple hours (__n__h) are intervals of exactly 60x60x1000=3,600,000 milliseconds -each. +is the same at the start and end. -days (d) :: + +days (`d`, `1d`) :: All days begin at the earliest possible time, which is usually 00:00:00 (midnight). -* One day (1d) is the interval between the start of the day and the start of +One day (1d) is the interval between the start of the day and the start of of the following day in the specified timezone, compensating for any intervening time changes. -* Multiple days (__n__d) are intervals of exactly 24x60x60x1000=86,400,000 -milliseconds each. -weeks (w) :: +week (`w`, `1w`) :: -* One week (1w) is the interval between the start day_of_week:hour:minute:second -and the same day of the week and time of the following week in the specified +One week is the interval between the start day_of_week:hour:minute:second +and the same day of the week and time of the following week in the specified timezone. -* Multiple weeks (__n__w) are not supported. 
-months (M) :: +month (`M`, `1M`) :: -* One month (1M) is the interval between the start day of the month and time of +One month is the interval between the start day of the month and time of day and the same day of the month and time of the following month in the specified timezone, so that the day of the month and time of day are the same at the start and end. -* Multiple months (__n__M) are not supported. -quarters (q) :: +quarter (`q`, `1q`) :: -* One quarter (1q) is the interval between the start day of the month and +One quarter (1q) is the interval between the start day of the month and time of day and the same day of the month and time of day three months later, so that the day of the month and time of day are the same at the start and end. + -* Multiple quarters (__n__q) are not supported. -years (y) :: +year (`y`, `1y`) :: -* One year (1y) is the interval between the start day of the month and time of -day and the same day of the month and time of day the following year in the +One year (1y) is the interval between the start day of the month and time of +day and the same day of the month and time of day the following year in the specified timezone, so that the date and time are the same at the start and end. + -* Multiple years (__n__y) are not supported. -NOTE: -In all cases, when the specified end time does not exist, the actual end time is -the closest available time after the specified end. +===== Calendar Interval Examples +As an example, here is an aggregation requesting bucket intervals of a month in calendar time: -Widely distributed applications must also consider vagaries such as countries that -start and stop daylight savings time at 12:01 A.M., so end up with one minute of -Sunday followed by an additional 59 minutes of Saturday once a year, and countries -that decide to move across the international date line. Situations like -that can make irregular timezone offsets seem easy. +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs" : { + "sales_over_time" : { + "date_histogram" : { + "field" : "date", + "calendar_interval" : "month" + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] -As always, rigorous testing, especially around time-change events, will ensure -that your time interval specification is -what you intend it to be. +If you attempt to use multiples of calendar units, the aggregation will fail because only +singular calendar units are supported: -WARNING: -To avoid unexpected results, all connected servers and clients must sync to a -reliable network time service. +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs" : { + "sales_over_time" : { + "date_histogram" : { + "field" : "date", + "calendar_interval" : "2d" + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] +// TEST[catch:bad_request] -==== Examples +[source,js] +-------------------------------------------------- +{ + "error" : { + "root_cause" : [...], + "type" : "x_content_parse_exception", + "reason" : "[1:82] [date_histogram] failed to parse field [calendar_interval]", + "caused_by" : { + "type" : "illegal_argument_exception", + "reason" : "The supplied interval [2d] could not be parsed as a calendar interval.", + "stack_trace" : "java.lang.IllegalArgumentException: The supplied interval [2d] could not be parsed as a calendar interval." 
+ } + } +} + +-------------------------------------------------- +// NOTCONSOLE + +===== Fixed Intervals + +Fixed intervals are configured with the `fixed_interval` parameter. + +In contrast to calendar-aware intervals, fixed intervals are a fixed number of SI +units and never deviate, regardless of where they fall on the calendar. One second +is always composed of 1000ms. This allows fixed intervals to be specified in +any multiple of the supported units. + +However, it means fixed intervals cannot express other units such as months, +since the duration of a month is not a fixed quantity. Attempting to specify +a calendar interval like month or quarter will throw an exception. + +The accepted units for fixed intervals are: + +milliseconds (ms) :: + +seconds (s) :: +Defined as 1000 milliseconds each + +minutes (m) :: +All minutes begin at 00 seconds. -Requesting bucket intervals of a month. +Defined as 60 seconds each (60,000 milliseconds) + +hours (h) :: +All hours begin at 00 minutes and 00 seconds. +Defined as 60 minutes each (3,600,000 milliseconds) + +days (d) :: +All days begin at the earliest possible time, which is usually 00:00:00 +(midnight). + +Defined as 24 hours (86,400,000 milliseconds) + +===== Fixed Interval Examples + +If we try to recreate the "month" `calendar_interval` from earlier, we can approximate that with +30 fixed days: [source,js] -------------------------------------------------- @@ -135,7 +207,7 @@ POST /sales/_search?size=0 "sales_over_time" : { "date_histogram" : { "field" : "date", - "calendar_interval" : "month" + "fixed_interval" : "30d" } } } @@ -144,11 +216,7 @@ POST /sales/_search?size=0 // CONSOLE // TEST[setup:sales] -You can also specify time values using abbreviations supported by -<> parsing. -Note that fractional time values are not supported, but you can address this by -shifting to another -time unit (e.g., `1.5h` could instead be specified as `90m`). +But if we try to use a calendar unit that is not supported, such as weeks, we'll get an exception: [source,js] -------------------------------------------------- @@ -158,7 +226,7 @@ POST /sales/_search?size=0 "sales_over_time" : { "date_histogram" : { "field" : "date", - "fixed_interval" : "90m" + "fixed_interval" : "2w" } } } @@ -166,6 +234,50 @@ POST /sales/_search?size=0 -------------------------------------------------- // CONSOLE // TEST[setup:sales] +// TEST[catch:bad_request] + +[source,js] +-------------------------------------------------- +{ + "error" : { + "root_cause" : [...], + "type" : "x_content_parse_exception", + "reason" : "[1:82] [date_histogram] failed to parse field [fixed_interval]", + "caused_by" : { + "type" : "illegal_argument_exception", + "reason" : "failed to parse setting [date_histogram.fixedInterval] with value [2w] as a time value: unit is missing or unrecognized", + "stack_trace" : "java.lang.IllegalArgumentException: failed to parse setting [date_histogram.fixedInterval] with value [2w] as a time value: unit is missing or unrecognized" + } + } +} + +-------------------------------------------------- +// NOTCONSOLE + +===== Notes + +In all cases, when the specified end time does not exist, the actual end time is +the closest available time after the specified end. 
+ +Widely distributed applications must also consider vagaries such as countries that +start and stop daylight savings time at 12:01 A.M., so end up with one minute of +Sunday followed by an additional 59 minutes of Saturday once a year, and countries +that decide to move across the international date line. Situations like +that can make irregular timezone offsets seem easy. + +As always, rigorous testing, especially around time-change events, will ensure +that your time interval specification is +what you intend it to be. + +WARNING: +To avoid unexpected results, all connected servers and clients must sync to a +reliable network time service. + +NOTE: fractional time values are not supported, but you can address this by +shifting to another time unit (e.g., `1.5h` could instead be specified as `90m`). + +NOTE: You can also specify time values using abbreviations supported by +<> parsing. ===== Keys @@ -522,8 +634,6 @@ control the order using the `order` setting. This setting supports the same `order` functionality as <>. -deprecated[6.0.0, Use `_key` instead of `_time` to order buckets by their dates/keys] - ===== Using a script to aggregate by day of the week When you need to aggregate the results by day of the week, use a script that diff --git a/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc index a92e2476ad77e..50462cc2871d0 100644 --- a/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/multiplexer-tokenfilter.asciidoc @@ -116,9 +116,8 @@ And it'd respond: duplicate of this token it has been removed from the token stream NOTE: The synonym and synonym_graph filters use their preceding analysis chain to -parse and analyse their synonym lists, and ignore any token filters in the chain -that produce multiple tokens at the same position. This means that any filters -within the multiplexer will be ignored for the purpose of synonyms. If you want to -use filters contained within the multiplexer for parsing synonyms (for example, to -apply stemming to the synonym lists), then you should append the synonym filter -to the relevant multiplexer filter list. +parse and analyse their synonym lists, and will throw an exception if that chain +contains token filters that produce multiple tokens at the same position. +If you want to apply synonyms to a token stream containing a multiplexer, then you +should append the synonym filter to each relevant multiplexer filter list, rather than +placing it after the multiplexer in the main token chain definition. diff --git a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc index 2285c6f6e8989..b434129626d88 100644 --- a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc @@ -188,6 +188,10 @@ parsing synonyms, e.g. `asciifolding` will only produce the folded version of th token. Others, e.g. `multiplexer`, `word_delimiter_graph` or `ngram` will throw an error. +If you need to build analyzers that include both multi-token filters and synonym +filters, consider using the <> filter, +with the multi-token filters in one branch and the synonym filter in the other. + WARNING: The synonym rules should not contain words that are removed by a filter that appears after in the chain (a `stop` filter for instance). 
Removing a term from a synonym rule breaks the matching at query time. diff --git a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc index 1107236194655..139f7c3ab0ad0 100644 --- a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc @@ -177,3 +177,7 @@ multiple versions of a token may choose which version of the token to emit when parsing synonyms, e.g. `asciifolding` will only produce the folded version of the token. Others, e.g. `multiplexer`, `word_delimiter_graph` or `ngram` will throw an error. + +If you need to build analyzers that include both multi-token filters and synonym +filters, consider using the <> filter, +with the multi-token filters in one branch and the synonym filter in the other. diff --git a/docs/reference/cat/thread_pool.asciidoc b/docs/reference/cat/thread_pool.asciidoc index d1ea1fad88515..03854fae2f61f 100644 --- a/docs/reference/cat/thread_pool.asciidoc +++ b/docs/reference/cat/thread_pool.asciidoc @@ -59,7 +59,7 @@ ml_autodetect (default distro only) ml_datafeed (default distro only) ml_utility (default distro only) refresh -rollup_indexing (default distro only)` +rollup_indexing (default distro only) search security-token-key (default distro only) snapshot diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index 4bd3c2c9647a5..bb24dffd40f7d 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -125,11 +125,6 @@ information that concern the file system: `fs.data.available_in_bytes`:: Total number of bytes available to this Java virtual machine on this file store -`fs.data.spins` (Linux only):: - Indicates if the file store is backed by spinning storage. - `null` means we could not determine it, `true` means the device possibly spins - and `false` means it does not (ex: solid-state disks). - `fs.io_stats.devices` (Linux only):: Array of disk metrics for each device that is backing an Elasticsearch data path. These disk metrics are probed periodically diff --git a/docs/reference/commands/index.asciidoc b/docs/reference/commands/index.asciidoc index a13ea58c27d3e..e778366aa58b9 100644 --- a/docs/reference/commands/index.asciidoc +++ b/docs/reference/commands/index.asciidoc @@ -9,7 +9,6 @@ tasks from the command line: * <> * <> -* <> * <> * <> * <> @@ -21,7 +20,6 @@ tasks from the command line: include::certgen.asciidoc[] include::certutil.asciidoc[] -include::migrate-tool.asciidoc[] include::node-tool.asciidoc[] include::saml-metadata.asciidoc[] include::setup-passwords.asciidoc[] diff --git a/docs/reference/commands/migrate-tool.asciidoc b/docs/reference/commands/migrate-tool.asciidoc deleted file mode 100644 index a1903ac69dacf..0000000000000 --- a/docs/reference/commands/migrate-tool.asciidoc +++ /dev/null @@ -1,110 +0,0 @@ -[role="xpack"] -[testenv="gold+"] -[[migrate-tool]] -== elasticsearch-migrate - -The `elasticsearch-migrate` command migrates existing file-based users and roles -to the native realm. From 5.0 onward, you should use the `native` realm to -manage roles and local users. 
- - -[float] -=== Synopsis - -[source,shell] --------------------------------------------------- -bin/elasticsearch-migrate -(native (-U, --url ) -[-h, --help] [-E ] -[-n, --users ] [-r, --roles ] -[-u, --username ] [-p, --password ] -[-s, --silent] [-v, --verbose]) --------------------------------------------------- - -[float] -=== Description - -NOTE: When migrating from Shield 2.x, the `elasticsearch-migrate` tool should be -run prior to upgrading to ensure all roles can be migrated as some may be in a -deprecated format that {xpack} cannot read. The `migrate` tool is available in -Shield 2.4.0 and higher. - -The `elasticsearch-migrate` tool loads the existing file-based users and roles -and calls the user and roles APIs to add them to the native realm. You can -migrate all users and roles, or specify the ones you want to migrate. Users and -roles that already exist in the `native` realm are not replaced or -overridden. If the names you specify with the `--users` and `--roles` options -don't exist in the `file` realm, they are skipped. - -[float] -[[migrate-tool-options]] -=== Parameters -The `native` subcommand supports the following options: - -`-E `:: -Configures a setting. - -`-h, --help`:: -Returns all of the command parameters. - -`-n`, `--users`:: -Comma-separated list of the users that you want to migrate. If this parameter is -not specified, all users are migrated. - -`-p`, `--password`:: -Password to use for authentication with {es}. -//TBD: What is the default if this isn't specified? - -`-r`, `--roles`:: -Comma-separated list of the roles that you want to migrate. If this parameter is -not specified, all roles are migrated. - -`-s, --silent`:: Shows minimal output. - -`-U`, `--url`:: -Endpoint URL of the {es} cluster to which you want to migrate the -file-based users and roles. This parameter is required. - -`-u`, `--username`:: -Username to use for authentication with {es}. -//TBD: What is the default if this isn't specified? - -`-v, --verbose`:: Shows verbose output. - -[float] -=== Examples - -Run the `elasticsearch-migrate` tool when {xpack} is installed. For example: - -[source, sh] ----------------------------------------------------------------------- -$ bin/elasticsearch-migrate native -U http://localhost:9200 -u elastic --p x-pack-test-password -n lee,foo -r role1,role2,role3,role4,foo -starting migration of users and roles... -importing users from [/home/es/config/shield/users]... -found existing users: [test_user, joe3, joe2] -migrating user [lee] -{"user":{"created":true}} -no user [foo] found, skipping -importing roles from [/home/es/config/shield/roles.yml]... -found existing roles: [marvel_user, role_query_fields, admin_role, role3, admin, -remote_marvel_agent, power_user, role_new_format_name_array, role_run_as, -logstash, role_fields, role_run_as1, role_new_format, kibana4_server, user, -transport_client, role1.ab, role_query] -migrating role [role1] -{"role":{"created":true}} -migrating role [role2] -{"role":{"created":true}} -role [role3] already exists, skipping -no role [foo] found, skipping -users and roles imported. ----------------------------------------------------------------------- - -Additionally, the `-E` flag can be used to specify additional settings. 
For example -to specify a different configuration directory, the command would look like: - -[source, sh] ----------------------------------------------------------------------- -$ bin/elasticsearch-migrate native -U http://localhost:9200 -u elastic --p x-pack-test-password -E path.conf=/etc/elasticsearch ----------------------------------------------------------------------- diff --git a/docs/reference/commands/node-tool.asciidoc b/docs/reference/commands/node-tool.asciidoc index f070d11aa8fb0..ed810a4dac014 100644 --- a/docs/reference/commands/node-tool.asciidoc +++ b/docs/reference/commands/node-tool.asciidoc @@ -4,14 +4,15 @@ The `elasticsearch-node` command enables you to perform certain unsafe operations on a node that are only possible while it is shut down. This command allows you to adjust the <> of a node and may be able to -recover some data after a disaster. +recover some data after a disaster or start a node even if it is incompatible +with the data on disk. [float] === Synopsis [source,shell] -------------------------------------------------- -bin/elasticsearch-node repurpose|unsafe-bootstrap|detach-cluster +bin/elasticsearch-node repurpose|unsafe-bootstrap|detach-cluster|override-version [--ordinal ] [-E ] [-h, --help] ([-s, --silent] | [-v, --verbose]) -------------------------------------------------- @@ -19,7 +20,7 @@ bin/elasticsearch-node repurpose|unsafe-bootstrap|detach-cluster [float] === Description -This tool has three modes: +This tool has four modes: * `elasticsearch-node repurpose` can be used to delete unwanted data from a node if it used to be a <> or a @@ -36,6 +37,11 @@ This tool has three modes: cluster bootstrapping was not possible, it also enables you to move nodes into a brand-new cluster. +* `elasticsearch-node override-version` enables you to start up a node + even if the data in the data path was written by an incompatible version of + {es}. This may sometimes allow you to downgrade to an earlier version of + {es}. + [[node-tool-repurpose]] [float] ==== Changing the role of a node @@ -109,6 +115,25 @@ way forward that does not risk data loss, but it may be possible to use the `elasticsearch-node` tool to construct a new cluster that contains some of the data from the failed cluster. +[[node-tool-override-version]] +[float] +==== Bypassing version checks + +The data that {es} writes to disk is designed to be read by the current version +and a limited set of future versions. It cannot generally be read by older +versions, nor by versions that are more than one major version newer. The data +stored on disk includes the version of the node that wrote it, and {es} checks +that it is compatible with this version when starting up. + +In rare circumstances it may be desirable to bypass this check and start up an +{es} node using data that was written by an incompatible version. This may not +work if the format of the stored data has changed, and it is a risky process +because it is possible for the format to change in ways that {es} may +misinterpret, silently leading to data loss. + +To bypass this check, you can use the `elasticsearch-node override-version` +tool to overwrite the version number stored in the data path with the current +version, causing {es} to believe that it is compatible with the on-disk data. [[node-tool-unsafe-bootstrap]] [float] @@ -262,6 +287,9 @@ one-node cluster. `detach-cluster`:: Specifies to unsafely detach this node from its cluster so it can join a different cluster. 
+`override-version`:: Overwrites the version number stored in the data path so +that a node can start despite being incompatible with the on-disk data. + `--ordinal `:: If there is <> then this specifies which node to target. Defaults to `0`, meaning to use the first node in the data path. @@ -423,3 +451,32 @@ Do you want to proceed? Confirm [y/N] y Node was successfully detached from the cluster ---- + +[float] +==== Bypassing version checks + +Run the `elasticsearch-node override-version` command to overwrite the version +stored in the data path so that a node can start despite being incompatible +with the data stored in the data path: + +[source, txt] +---- +node$ ./bin/elasticsearch-node override-version + + WARNING: Elasticsearch MUST be stopped before running this tool. + +This data path was last written by Elasticsearch version [x.x.x] and may no +longer be compatible with Elasticsearch version [y.y.y]. This tool will bypass +this compatibility check, allowing a version [y.y.y] node to start on this data +path, but a version [y.y.y] node may not be able to read this data or may read +it incorrectly leading to data loss. + +You should not use this tool. Instead, continue to use a version [x.x.x] node +on this data path. If necessary, you can use reindex-from-remote to copy the +data from here into an older cluster. + +Do you want to proceed? + +Confirm [y/N] y +Successfully overwrote this node's metadata to bypass its version compatibility checks. +---- diff --git a/docs/reference/data-frames/apis/get-transform-stats.asciidoc b/docs/reference/data-frames/apis/get-transform-stats.asciidoc index 85e5001b13a9a..09c383f249488 100644 --- a/docs/reference/data-frames/apis/get-transform-stats.asciidoc +++ b/docs/reference/data-frames/apis/get-transform-stats.asciidoc @@ -65,27 +65,35 @@ The API returns the following results: { "id" : "ecommerce_transform", "state" : { + "task_state" : "started", "indexer_state" : "started", - "task_state": "started", - "current_position" : { - "customer_id" : "9" - }, - "generation" : 1 + "checkpoint" : 1, + "progress" : { + "total_docs" : 1220, + "docs_remaining" : 0, + "percent_complete" : 100.0 + } }, "stats" : { - "pages_processed" : 0, - "documents_processed" : 0, - "documents_indexed" : 0, - "trigger_count" : 0, - "index_time_in_ms" : 0, - "index_total" : 0, + "pages_processed" : 2, + "documents_processed" : 1220, + "documents_indexed" : 13, + "trigger_count" : 1, + "index_time_in_ms" : 19, + "index_total" : 1, "index_failures" : 0, - "search_time_in_ms" : 0, - "search_total" : 0, + "search_time_in_ms" : 52, + "search_total" : 2, "search_failures" : 0 + }, + "checkpointing" : { + "current" : { + "timestamp_millis" : 1557474786393 + }, + "operations_behind" : 0 } } ] } ---- -// TESTRESPONSE \ No newline at end of file +// TESTRESPONSE diff --git a/docs/reference/data-frames/apis/get-transform.asciidoc b/docs/reference/data-frames/apis/get-transform.asciidoc index 85e56aa21cdd1..e2b5c5eccb7da 100644 --- a/docs/reference/data-frames/apis/get-transform.asciidoc +++ b/docs/reference/data-frames/apis/get-transform.asciidoc @@ -75,10 +75,20 @@ The API returns the following results: "transforms" : [ { "id" : "ecommerce_transform", - "source" : "kibana_sample_data_ecommerce", - "dest" : "kibana_sample_data_ecommerce_transform", - "query" : { - "match_all" : { } + "source" : { + "index" : [ + "kibana_sample_data_ecommerce" + ], + "query" : { + "term" : { + "geoip.continent_name" : { + "value" : "Asia" + } + } + } + }, + "dest" : { + "index" : 
"kibana_sample_data_ecommerce_transform" }, "pivot" : { "group_by" : { @@ -95,7 +105,8 @@ The API returns the following results: } } } - } + }, + "description" : "Maximum priced ecommerce data by customer_id in Asia" } ] } diff --git a/docs/reference/data-frames/apis/put-transform.asciidoc b/docs/reference/data-frames/apis/put-transform.asciidoc index 222d93dfe4256..f452c38ab4c94 100644 --- a/docs/reference/data-frames/apis/put-transform.asciidoc +++ b/docs/reference/data-frames/apis/put-transform.asciidoc @@ -15,7 +15,13 @@ Instantiates a {dataframe-transform}. `PUT _data_frame/transforms/` -//===== Description +===== Description + +IMPORTANT: You must use {kib} or this API to create a {dataframe-transform}. + Do not put a {dataframe-transform} directly into any + `.data-frame-internal*` indices using the Elasticsearch index API. + If {es} {security-features} are enabled, do not give users any + privileges on `.data-frame-internal*` indices. ==== Path Parameters @@ -27,12 +33,12 @@ Instantiates a {dataframe-transform}. ==== Request Body -`source`:: (object) The source configuration, consisting of `index` and optionally +`source` (required):: (object) The source configuration, consisting of `index` and optionally a `query`. -`dest`:: (object) The destination configuration, consisting of `index`. +`dest` (required):: (object) The destination configuration, consisting of `index`. -`pivot`:: Defines the pivot function `group by` fields and the aggregation to +`pivot`:: (object) Defines the pivot function `group by` fields and the aggregation to reduce the data. `description`:: Optional free text description of the data frame transform diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index 6087a1ba3b3b3..e106c2b16eea7 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -287,4 +287,4 @@ See <>. [float] [[bulk-partial-responses]] === Partial responses -To ensure fast responses, the multi search API will respond with partial results if one or more shards fail. See <> for more information. \ No newline at end of file +To ensure fast responses, the bulk API will respond with partial results if one or more shards fail. See <> for more information. \ No newline at end of file diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index b085e081b4dd7..e96c262d67bb4 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -982,6 +982,10 @@ Reindex supports <> to parallelize the reindexing process. This parallelization can improve efficiency and provide a convenient way to break the request down into smaller parts. +NOTE: Reindexing from remote clusters does not support +<> or +<>. + [float] [[docs-reindex-manual-slice]] ==== Manual slicing diff --git a/docs/reference/ilm/policy-definitions.asciidoc b/docs/reference/ilm/policy-definitions.asciidoc index 945f80babad1b..00578ce8c050f 100644 --- a/docs/reference/ilm/policy-definitions.asciidoc +++ b/docs/reference/ilm/policy-definitions.asciidoc @@ -84,6 +84,10 @@ executing. The below list shows the actions which are available in each phase. +NOTE: The order that configured actions are performed in within each phase is +determined by automatically by {ilm-init}, and cannot be changed by changing the +policy definition. 
+ * Hot - <> - <> diff --git a/docs/reference/index-modules/translog.asciidoc b/docs/reference/index-modules/translog.asciidoc index 705fb81b09c8c..6821e583a79dd 100644 --- a/docs/reference/index-modules/translog.asciidoc +++ b/docs/reference/index-modules/translog.asciidoc @@ -29,13 +29,11 @@ The data in the translog is only persisted to disk when the translog is ++fsync++ed and committed. In the event of hardware failure, any data written since the previous translog commit will be lost. -By default, Elasticsearch ++fsync++s and commits the translog every 5 seconds -if `index.translog.durability` is set to `async` or if set to `request` -(default) at the end of every <>, <>, -<>, or <> request. More precisely, if set -to `request`, Elasticsearch will only report success of an index, delete, +By default, `index.translog.durability` is set to `request` meaning that Elasticsearch will only report success of an index, delete, update, or bulk request to the client after the translog has been successfully -++fsync++ed and committed on the primary and on every allocated replica. +++fsync++ed and committed on the primary and on every allocated replica. If +`index.translog.durability` is set to `async` then Elasticsearch ++fsync++s +and commits the translog every `index.translog.sync_interval` (defaults to 5 seconds). The following <> per-index settings control the behaviour of the translog: diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 3c8d8e9abf2bf..1f8abc5675db9 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -864,6 +864,7 @@ include::processors/foreach.asciidoc[] include::processors/geoip.asciidoc[] include::processors/grok.asciidoc[] include::processors/gsub.asciidoc[] +include::processors/html_strip.asciidoc[] include::processors/join.asciidoc[] include::processors/json.asciidoc[] include::processors/kv.asciidoc[] diff --git a/docs/reference/ingest/processors/geoip.asciidoc b/docs/reference/ingest/processors/geoip.asciidoc index f38e62806bb9d..4e866624309a7 100644 --- a/docs/reference/ingest/processors/geoip.asciidoc +++ b/docs/reference/ingest/processors/geoip.asciidoc @@ -27,7 +27,7 @@ uncompressed. The `ingest-geoip` config directory is located at `$ES_HOME/config | `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document |====== -*Depends on what is available in `database_field`: +*Depends on what is available in `database_file`: * If the GeoLite2 City database is used, then the following fields may be added under the `target_field`: `ip`, `country_iso_code`, `country_name`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` diff --git a/docs/reference/ingest/processors/html_strip.asciidoc b/docs/reference/ingest/processors/html_strip.asciidoc new file mode 100644 index 0000000000000..2fa3cd7bbb8ae --- /dev/null +++ b/docs/reference/ingest/processors/html_strip.asciidoc @@ -0,0 +1,26 @@ +[[htmlstrip-processor]] +=== HTML Strip Processor +Removes HTML from field. + +NOTE: Each HTML tag is replaced with a `\n` character. 
+ +[[htmlstrip-options]] +.HTML Strip Options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The string-valued field to remove HTML tags from +| `target_field` | no | `field` | The field to assign the value to, by default `field` is updated in-place +| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document +include::common-options.asciidoc[] +|====== + +[source,js] +-------------------------------------------------- +{ + "html_strip": { + "field": "foo" + } +} +-------------------------------------------------- +// NOTCONSOLE diff --git a/docs/reference/migration/migrate_8_0/reindex.asciidoc b/docs/reference/migration/migrate_8_0/reindex.asciidoc index ebba0f2aebe1d..ef4f5aed147ca 100644 --- a/docs/reference/migration/migrate_8_0/reindex.asciidoc +++ b/docs/reference/migration/migrate_8_0/reindex.asciidoc @@ -7,4 +7,9 @@ re-encode them when generating the search request for the remote host. This leniency has been removed such that all index-names are correctly encoded when reindex generates remote search requests. -Instead, please specify the index-name without any encoding. \ No newline at end of file +Instead, please specify the index-name without any encoding. + +[float] +==== Removal of types + +The `/{index}/{type}/_delete_by_query` and `/{index}/{type}/_update_by_query` REST endpoints have been removed in favour of `/{index}/_delete_by_query` and `/{index}/_update_by_query`, since indexes no longer contain types, these typed endpoints are obsolete. \ No newline at end of file diff --git a/docs/reference/migration/migrate_8_0/search.asciidoc b/docs/reference/migration/migrate_8_0/search.asciidoc new file mode 100644 index 0000000000000..82886d35bc6a5 --- /dev/null +++ b/docs/reference/migration/migrate_8_0/search.asciidoc @@ -0,0 +1,10 @@ +[float] +[[breaking_80_search_changes]] +=== Search Changes + +[float] +==== Removal of types + +The `/{index}/{type}/_search`, `/{index}/{type}/_msearch`, `/{index}/{type}/_search/template` and `/{index}/{type}/_msearch/template` REST endpoints have been removed in favour of `/{index}/_search`, `/{index}/_msearch`, `/{index}/_search/template` and `/{index}/_msearch/template`, since indexes no longer contain types, these typed endpoints are obsolete.. + +The `/{index}/{type}/_termvectors`, `/{index}/{type}/{id}/_termvectors` and `/{index}/{type}/_mtermvectors` REST endpoints have been removed in favour of `/{index}/_termvectors`, `/{index}/{id}/_termvectors` and `/{index}/_mtermvectors`, since indexes no longer contain types, these typed endpoints are obsolete.. \ No newline at end of file diff --git a/docs/reference/migration/migrate_8_0/security.asciidoc b/docs/reference/migration/migrate_8_0/security.asciidoc index fcc0a5b22168a..a7cacef8ff017 100644 --- a/docs/reference/migration/migrate_8_0/security.asciidoc +++ b/docs/reference/migration/migrate_8_0/security.asciidoc @@ -25,3 +25,11 @@ The `xpack.security.authz.store.roles.index.cache.max_size` and been removed. These settings have been redundant and deprecated since the 5.2 release of {es}. +[float] +[[migrate-tool-removed]] +==== The `elasticsearch-migrate` tool has been removed + +The `elasticsearch-migrate` tool provided a way to convert file +realm users and roles into the native realm. It has been deprecated +since 7.2.0. Users and roles should now be created in the native +realm directly. 
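+
+For example, a minimal sketch of creating a user directly in the native realm
+via the security API (the `jdoe` username, password, and role name below are
+purely illustrative):
+
+[source,js]
+--------------------------------------------------
+POST /_security/user/jdoe
+{
+  "password" : "l0ng-r4nd0m-p@ssw0rd",
+  "roles" : [ "monitoring_user" ],
+  "full_name" : "Jane Doe"
+}
+--------------------------------------------------
+// NOTCONSOLE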
diff --git a/docs/reference/modules/discovery/bootstrapping.asciidoc b/docs/reference/modules/discovery/bootstrapping.asciidoc index 0ba7d4b17cef7..2b17af17ec5da 100644 --- a/docs/reference/modules/discovery/bootstrapping.asciidoc +++ b/docs/reference/modules/discovery/bootstrapping.asciidoc @@ -10,11 +10,22 @@ data folder and freshly-started nodes that are joining an existing cluster obtain this information from the cluster's elected master. The initial set of master-eligible nodes is defined in the -<>. This is a list -of the <> or IP addresses of the master-eligible nodes in -the new cluster. If you do not configure `node.name` then it is set to the -node's hostname, so in this case you can use hostnames in -`cluster.initial_master_nodes` too. +<>. This should be +set to a list containing one of the following items for each master-eligible +node: + +- The <> of the node. +- The node's hostname if `node.name` is not set, because `node.name` defaults + to the node's hostname. You must use either the fully-qualified hostname or + the bare hostname <>. +- The IP address of the node's <>, if it is + not possible to use the `node.name` of the node. This is normally the IP + address to which <> resolves but + <>. +- The IP address and port of the node's publish address, in the form `IP:PORT`, + if it is not possible to use the `node.name` of the node and there are + multiple nodes sharing a single IP address. When you start a master-eligible node, you can provide this setting on the command line or in the `elasticsearch.yml` file. After the cluster has formed, @@ -47,9 +58,9 @@ cluster.initial_master_nodes: - master-c -------------------------------------------------- -You can use a mix of IP addresses and node names too. If there is more than one -Elasticsearch node with the same IP address then the transport port must also -be given to specify exactly which node is meant: +If it is not possible to use the names of the nodes then you can also use IP +addresses, or IP addresses and ports, or even a mix of IP addresses and node +names: [source,yaml] -------------------------------------------------- @@ -57,7 +68,7 @@ cluster.initial_master_nodes: - 10.0.10.101 - 10.0.10.102:9300 - 10.0.10.102:9301 - - master-node-hostname + - master-node-name -------------------------------------------------- Like all node settings, it is also possible to specify the initial set of master diff --git a/docs/reference/modules/discovery/discovery-settings.asciidoc b/docs/reference/modules/discovery/discovery-settings.asciidoc index 541cb15bf1108..84472552cfced 100644 --- a/docs/reference/modules/discovery/discovery-settings.asciidoc +++ b/docs/reference/modules/discovery/discovery-settings.asciidoc @@ -31,7 +31,7 @@ Discovery and cluster formation are also affected by the following _expert-level_ settings, although it is not recommended to change any of these from their default values. -[WARNING] If you adjust these settings then your cluster may not form correctly +WARNING: If you adjust these settings then your cluster may not form correctly or may become unstable or intolerant of certain failures. 
`discovery.cluster_formation_warning_timeout`:: diff --git a/docs/reference/modules/indices/search-settings.asciidoc b/docs/reference/modules/indices/search-settings.asciidoc index ad75de1291cdc..30137fa382779 100644 --- a/docs/reference/modules/indices/search-settings.asciidoc +++ b/docs/reference/modules/indices/search-settings.asciidoc @@ -3,6 +3,7 @@ The following _expert_ setting can be set to manage global search limits. +[[indices-query-bool-max-clause-count]] `indices.query.bool.max_clause_count`:: Defaults to `1024`. diff --git a/docs/reference/modules/threadpool.asciidoc b/docs/reference/modules/threadpool.asciidoc index d0f68e37730b8..3bea925f972e5 100644 --- a/docs/reference/modules/threadpool.asciidoc +++ b/docs/reference/modules/threadpool.asciidoc @@ -52,13 +52,14 @@ There are several thread pools, but the important ones include: Mainly for java client executing of action when listener threaded is set to true. Thread pool type is `scaling` with a default max of `min(10, (# of available processors)/2)`. -Changing a specific thread pool can be done by setting its type-specific parameters; for example, changing the `bulk` -thread pool to have more threads: +Changing a specific thread pool can be done by setting its type-specific +parameters; for example, changing the number of threads in the `write` thread +pool: [source,yaml] -------------------------------------------------- thread_pool: - bulk: + write: size: 30 -------------------------------------------------- @@ -87,7 +88,7 @@ full, it will abort the request. [source,yaml] -------------------------------------------------- thread_pool: - bulk: + write: size: 30 queue_size: 1000 -------------------------------------------------- diff --git a/docs/reference/query-dsl/exists-query.asciidoc b/docs/reference/query-dsl/exists-query.asciidoc index dc624c19039ef..9c7553e314c43 100644 --- a/docs/reference/query-dsl/exists-query.asciidoc +++ b/docs/reference/query-dsl/exists-query.asciidoc @@ -1,100 +1,51 @@ [[query-dsl-exists-query]] === Exists Query -Returns documents that have at least one non-`null` value in the original field: +Returns documents that contain a value other than `null` or `[]` in a provided +field. + +[[exists-query-ex-request]] +==== Example request [source,js] --------------------------------------------------- +---- GET /_search { "query": { - "exists" : { "field" : "user" } - } -} --------------------------------------------------- -// CONSOLE - -For instance, these documents would all match the above query: - -[source,js] --------------------------------------------------- -{ "user": "jane" } -{ "user": "" } <1> -{ "user": "-" } <2> -{ "user": ["jane"] } -{ "user": ["jane", null ] } <3> --------------------------------------------------- -// NOTCONSOLE -<1> An empty string is a non-`null` value. -<2> Even though the `standard` analyzer would emit zero tokens, the original field is non-`null`. -<3> At least one non-`null` value is required. - -These documents would *not* match the above query: - -[source,js] --------------------------------------------------- -{ "user": null } -{ "user": [] } <1> -{ "user": [null] } <2> -{ "foo": "bar" } <3> --------------------------------------------------- -// NOTCONSOLE -<1> This field has no values. -<2> At least one non-`null` value is required. -<3> The `user` field is missing completely. 
- -[float] -[[null-value-mapping]] -==== `null_value` mapping - -If the field mapping includes the <> setting -then explicit `null` values are replaced with the specified `null_value`. For -instance, if the `user` field were mapped as follows: - -[source,js] --------------------------------------------------- -PUT /example -{ - "mappings": { - "properties": { - "user": { - "type": "keyword", - "null_value": "_null_" - } + "exists": { + "field": "user" + } } - } } --------------------------------------------------- +---- // CONSOLE -then explicit `null` values would be indexed as the string `_null_`, and the -following docs would match the `exists` filter: - -[source,js] --------------------------------------------------- -{ "user": null } -{ "user": [null] } --------------------------------------------------- -// NOTCONSOLE - -However, these docs--without explicit `null` values--would still have -no values in the `user` field and thus would not match the `exists` filter: +[[exists-query-top-level-params]] +==== Top-level parameters for `exists` +`field`:: +Name of the field you wish to search. ++ +To return a document, this field must exist and contain a value other +than `null` or `[]`. These values can include: ++ +* Empty strings, such as `""` or `"-"` +* Arrays containing `null` and another value, such as `[null, "foo"]` +* A custom <>, defined in field mapping + +[[exists-query-notes]] +==== Notes + +[[find-docs-null-values]] +===== Find documents with null values +To find documents that contain only `null` values or `[]` in a provided field, +use the `must_not` <> with the `exists` +query. + +The following search returns documents that contain only `null` values or `[]` +in the `user` field. [source,js] --------------------------------------------------- -{ "user": [] } -{ "foo": "bar" } --------------------------------------------------- -// NOTCONSOLE - -[[missing-query]] -==== `missing` query - -There isn't a `missing` query. Instead use the `exists` query inside a -`must_not` clause as follows: - -[source,js] --------------------------------------------------- +---- GET /_search { "query": { @@ -107,7 +58,5 @@ GET /_search } } } --------------------------------------------------- -// CONSOLE - -This query returns documents that have no value in the user field. +---- +// CONSOLE \ No newline at end of file diff --git a/docs/reference/query-dsl/ids-query.asciidoc b/docs/reference/query-dsl/ids-query.asciidoc index 70554e1acbf1b..43de8cb7332d3 100644 --- a/docs/reference/query-dsl/ids-query.asciidoc +++ b/docs/reference/query-dsl/ids-query.asciidoc @@ -21,8 +21,5 @@ GET /_search [[ids-query-top-level-parameters]] ==== Top-level parameters for `ids` -[cols="v,v",options="header"] -|====== -|Parameter |Description -|`values` |An array of <>. -|====== \ No newline at end of file +`values`:: +An array of <>. \ No newline at end of file diff --git a/docs/reference/query-dsl/multi-term-rewrite.asciidoc b/docs/reference/query-dsl/multi-term-rewrite.asciidoc index 0d327a40fdea3..391b42ea00791 100644 --- a/docs/reference/query-dsl/multi-term-rewrite.asciidoc +++ b/docs/reference/query-dsl/multi-term-rewrite.asciidoc @@ -1,45 +1,109 @@ [[query-dsl-multi-term-rewrite]] -== Multi Term Query Rewrite - -Multi term queries, like -<> and -<> are called -multi term queries and end up going through a process of rewrite. This -also happens on the -<>. 
-All of those queries allow to control how they will get rewritten using -the `rewrite` parameter: - -* `constant_score` (default): A rewrite method that performs like -`constant_score_boolean` when there are few matching terms and otherwise -visits all matching terms in sequence and marks documents for that term. -Matching documents are assigned a constant score equal to the query's -boost. -* `scoring_boolean`: A rewrite method that first translates each term -into a should clause in a boolean query, and keeps the scores as -computed by the query. Note that typically such scores are meaningless -to the user, and require non-trivial CPU to compute, so it's almost -always better to use `constant_score`. This rewrite method will hit -too many clauses failure if it exceeds the boolean query limit (defaults -to `1024`). -* `constant_score_boolean`: Similar to `scoring_boolean` except scores -are not computed. Instead, each matching document receives a constant -score equal to the query's boost. This rewrite method will hit too many -clauses failure if it exceeds the boolean query limit (defaults to -`1024`). -* `top_terms_N`: A rewrite method that first translates each term into -should clause in boolean query, and keeps the scores as computed by the -query. This rewrite method only uses the top scoring terms so it will -not overflow boolean max clause count. The `N` controls the size of the -top scoring terms to use. -* `top_terms_boost_N`: A rewrite method that first translates each term -into should clause in boolean query, but the scores are only computed as -the boost. This rewrite method only uses the top scoring terms so it -will not overflow the boolean max clause count. The `N` controls the -size of the top scoring terms to use. -* `top_terms_blended_freqs_N`: A rewrite method that first translates each -term into should clause in boolean query, but all term queries compute scores -as if they had the same frequency. In practice the frequency which is used -is the maximum frequency of all matching terms. This rewrite method only uses -the top scoring terms so it will not overflow boolean max clause count. The -`N` controls the size of the top scoring terms to use. +== `rewrite` Parameter + +WARNING: This parameter is for expert users only. Changing the value of +this parameter can impact search performance and relevance. + +{es} uses https://lucene.apache.org/core/[Apache Lucene] internally to power +indexing and searching. In their original form, Lucene cannot execute the +following queries: + +* <> +* <> +* <> +* <> +* <> + +To execute them, Lucene changes these queries to a simpler form, such as a +<> or a +https://en.wikipedia.org/wiki/Bit_array[bit set]. + +The `rewrite` parameter determines: + +* How Lucene calculates the relevance scores for each matching document +* Whether Lucene changes the original query to a `bool` +query or bit set +* If changed to a `bool` query, which `term` query clauses are included + +[float] +[[rewrite-param-valid-values]] +=== Valid values + +`constant_score` (Default):: +Uses the `constant_score_boolean` method for fewer matching terms. Otherwise, +this method finds all matching terms in sequence and returns matching documents +using a bit set. + +`constant_score_boolean`:: +Assigns each document a relevance score equal to the `boost` +parameter. ++ +This method changes the original query to a <>. This `bool` query contains a `should` clause and +<> for each matching term. 
++ +This method can cause the final `bool` query to exceed the clause limit in the +<> +setting. If the query exceeds this limit, {es} returns an error. + +`scoring_boolean`:: +Calculates a relevance score for each matching document. ++ +This method changes the original query to a <>. This `bool` query contains a `should` clause and +<> for each matching term. ++ +This method can cause the final `bool` query to exceed the clause limit in the +<> +setting. If the query exceeds this limit, {es} returns an error. + +`top_terms_blended_freqs_N`:: +Calculates a relevance score for each matching document as if all terms had the +same frequency. This frequency is the maximum frequency of all matching terms. ++ +This method changes the original query to a <>. This `bool` query contains a `should` clause and +<> for each matching term. ++ +The final `bool` query only includes `term` queries for the top `N` scoring +terms. ++ +You can use this method to avoid exceeding the clause limit in the +<> +setting. + +`top_terms_boost_N`:: +Assigns each matching document a relevance score equal to the `boost` parameter. ++ +This method changes the original query to a <>. This `bool` query contains a `should` clause and +<> for each matching term. ++ +The final `bool` query only includes `term` queries for the top `N` terms. ++ +You can use this method to avoid exceeding the clause limit in the +<> +setting. + +`top_terms_N`:: +Calculates a relevance score for each matching document. ++ +This method changes the original query to a <>. This `bool` query contains a `should` clause and +<> for each matching term. ++ +The final `bool` query +only includes `term` queries for the top `N` scoring terms. ++ +You can use this method to avoid exceeding the clause limit in the +<> +setting. + +[float] +[[rewrite-param-perf-considerations]] +=== Performance considerations for the `rewrite` parameter +For most uses, we recommend using the `constant_score`, +`constant_score_boolean`, or `top_terms_boost_N` rewrite methods. + +Other methods calculate relevance scores. These score calculations are often +expensive and do not improve query results. \ No newline at end of file diff --git a/docs/reference/query-dsl/script-score-query.asciidoc b/docs/reference/query-dsl/script-score-query.asciidoc index 42d4a7b1517e3..e8d97a31fa95f 100644 --- a/docs/reference/query-dsl/script-score-query.asciidoc +++ b/docs/reference/query-dsl/script-score-query.asciidoc @@ -78,10 +78,18 @@ to be the most efficient by using the internal mechanisms. [[vector-functions]] ===== Functions for vector fields + +experimental[] + These functions are used for for <> and <> fields. +NOTE: During vector functions' calculation, all matched documents are +linearly scanned. Thus, expect the query time grow linearly +with the number of matched documents. For this reason, we recommend +to limit the number of matched documents with a `query` parameter. + For dense_vector fields, `cosineSimilarity` calculates the measure of cosine similarity between a given query vector and document vectors. diff --git a/docs/reference/search/rank-eval.asciidoc b/docs/reference/search/rank-eval.asciidoc index b25048fec90e6..0ec2e070b1c74 100644 --- a/docs/reference/search/rank-eval.asciidoc +++ b/docs/reference/search/rank-eval.asciidoc @@ -111,7 +111,7 @@ GET /my_index/_rank_eval ], "requests": [ { - "id": "amsterdam_query" + "id": "amsterdam_query", "ratings": [ ... 
], "template_id": "match_one_field_query", <3> "params": { <4> diff --git a/docs/reference/security/securing-communications/configuring-tls-docker.asciidoc b/docs/reference/security/securing-communications/configuring-tls-docker.asciidoc index 2bc2300174ecc..1d23430e37eec 100644 --- a/docs/reference/security/securing-communications/configuring-tls-docker.asciidoc +++ b/docs/reference/security/securing-communications/configuring-tls-docker.asciidoc @@ -2,10 +2,8 @@ [[configuring-tls-docker]] === Encrypting communications in an {es} Docker Container -Starting with version 6.0.0, {stack} {security-features} -(Gold, Platinum or Enterprise subscriptions) -https://www.elastic.co/guide/en/elasticsearch/reference/6.0/breaking-6.0.0-xes.html[require SSL/TLS] -encryption for the transport networking layer. +Unless you are using a trial license, {stack} {security-features} require +SSL/TLS encryption for the transport networking layer. This section demonstrates an easy path to get started with SSL/TLS for both HTTPS and transport using the {es} Docker image. The example uses diff --git a/docs/reference/security/securing-communications/securing-elasticsearch.asciidoc b/docs/reference/security/securing-communications/securing-elasticsearch.asciidoc index 9d207f26a96b6..a24e272dd8937 100644 --- a/docs/reference/security/securing-communications/securing-elasticsearch.asciidoc +++ b/docs/reference/security/securing-communications/securing-elasticsearch.asciidoc @@ -7,8 +7,8 @@ your {es} cluster. Connections are secured using Transport Layer Security (TLS/SSL). WARNING: Clusters that do not have encryption enabled send all data in plain text -including passwords and will not be able to install a license that enables -{security-features}. +including passwords. If the {es} {security-features} are enabled, unless you +have a trial license, you must configure SSL/TLS for internode-communication. To enable encryption, you need to perform the following steps on each node in the cluster: diff --git a/docs/reference/security/securing-communications/setting-up-ssl.asciidoc b/docs/reference/security/securing-communications/setting-up-ssl.asciidoc index 90f9b040d9d54..68eda2cdc3e09 100644 --- a/docs/reference/security/securing-communications/setting-up-ssl.asciidoc +++ b/docs/reference/security/securing-communications/setting-up-ssl.asciidoc @@ -1,16 +1,15 @@ [[ssl-tls]] -=== Setting Up TLS on a cluster +=== Setting up TLS on a cluster -The {stack} {security-features} enables you to encrypt traffic to, from, and +The {stack} {security-features} enable you to encrypt traffic to, from, and within your {es} cluster. Connections are secured using Transport Layer Security (TLS), which is commonly referred to as "SSL". WARNING: Clusters that do not have encryption enabled send all data in plain text -including passwords and will not be able to install a license that enables -{security-features}. +including passwords. If the {es} {security-features} are enabled, unless you have a trial license, you must configure SSL/TLS for internode-communication. The following steps describe how to enable encryption across the various -components of the Elastic Stack. You must perform each of the steps that are +components of the {stack}. You must perform each of the steps that are applicable to your cluster. . Generate a private key and X.509 certificate for each of your {es} nodes. See @@ -22,14 +21,14 @@ enable TLS on the HTTP layer. 
See {ref}/configuring-tls.html#tls-transport[Encrypting Communications Between Nodes in a Cluster] and {ref}/configuring-tls.html#tls-http[Encrypting HTTP Client Communications]. -. Configure {monitoring} to use encrypted connections. See <>. +. Configure the {monitor-features} to use encrypted connections. See <>. . Configure {kib} to encrypt communications between the browser and the {kib} server and to connect to {es} via HTTPS. See -{kibana-ref}/using-kibana-with-security.html[Configuring Security in {kib}]. +{kibana-ref}/using-kibana-with-security.html[Configuring security in {kib}]. . Configure Logstash to use TLS encryption. See -{logstash-ref}/ls-security.html[Configuring Security in Logstash]. +{logstash-ref}/ls-security.html[Configuring security in {ls}]. . Configure Beats to use encrypted connections. See <>. diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc index 426605f63d7cb..3cfe1d1e58769 100644 --- a/docs/reference/settings/security-settings.asciidoc +++ b/docs/reference/settings/security-settings.asciidoc @@ -513,8 +513,7 @@ and `full`. Defaults to `full`. See <> for an explanation of these values. `ssl.supported_protocols`:: -Supported protocols for TLS/SSL (with versions). Defaults to `TLSv1.3,TLSv1.2,TLSv1.1` if -the JVM supports TLSv1.3, otherwise `TLSv1.2,TLSv1.1`. +Supported protocols for TLS/SSL (with versions). Defaults to `TLSv1.3,TLSv1.2,TLSv1.1`. `ssl.cipher_suites`:: Specifies the cipher suites that should be supported when communicating with the LDAP server. @@ -765,8 +764,7 @@ and `full`. Defaults to `full`. See <> for an explanation of these values. `ssl.supported_protocols`:: -Supported protocols for TLS/SSL (with versions). Defaults to `TLSv1.3,TLSv1.2,TLSv1.1` if -the JVM supports TLSv1.3, otherwise `TLSv1.2,TLSv1.1`. +Supported protocols for TLS/SSL (with versions). Defaults to `TLSv1.3,TLSv1.2,TLSv1.1`. `ssl.cipher_suites`:: Specifies the cipher suites that should be supported when communicating with the Active Directory server. @@ -1173,8 +1171,7 @@ Defaults to `full`. See <> for a more detailed explanation of these values. `ssl.supported_protocols`:: -Specifies the supported protocols for TLS/SSL. Defaults to `TLSv1.3,TLSv1.2,TLSv1.1` if -the JVM supports TLSv1.3, otherwise `TLSv1.2,TLSv1.1`. +Specifies the supported protocols for TLS/SSL. Defaults to `TLSv1.3,TLSv1.2,TLSv1.1`. `ssl.cipher_suites`:: Specifies the @@ -1494,8 +1491,7 @@ For more information, see `*.ssl.supported_protocols`:: Supported protocols with versions. Valid protocols: `SSLv2Hello`, -`SSLv3`, `TLSv1`, `TLSv1.1`, `TLSv1.2`, `TLSv1.3`. Defaults to `TLSv1.3,TLSv1.2,TLSv1.1` if -the JVM supports TLSv1.3, otherwise `TLSv1.2,TLSv1.1`. +`SSLv3`, `TLSv1`, `TLSv1.1`, `TLSv1.2`, `TLSv1.3`. Defaults to `TLSv1.3,TLSv1.2,TLSv1.1`. + -- NOTE: If `xpack.security.fips_mode.enabled` is `true`, you cannot use `SSLv2Hello` @@ -1526,13 +1522,18 @@ Controls the verification of certificates. Valid values are: The default value is `full`. `*.ssl.cipher_suites`:: -Supported cipher suites can be found in Oracle's http://docs.oracle.com/javase/8/docs/technotes/guides/security/SunProviders.html[ -Java Cryptography Architecture documentation]. Defaults to `TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256`, -`TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256`, `TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA`, `TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA`, -`TLS_RSA_WITH_AES_128_CBC_SHA256`, `TLS_RSA_WITH_AES_128_CBC_SHA`. 
If the _Java Cryptography Extension (JCE) Unlimited Strength -Jurisdiction Policy Files_ has been installed, the default value also includes `TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384`, -`TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384`, `TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA`, `TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA`, -`TLS_RSA_WITH_AES_256_CBC_SHA256`, `TLS_RSA_WITH_AES_256_CBC_SHA`. +Supported cipher suites can be found in Oracle's +https://docs.oracle.com/en/java/javase/11/security/oracle-providers.html#GUID-7093246A-31A3-4304-AC5F-5FB6400405E2[Java Cryptography Architecture documentation]. +Defaults to `TLS_AES_256_GCM_SHA384`, `TLS_AES_128_GCM_SHA256`, +`TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384`, `TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256`, +`TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384`, `TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256`, +`TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384`, `TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256`, +`TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384`, `TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256`, +`TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA`, `TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA`, +`TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA`, `TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA`, +`TLS_RSA_WITH_AES_256_GCM_SHA384`, `TLS_RSA_WITH_AES_128_GCM_SHA256`, +`TLS_RSA_WITH_AES_256_CBC_SHA256`, `TLS_RSA_WITH_AES_128_CBC_SHA256`, +`TLS_RSA_WITH_AES_256_CBC_SHA`, `TLS_RSA_WITH_AES_128_CBC_SHA`. [float] [[tls-ssl-key-settings]] diff --git a/docs/reference/setup/bootstrap-checks-xes.asciidoc b/docs/reference/setup/bootstrap-checks-xes.asciidoc index df020bbd96276..37c90e9f4d9a3 100644 --- a/docs/reference/setup/bootstrap-checks-xes.asciidoc +++ b/docs/reference/setup/bootstrap-checks-xes.asciidoc @@ -53,9 +53,8 @@ must also be valid. === SSL/TLS check //See TLSLicenseBootstrapCheck.java -In 6.0 and later releases, if you have a gold, platinum, or enterprise license -and {es} {security-features} are enabled, you must configure SSL/TLS for -internode-communication. +If you enable {es} {security-features}, unless you have a trial license, you +must configure SSL/TLS for internode-communication. NOTE: Single-node clusters that use a loopback interface do not have this requirement. For more information, see diff --git a/docs/reference/setup/important-settings/discovery-settings.asciidoc b/docs/reference/setup/important-settings/discovery-settings.asciidoc index 5709ae3bb9345..245852b209609 100644 --- a/docs/reference/setup/important-settings/discovery-settings.asciidoc +++ b/docs/reference/setup/important-settings/discovery-settings.asciidoc @@ -49,26 +49,23 @@ discovery.seed_hosts: - 192.168.1.10:9300 - 192.168.1.11 <1> - seeds.mydomain.com <2> -cluster.initial_master_nodes: - - master-node-a <3> - - 192.168.1.12 <4> - - 192.168.1.13:9301 <5> +cluster.initial_master_nodes: <3> + - master-node-a + - master-node-b + - master-node-c -------------------------------------------------- <1> The port will default to `transport.profiles.default.port` and fallback to `transport.port` if not specified. <2> If a hostname resolves to multiple IP addresses then the node will attempt to discover other nodes at all resolved addresses. -<3> Initial master nodes can be identified by their <>, - which defaults to the hostname. Make sure that the value in - `cluster.initial_master_nodes` matches the `node.name` exactly. 
If you use - a fully-qualified domain name such as `master-node-a.example.com` for your - node names then you must use the fully-qualified name in this list; - conversely if `node.name` is a bare hostname without any trailing - qualifiers then you must also omit the trailing qualifiers in - `cluster.initial_master_nodes`. -<4> Initial master nodes can also be identified by their IP address. -<5> If multiple master nodes share an IP address then the transport port must - be used to distinguish between them. +<3> The initial master nodes should be identified by their + <>, which defaults to their hostname. Make sure that + the value in `cluster.initial_master_nodes` matches the `node.name` + exactly. If you use a fully-qualified domain name such as + `master-node-a.example.com` for your node names then you must use the + fully-qualified name in this list; conversely if `node.name` is a bare + hostname without any trailing qualifiers then you must also omit the + trailing qualifiers in `cluster.initial_master_nodes`. For more information, see <> and <>. diff --git a/docs/reference/setup/important-settings/heap-size.asciidoc b/docs/reference/setup/important-settings/heap-size.asciidoc index 77aa23b61df45..37e417e086e9b 100644 --- a/docs/reference/setup/important-settings/heap-size.asciidoc +++ b/docs/reference/setup/important-settings/heap-size.asciidoc @@ -7,42 +7,48 @@ to ensure that Elasticsearch has enough heap available. Elasticsearch will assign the entire heap specified in <> via the `Xms` (minimum heap size) and `Xmx` (maximum -heap size) settings. +heap size) settings. You should set these two settings to be equal to each +other. -The value for these setting depends on the amount of RAM available on your -server. Good rules of thumb are: +The value for these settings depends on the amount of RAM available on your +server: -* Set the minimum heap size (`Xms`) and maximum heap size (`Xmx`) to be equal to - each other. +* Set `Xmx` and `Xms` to no more than 50% of your physical RAM. {es} requires + memory for purposes other than the JVM heap and it is important to leave + space for this. For instance, {es} uses off-heap buffers for efficient + network communication, relies on the operating system's filesystem cache for + efficient access to files, and the JVM itself requires some memory too. It is + normal to observe the {es} process using more memory than the limit + configured with the `Xmx` setting. -* The more heap available to Elasticsearch, the more memory it can use for - caching. But note that too much heap can subject you to long garbage - collection pauses. - -* Set `Xmx` to no more than 50% of your physical RAM, to ensure that there is - enough physical RAM left for kernel file system caches. - -* Don’t set `Xmx` to above the cutoff that the JVM uses for compressed object - pointers (compressed oops); the exact cutoff varies but is near 32 GB. You can - verify that you are under the limit by looking for a line in the logs like the - following: +* Set `Xmx` and `Xms` to no more than the threshold that the JVM uses for + compressed object pointers (compressed oops); the exact threshold varies but + is near 32 GB. You can verify that you are under the threshold by looking for a + line in the logs like the following: + heap size [1.9gb], compressed ordinary object pointers [true] -* Even better, try to stay below the threshold for zero-based compressed oops; - the exact cutoff varies but 26 GB is safe on most systems, but can be as large - as 30 GB on some systems. 
You can verify that you are under the limit by - starting Elasticsearch with the JVM options `-XX:+UnlockDiagnosticVMOptions - -XX:+PrintCompressedOopsMode` and looking for a line like the following: +* Ideally set `Xmx` and `Xms` to no more than the threshold for zero-based + compressed oops; the exact threshold varies but 26 GB is safe on most + systems, but can be as large as 30 GB on some systems. You can verify that + you are under this threshold by starting {es} with the JVM options + `-XX:+UnlockDiagnosticVMOptions -XX:+PrintCompressedOopsMode` and looking for + a line like the following: + -- heap address: 0x000000011be00000, size: 27648 MB, zero based Compressed Oops -showing that zero-based compressed oops are enabled instead of +showing that zero-based compressed oops are enabled. If zero-based compressed +oops are not enabled then you will see a line like the following instead: heap address: 0x0000000118400000, size: 28672 MB, Compressed Oops with base: 0x00000001183ff000 -- +The more heap available to {es}, the more memory it can use for its internal +caches, but the less memory it leaves available for the operating system to use +for the filesystem cache. Also, larger heaps can cause longer garbage +collection pauses. + Here are examples of how to set the heap size via the jvm.options file: [source,txt] @@ -66,7 +72,7 @@ ES_JAVA_OPTS="-Xms4000m -Xmx4000m" ./bin/elasticsearch <2> <2> Set the minimum and maximum heap size to 4000 MB. NOTE: Configuring the heap for the <> is -different than the above. The values initially populated for the Windows service -can be configured as above but are different after the service has been +different than the above. The values initially populated for the Windows +service can be configured as above but are different after the service has been installed. Consult the <> for additional details. diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 172c7c1f17c2f..1fcc261d68e1f 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -62,6 +62,9 @@ ifeval::["{release-state}"!="unreleased"] docker run -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" {docker-image} -------------------------------------------- +Note the use of <> that allows bypassing +the <> in a single-node development cluster. + endif::[] [[docker-cli-run-prod-mode]] diff --git a/docs/reference/setup/setup-xclient.asciidoc b/docs/reference/setup/setup-xclient.asciidoc index 9da482af8493a..24cef9c736966 100644 --- a/docs/reference/setup/setup-xclient.asciidoc +++ b/docs/reference/setup/setup-xclient.asciidoc @@ -65,6 +65,7 @@ repositories { // Add the Elasticsearch Maven Repository maven { + name "elastic" url "https://artifacts.elastic.co/maven" } } diff --git a/docs/reference/sql/endpoints/jdbc.asciidoc b/docs/reference/sql/endpoints/jdbc.asciidoc index 33f130a891896..4e9de8934895a 100644 --- a/docs/reference/sql/endpoints/jdbc.asciidoc +++ b/docs/reference/sql/endpoints/jdbc.asciidoc @@ -136,6 +136,10 @@ Query timeout (in seconds). That is the maximum amount of time waiting for a que `field.multi.value.leniency` (default `true`):: Whether to be lenient and return the first value (without any guarantees of what that will be - typically the first in natural ascending order) for fields with multiple values (true) or throw an exception. +[float] +==== Index +`index.include.frozen` (default `false`):: Whether to include <> in the query execution or not (default). 
+ [float] ==== Additional diff --git a/docs/reference/sql/endpoints/rest.asciidoc b/docs/reference/sql/endpoints/rest.asciidoc index c55b379bfffe6..e44649f3a8767 100644 --- a/docs/reference/sql/endpoints/rest.asciidoc +++ b/docs/reference/sql/endpoints/rest.asciidoc @@ -360,6 +360,10 @@ More information available https://docs.oracle.com/javase/8/docs/api/java/time/Z |false |Throw an exception when encountering multiple values for a field (default) or be lenient and return the first value from the list (without any guarantees of what that will be - typically the first in natural ascending order). +|index_include_frozen +|false +|Whether to include <> in the query execution or not (default). + |=== Do note that most parameters (outside the timeout and `columnar` ones) make sense only during the initial query - any follow-up pagination request only requires the `cursor` parameter as explained in the <> chapter. diff --git a/docs/reference/sql/functions/geo.asciidoc b/docs/reference/sql/functions/geo.asciidoc new file mode 100644 index 0000000000000..112ddfffce6ed --- /dev/null +++ b/docs/reference/sql/functions/geo.asciidoc @@ -0,0 +1,194 @@ +[role="xpack"] +[testenv="basic"] +[[sql-functions-geo]] +=== Geo Functions + +beta[] + +The geo functions work with geometries stored in `geo_point` and `geo_shape` fields, or returned by other geo functions. + +==== Limitations + +Both <> and <> types are represented in SQL as geometry and can be used +interchangeably with the following exceptions: + +* `geo_shape` fields don't have doc values; therefore, these fields cannot be used for filtering, grouping or sorting. + +* `geo_point` fields are indexed and have doc values by default; however, only latitude and longitude are stored and + indexed, with some loss of precision from the original values (4.190951585769653E-8 for the latitude and + 8.381903171539307E-8 for the longitude). The altitude component is accepted but not stored in doc values nor indexed. + Therefore, calling the `ST_Z` function in filtering, grouping or sorting will return `null`. + +==== Geometry Conversion + +[[sql-functions-geo-st-as-wkt]] +===== `ST_AsWKT` + +.Synopsis: +[source, sql] +-------------------------------------------------- +ST_AsWKT(geometry<1>) +-------------------------------------------------- + +*Input*: + +<1> geometry + +*Output*: string + +.Description: + +Returns the WKT representation of the `geometry`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/geo.csv-spec[aswkt] +-------------------------------------------------- + + +[[sql-functions-geo-st-wkt-to-sql]] +===== `ST_WKTToSQL` + +.Synopsis: +[source, sql] +-------------------------------------------------- +ST_WKTToSQL(string<1>) +-------------------------------------------------- + +*Input*: + +<1> string WKT representation of geometry + +*Output*: geometry + +.Description: + +Returns the geometry from WKT representation.
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/geo.csv-spec[aswkt] +-------------------------------------------------- + +==== Geometry Properties + +[[sql-functions-geo-st-geometrytype]] +===== `ST_GeometryType` + +.Synopsis: +[source, sql] +-------------------------------------------------- +ST_GeometryType(geometry<1>) +-------------------------------------------------- + +*Input*: + +<1> geometry + +*Output*: string + +.Description: + +Returns the type of the `geometry` such as POINT, MULTIPOINT, LINESTRING, MULTILINESTRING, POLYGON, MULTIPOLYGON, GEOMETRYCOLLECTION, ENVELOPE or CIRCLE. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/geo.csv-spec[geometrytype] +-------------------------------------------------- + +[[sql-functions-geo-st-x]] +===== `ST_X` + +.Synopsis: +[source, sql] +-------------------------------------------------- +ST_X(geometry<1>) +-------------------------------------------------- + +*Input*: + +<1> geometry + +*Output*: double + +.Description: + +Returns the longitude of the first point in the geometry. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/geo.csv-spec[x] +-------------------------------------------------- + +[[sql-functions-geo-st-y]] +===== `ST_Y` + +.Synopsis: +[source, sql] +-------------------------------------------------- +ST_Y(geometry<1>) +-------------------------------------------------- + +*Input*: + +<1> geometry + +*Output*: double + +.Description: + +Returns the the latitude of the first point in the geometry. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/geo.csv-spec[y] +-------------------------------------------------- + +[[sql-functions-geo-st-z]] +===== `ST_Z` + +.Synopsis: +[source, sql] +-------------------------------------------------- +ST_Z(geometry<1>) +-------------------------------------------------- + +*Input*: + +<1> geometry + +*Output*: double + +.Description: + +Returns the altitude of the first point in the geometry. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/geo.csv-spec[z] +-------------------------------------------------- + +[[sql-functions-geo-st-distance]] +===== `ST_Distance` + +.Synopsis: +[source, sql] +-------------------------------------------------- +ST_Distance(geometry<1>, geometry<2>) +-------------------------------------------------- + +*Input*: + +<1> source geometry +<2> target geometry + +*Output*: Double + +.Description: + +Returns the distance between geometries in meters. Both geometries have to be points. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs/geo.csv-spec[distance] +-------------------------------------------------- \ No newline at end of file diff --git a/docs/reference/sql/functions/index.asciidoc b/docs/reference/sql/functions/index.asciidoc index 382adeecea4ed..248c47452bab4 100644 --- a/docs/reference/sql/functions/index.asciidoc +++ b/docs/reference/sql/functions/index.asciidoc @@ -136,6 +136,14 @@ ** <> ** <> ** <> +* <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> * <> ** <> ** <> @@ -149,5 +157,6 @@ include::search.asciidoc[] include::math.asciidoc[] include::string.asciidoc[] include::type-conversion.asciidoc[] +include::geo.asciidoc[] include::conditional.asciidoc[] include::system.asciidoc[] diff --git a/docs/reference/sql/language/data-types.asciidoc b/docs/reference/sql/language/data-types.asciidoc index 8db4c88f3a11b..ad9b2a320c0c6 100644 --- a/docs/reference/sql/language/data-types.asciidoc +++ b/docs/reference/sql/language/data-types.asciidoc @@ -81,6 +81,8 @@ s|SQL precision | interval_hour_to_minute | 23 | interval_hour_to_second | 23 | interval_minute_to_second | 23 +| geo_point | 52 +| geo_shape | 2,147,483,647 |=== diff --git a/docs/reference/sql/language/index.asciidoc b/docs/reference/sql/language/index.asciidoc index 6ea6a15b3ed64..2ae33d32f84ee 100644 --- a/docs/reference/sql/language/index.asciidoc +++ b/docs/reference/sql/language/index.asciidoc @@ -13,4 +13,4 @@ This chapter describes the SQL syntax and semantics supported namely: include::syntax/lexic/index.asciidoc[] include::syntax/commands/index.asciidoc[] include::data-types.asciidoc[] -include::index-patterns.asciidoc[] +include::indices.asciidoc[] diff --git a/docs/reference/sql/language/index-patterns.asciidoc b/docs/reference/sql/language/indices.asciidoc similarity index 65% rename from docs/reference/sql/language/index-patterns.asciidoc rename to docs/reference/sql/language/indices.asciidoc index 44f951d36a028..82c7f30fb041e 100644 --- a/docs/reference/sql/language/index-patterns.asciidoc +++ b/docs/reference/sql/language/indices.asciidoc @@ -5,7 +5,9 @@ {es-sql} supports two types of patterns for matching multiple indices or tables: -* {es} multi-index +[[sql-index-patterns-multi]] +[float] +=== {es} multi-index The {es} notation for enumerating, including or excluding <> is supported _as long_ as it is quoted or escaped as a table identifier. @@ -33,7 +35,9 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[fromTablePatternQuoted] NOTE: There is the restriction that all resolved concrete tables have the exact same mapping. -* SQL `LIKE` notation +[[sql-index-patterns-like]] +[float] +=== SQL `LIKE` notation The common `LIKE` statement (including escaping if needed) to match a wildcard pattern, based on one `_` or multiple `%` characters. @@ -81,3 +85,31 @@ Which one to use, is up to you however try to stick to the same one across your NOTE: As the query type of quoting between the two patterns is fairly similar (`"` vs `'`), {es-sql} _always_ requires the keyword `LIKE` for SQL `LIKE` pattern. +[[sql-index-frozen]] +== Frozen Indices + +{es} <> are a useful and powerful tool for hot/warm architecture introduced in {es} 6.6, +essentially by trading speed for memory. +{es-sql} supports frozen indices and similar to {es}, due to their performance characteristics, allows searches on them only +when explicitly told so by user - in other words, by default, frozen indices are not included in searches. 
+ +One can toggle the use of frozen indices through: + +dedicated configuration parameter:: +Set the `index_include_frozen` property to `true` in the <> or `index.include.frozen` in the drivers to include frozen indices. + +dedicated keyword:: +Explicitly perform the inclusion through the dedicated `FROZEN` keyword in the `FROM` clause or `INCLUDE FROZEN` in the `SHOW` commands: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs/docs.csv-spec[showTablesIncludeFrozen] +---- + + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs/docs.csv-spec[fromTableIncludeFrozen] +---- + +Unless enabled, frozen indices are completely ignored; it is as if they do not exist and as such, queries run against them are likely to fail. \ No newline at end of file diff --git a/docs/reference/sql/language/syntax/commands/show-tables.asciidoc b/docs/reference/sql/language/syntax/commands/show-tables.asciidoc index 691d328aa4bdd..554819e24b178 100644 --- a/docs/reference/sql/language/syntax/commands/show-tables.asciidoc +++ b/docs/reference/sql/language/syntax/commands/show-tables.asciidoc @@ -7,12 +7,14 @@ [source, sql] ---- SHOW TABLES - [table identifier | <1> - [LIKE pattern ]]? <2> + [INCLUDE FROZEN]? <1> + [table identifier | <2> + [LIKE pattern ]]? <3> ---- -<1> single table identifier or double quoted es multi index -<2> SQL LIKE pattern +<1> Whether or not to include frozen indices +<2> single table identifier or double quoted es multi index +<3> SQL LIKE pattern See <> for more information about patterns. diff --git a/docs/reference/sql/limitations.asciidoc b/docs/reference/sql/limitations.asciidoc index 8f7868c892e52..c5b334480c993 100644 --- a/docs/reference/sql/limitations.asciidoc +++ b/docs/reference/sql/limitations.asciidoc @@ -3,6 +3,14 @@ [[sql-limitations]] == SQL Limitations +[float] +[[large-parsing-trees]] +=== Large queries may throw `ParsingException` + +Extremely large queries can consume too much memory during the parsing phase, in which case the {es-sql} engine will +abort parsing and throw an error. In such cases, consider reducing the query to a smaller size by potentially +simplifying it or splitting it into smaller queries. + [float] [[sys-columns-describe-table-nested-fields]] === Nested fields in `SYS COLUMNS` and `DESCRIBE TABLE` @@ -142,3 +150,14 @@ SELECT count(*) FROM test GROUP BY MINUTE((CAST(date_created AS TIME)); ------------------------------------------------------------- SELECT HISTOGRAM(CAST(birth_date AS TIME), INTERVAL '10' MINUTES) as h, COUNT(*) FROM t GROUP BY h ------------------------------------------------------------- + +[float] +[[geo-sql-limitations]] +=== Geo-related functions + +Since `geo_shape` fields don't have doc values, these fields cannot be used for filtering, grouping or sorting. + +By default, `geo_point` fields are indexed and have doc values. However, only latitude and longitude are stored and +indexed, with some loss of precision from the original values (4.190951585769653E-8 for the latitude and +8.381903171539307E-8 for the longitude). The altitude component is accepted but not stored in doc values nor indexed. +Therefore, calling the `ST_Z` function in filtering, grouping or sorting will return `null`.
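A minimal sketch of the `ST_Z` limitation above - `airports` and its `geo_point` column `location` are hypothetical names used only for illustration. Because the altitude is not available from doc values, `ST_Z(location)` evaluates to `null` in the filter and the query matches no documents:

[source, sql]
--------------------------------------------------
SELECT name FROM airports WHERE ST_Z(location) > 100;
--------------------------------------------------

The same applies when the function is used for grouping or sorting.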
diff --git a/docs/reference/upgrade/rolling_upgrade.asciidoc b/docs/reference/upgrade/rolling_upgrade.asciidoc index 041b184570ab6..2cf1061e67ba7 100644 --- a/docs/reference/upgrade/rolling_upgrade.asciidoc +++ b/docs/reference/upgrade/rolling_upgrade.asciidoc @@ -168,20 +168,29 @@ include::open-ml.asciidoc[] During a rolling upgrade, the cluster continues to operate normally. However, any new functionality is disabled or operates in a backward compatible mode -until all nodes in the cluster are upgraded. New functionality -becomes operational once the upgrade is complete and all nodes are running the -new version. Once that has happened, there's no way to return to operating -in a backward compatible mode. Nodes running the previous major version will -not be allowed to join the fully-updated cluster. +until all nodes in the cluster are upgraded. New functionality becomes +operational once the upgrade is complete and all nodes are running the new +version. Once that has happened, there's no way to return to operating in a +backward compatible mode. Nodes running the previous major version will not be +allowed to join the fully-updated cluster. In the unlikely case of a network malfunction during the upgrade process that -isolates all remaining old nodes from the cluster, you must take the -old nodes offline and upgrade them to enable them to join the cluster. +isolates all remaining old nodes from the cluster, you must take the old nodes +offline and upgrade them to enable them to join the cluster. + +If you stop half or more of the master-eligible nodes all at once during the +upgrade then the cluster will become unavailable, meaning that the upgrade is +no longer a _rolling_ upgrade. If this happens, you should upgrade and restart +all of the stopped master-eligible nodes to allow the cluster to form again, as +if performing a <>. It may also +be necessary to upgrade all of the remaining old nodes before they can join the +cluster after it re-forms. Similarly, if you run a testing/development environment with only one master node, the master node should be upgraded last. Restarting a single master node forces the cluster to be reformed. The new cluster will initially only have the upgraded master node and will thus reject the older nodes when they re-join the -cluster. Nodes that have already been upgraded will successfully re-join the -upgraded master. +cluster. Nodes that have already been upgraded will successfully re-join the +upgraded master. 
+ ==================================================== diff --git a/gradle.properties b/gradle.properties index ec79845d44e78..491770edd7c52 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,3 +1,3 @@ org.gradle.daemon=true -org.gradle.jvmargs=-Xmx2g -XX:+HeapDumpOnOutOfMemoryError +org.gradle.jvmargs=-Xmx2g -XX:+HeapDumpOnOutOfMemoryError -Xss2m options.forkOptions.memoryMaximumSize=2g diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 8d172843af1d6..47216b872e431 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-5.3-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=f4d820c2a9685710eba5b92f10e0e4fb20e0d6c0dd1f46971e658160f25e7147 +distributionSha256Sum=14cd15fc8cc8705bd69dcfa3c8fefb27eb7027f5de4b47a8b279218f76895a91 diff --git a/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfiguration.java b/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfiguration.java index 68df7d248340d..52c1e8151f49d 100644 --- a/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfiguration.java +++ b/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfiguration.java @@ -24,7 +24,6 @@ import javax.net.ssl.X509ExtendedTrustManager; import java.nio.file.Path; import java.security.GeneralSecurityException; -import java.security.NoSuchAlgorithmException; import java.util.Collection; import java.util.Collections; import java.util.HashSet; @@ -52,12 +51,7 @@ public class SslConfiguration { static final Map ORDERED_PROTOCOL_ALGORITHM_MAP; static { LinkedHashMap protocolAlgorithmMap = new LinkedHashMap<>(); - try { - SSLContext.getInstance("TLSv1.3"); - protocolAlgorithmMap.put("TLSv1.3", "TLSv1.3"); - } catch (NoSuchAlgorithmException e) { - // ignore since we support JVMs that do not support TLSv1.3 - } + protocolAlgorithmMap.put("TLSv1.3", "TLSv1.3"); protocolAlgorithmMap.put("TLSv1.2", "TLSv1.2"); protocolAlgorithmMap.put("TLSv1.1", "TLSv1.1"); protocolAlgorithmMap.put("TLSv1", "TLSv1"); diff --git a/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java b/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java index 6e511565a9f53..e9a1ccad3e950 100644 --- a/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java +++ b/libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslConfigurationLoader.java @@ -19,21 +19,17 @@ package org.elasticsearch.common.ssl; -import javax.crypto.Cipher; +import org.elasticsearch.bootstrap.JavaVersion; + import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.TrustManagerFactory; import java.nio.file.Path; -import java.security.NoSuchAlgorithmException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.function.Function; import java.util.stream.Collectors; import static org.elasticsearch.common.ssl.KeyStoreUtil.inferKeyStoreType; -import static org.elasticsearch.common.ssl.SslConfiguration.ORDERED_PROTOCOL_ALGORITHM_MAP; import static org.elasticsearch.common.ssl.SslConfigurationKeys.CERTIFICATE; import static 
org.elasticsearch.common.ssl.SslConfigurationKeys.CERTIFICATE_AUTHORITIES; import static org.elasticsearch.common.ssl.SslConfigurationKeys.CIPHERS; @@ -70,10 +66,36 @@ */ public abstract class SslConfigurationLoader { - static final List DEFAULT_PROTOCOLS = Collections.unmodifiableList( - ORDERED_PROTOCOL_ALGORITHM_MAP.containsKey("TLSv1.3") ? - Arrays.asList("TLSv1.3", "TLSv1.2", "TLSv1.1") : Arrays.asList("TLSv1.2", "TLSv1.1")); - static final List DEFAULT_CIPHERS = loadDefaultCiphers(); + static final List DEFAULT_PROTOCOLS = List.of("TLSv1.3", "TLSv1.2", "TLSv1.1"); + + private static final List JDK11_CIPHERS = List.of( + "TLS_AES_256_GCM_SHA384", "TLS_AES_128_GCM_SHA256", // TLSv1.3 cipher has PFS, AEAD, hardware support + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", // PFS, AEAD, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", // PFS, AEAD, hardware support + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", // PFS, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", // PFS, hardware support + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", // PFS, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", // PFS, hardware support + "TLS_RSA_WITH_AES_256_GCM_SHA384", "TLS_RSA_WITH_AES_128_GCM_SHA256", // AEAD, hardware support + "TLS_RSA_WITH_AES_256_CBC_SHA256", "TLS_RSA_WITH_AES_128_CBC_SHA256", // hardware support + "TLS_RSA_WITH_AES_256_CBC_SHA", "TLS_RSA_WITH_AES_128_CBC_SHA"); // hardware support + + private static final List JDK12_CIPHERS = List.of( + "TLS_AES_256_GCM_SHA384", "TLS_AES_128_GCM_SHA256", // TLSv1.3 cipher has PFS, AEAD, hardware support + "TLS_CHACHA20_POLY1305_SHA256", // TLSv1.3 cipher has PFS, AEAD + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", // PFS, AEAD, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", // PFS, AEAD, hardware support + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", // PFS, AEAD + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", // PFS, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", // PFS, hardware support + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", // PFS, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", // PFS, hardware support + "TLS_RSA_WITH_AES_256_GCM_SHA384", "TLS_RSA_WITH_AES_128_GCM_SHA256", // AEAD, hardware support + "TLS_RSA_WITH_AES_256_CBC_SHA256", "TLS_RSA_WITH_AES_128_CBC_SHA256", // hardware support + "TLS_RSA_WITH_AES_256_CBC_SHA", "TLS_RSA_WITH_AES_128_CBC_SHA"); // hardware support + + static final List DEFAULT_CIPHERS = + JavaVersion.current().compareTo(JavaVersion.parse("12")) > -1 ? JDK12_CIPHERS : JDK11_CIPHERS; private static final char[] EMPTY_PASSWORD = new char[0]; private final String settingPrefix; @@ -141,9 +163,6 @@ public void setDefaultClientAuth(SslClientAuthenticationMode defaultClientAuth) /** * Change the default supported ciphers. - * The initial cipher list depends on the availability of {@link #has256BitAES() 256 bit AES}. 
- * - * @see #loadDefaultCiphers() */ public void setDefaultCiphers(List defaultCiphers) { this.defaultCiphers = defaultCiphers; @@ -336,40 +355,4 @@ private List resolveListSetting(String key, Function parser, L throw new SslConfigException("cannot retrieve setting [" + settingPrefix + key + "]", e); } } - - private static List loadDefaultCiphers() { - final List ciphers128 = Arrays.asList( - "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", - "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", - "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", - "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", - "TLS_RSA_WITH_AES_128_CBC_SHA256", - "TLS_RSA_WITH_AES_128_CBC_SHA" - ); - final List ciphers256 = Arrays.asList( - "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384", - "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384", - "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", - "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", - "TLS_RSA_WITH_AES_256_CBC_SHA256", - "TLS_RSA_WITH_AES_256_CBC_SHA" - ); - if (has256BitAES()) { - List ciphers = new ArrayList<>(ciphers256.size() + ciphers128.size()); - ciphers.addAll(ciphers256); - ciphers.addAll(ciphers128); - return ciphers; - } else { - return ciphers128; - } - } - - private static boolean has256BitAES() { - try { - return Cipher.getMaxAllowedKeyLength("AES") > 128; - } catch (NoSuchAlgorithmException e) { - // No AES? Things are going to be very weird, but technically that means we don't have 256 bit AES, so ... - return false; - } - } } diff --git a/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java b/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java index 20a161b78fd5f..b8648efe49618 100644 --- a/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java +++ b/libs/ssl-config/src/test/java/org/elasticsearch/common/ssl/SslConfigurationLoaderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.ssl; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -33,8 +34,10 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; public class SslConfigurationLoaderTests extends ESTestCase { @@ -217,4 +220,18 @@ public void testLoadKeysFromJKS() { assertThat(keyConfig.getDependentFiles(), containsInAnyOrder(getDataPath("/certs/cert-all/certs.jks"))); assertThat(keyConfig.createKeyManager(), notNullValue()); } + + public void testChaCha20InCiphersOnJdk12Plus() { + assumeTrue("Test is only valid on JDK 12+ JVM", JavaVersion.current().compareTo(JavaVersion.parse("12")) > -1); + assertThat(SslConfigurationLoader.DEFAULT_CIPHERS, hasItem("TLS_CHACHA20_POLY1305_SHA256")); + assertThat(SslConfigurationLoader.DEFAULT_CIPHERS, hasItem("TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256")); + assertThat(SslConfigurationLoader.DEFAULT_CIPHERS, hasItem("TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256")); + } + + public void testChaCha20NotInCiphersOnPreJdk12() { + assumeTrue("Test is only valid on pre JDK 12 JVM", JavaVersion.current().compareTo(JavaVersion.parse("12")) < 0); + assertThat(SslConfigurationLoader.DEFAULT_CIPHERS, not(hasItem("TLS_CHACHA20_POLY1305_SHA256"))); + 
assertThat(SslConfigurationLoader.DEFAULT_CIPHERS, not(hasItem("TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256"))); + assertThat(SslConfigurationLoader.DEFAULT_CIPHERS, not(hasItem("TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256"))); + } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java index a794c409e4ef4..afde17d9ad014 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java @@ -23,7 +23,6 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.cjk.CJKBigramFilter; import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute; -import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -99,14 +98,7 @@ public TokenStream create(TokenStream tokenStream) { @Override public TokenFilterFactory getSynonymFilter() { if (outputUnigrams) { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + - "] cannot be used to parse synonyms"); - } - else { - DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - } + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } return this; } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index e05305a8ebd39..f095b766ee1d5 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -173,7 +173,6 @@ public Map>> getAn analyzers.put("fingerprint", FingerprintAnalyzerProvider::new); // TODO remove in 8.0 - analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new); analyzers.put("pattern", PatternAnalyzerProvider::new); analyzers.put("snowball", SnowballAnalyzerProvider::new); @@ -233,7 +232,6 @@ public Map> getTokenFilters() { filters.put("condition", requiresAnalysisSettings((i, e, n, s) -> new ScriptedConditionTokenFilterFactory(i, n, s, scriptService.get()))); filters.put("decimal_digit", DecimalDigitFilterFactory::new); - filters.put("delimited_payload_filter", LegacyDelimitedPayloadTokenFilterFactory::new); filters.put("delimited_payload", DelimitedPayloadTokenFilterFactory::new); filters.put("dictionary_decompounder", requiresAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new)); filters.put("dutch_stem", DutchStemTokenFilterFactory::new); @@ -376,14 +374,6 @@ public List getPreBuiltAnalyzerProviderFactorie public List getPreConfiguredCharFilters() { List filters = new ArrayList<>(); filters.add(PreConfiguredCharFilter.singleton("html_strip", false, HTMLStripCharFilter::new)); - filters.add(PreConfiguredCharFilter.singletonWithVersion("htmlStrip", false, (reader, version) -> { - if (version.onOrAfter(org.elasticsearch.Version.V_6_3_0)) { - deprecationLogger.deprecatedAndMaybeLog("htmlStrip_deprecation", - 
"The [htmpStrip] char filter name is deprecated and will be removed in a future version. " - + "Please change the filter name to [html_strip] instead."); - } - return new HTMLStripCharFilter(reader); - })); return filters; } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactory.java index 933a02f74e755..a3283769972a5 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactory.java @@ -24,7 +24,6 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.commongrams.CommonGramsFilter; import org.apache.lucene.analysis.commongrams.CommonGramsQueryFilter; -import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -68,14 +67,7 @@ public TokenStream create(TokenStream tokenStream) { @Override public TokenFilterFactory getSynonymFilter() { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); - } else { - DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - } - - return this; + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java index 128b3d1cf82dc..bdbf6498bce5d 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java @@ -23,7 +23,6 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; import org.apache.lucene.analysis.reverse.ReverseStringFilter; -import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -87,13 +86,6 @@ public boolean breaksFastVectorHighlighter() { @Override public TokenFilterFactory getSynonymFilter() { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); - } - else { - DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - return this; - } + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintTokenFilterFactory.java index 433fa4d7dee12..914c157324e2c 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintTokenFilterFactory.java +++ 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintTokenFilterFactory.java @@ -22,7 +22,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.FingerprintFilter; -import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -56,14 +55,7 @@ public TokenStream create(TokenStream tokenStream) { @Override public TokenFilterFactory getSynonymFilter() { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); - } - else { - DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - return this; - } + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyDelimitedPayloadTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyDelimitedPayloadTokenFilterFactory.java deleted file mode 100644 index ae20a5fbf5a86..0000000000000 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyDelimitedPayloadTokenFilterFactory.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.analysis.common; - -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.Version; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.IndexSettings; - -public class LegacyDelimitedPayloadTokenFilterFactory extends DelimitedPayloadTokenFilterFactory { - - private static final DeprecationLogger deprecationLogger = - new DeprecationLogger(LogManager.getLogger(LegacyDelimitedPayloadTokenFilterFactory.class)); - - LegacyDelimitedPayloadTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(indexSettings, env, name, settings); - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException( - "[delimited_payload_filter] is not supported for new indices, use [delimited_payload] instead"); - } - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_2_0)) { - deprecationLogger.deprecated("Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]"); - } - } -} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java index 9c53fc1f63e3b..a20ed8c1e43f1 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java @@ -25,7 +25,6 @@ import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter; import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; @@ -62,18 +61,7 @@ public TokenStream create(TokenStream tokenStream) { @Override public TokenFilterFactory getSynonymFilter() { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); - } - else { - if (preserveOriginal) { - DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - return IDENTITY_FILTER; - } - throw new IllegalArgumentException("Token filter [" + name() - + "] cannot be used to parse synonyms unless [preserve_original] is [true]"); - } + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } @Override @@ -120,18 +108,7 @@ public TokenStream create(TokenStream tokenStream) { @Override public TokenFilterFactory getSynonymFilter() { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); - } - else { - if (preserveOriginal) { - DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - return IDENTITY_FILTER; - } - throw new IllegalArgumentException("Token filter [" + name() - + "] cannot be used to parse synonyms unless 
[preserve_original] is [true]"); - } + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } }; } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java index 6abf2cbd37cb2..ab8d9e4d76e8d 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java @@ -27,7 +27,6 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; -import org.elasticsearch.Version; import org.elasticsearch.index.analysis.TokenFilterFactory; @@ -47,15 +46,10 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory { this.maxGram = settings.getAsInt("max_gram", 2); int ngramDiff = maxGram - minGram; if (ngramDiff > maxAllowedNgramDiff) { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException( - "The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: [" - + maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the [" - + IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + "] index level setting."); - } else { - deprecationLogger.deprecated("Deprecated big difference between max_gram and min_gram in NGram Tokenizer," - + "expected difference must be less than or equal to: [" + maxAllowedNgramDiff + "]"); - } + throw new IllegalArgumentException( + "The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: [" + + maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. 
This limit can be set by changing the [" + + IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + "] index level setting."); } } @@ -67,13 +61,6 @@ public TokenStream create(TokenStream tokenStream) { @Override public TokenFilterFactory getSynonymFilter() { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); - } - else { - DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - return this; - } + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java index e811d0fbc4bda..39ee16dd44b1a 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java @@ -21,7 +21,6 @@ import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ngram.NGramTokenizer; -import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -91,15 +90,10 @@ static CharMatcher parseTokenChars(List characterClasses) { this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); int ngramDiff = maxGram - minGram; if (ngramDiff > maxAllowedNgramDiff) { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException( - "The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: [" - + maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the [" - + IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + "] index level setting."); - } else { - deprecationLogger.deprecated("Deprecated big difference between max_gram and min_gram in NGram Tokenizer," - + "expected difference must be less than or equal to: [" + maxAllowedNgramDiff + "]"); - } + throw new IllegalArgumentException( + "The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: [" + + maxAllowedNgramDiff + "] but was [" + ngramDiff + "]. This limit can be set by changing the [" + + IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + "] index level setting."); } this.matcher = parseTokenChars(settings.getAsList("token_chars")); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzerProvider.java deleted file mode 100644 index 5dd475cc5e408..0000000000000 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzerProvider.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.analysis.common; - -import org.apache.logging.log4j.LogManager; -import org.apache.lucene.analysis.CharArraySet; -import org.elasticsearch.Version; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; -import org.elasticsearch.index.analysis.Analysis; - -public class StandardHtmlStripAnalyzerProvider extends AbstractIndexAnalyzerProvider { - - private static final DeprecationLogger DEPRECATION_LOGGER = - new DeprecationLogger(LogManager.getLogger(StandardHtmlStripAnalyzerProvider.class)); - - private final StandardHtmlStripAnalyzer analyzer; - - /** - * @deprecated in 6.5, can not create in 7.0, and we remove this in 8.0 - */ - @Deprecated - StandardHtmlStripAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(indexSettings, name, settings); - final CharArraySet defaultStopwords = CharArraySet.EMPTY_SET; - CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords); - analyzer = new StandardHtmlStripAnalyzer(stopWords); - analyzer.setVersion(version); - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("[standard_html_strip] analyzer is not supported for new indices, " + - "use a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter"); - } else { - DEPRECATION_LOGGER.deprecatedAndMaybeLog("standard_html_strip_deprecation", - "Deprecated analyzer [standard_html_strip] used, " + - "replace it with a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter"); - } - } - - @Override - public StandardHtmlStripAnalyzer get() { - return this.analyzer; - } -} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java index e64bb96bfcfd4..6285d25e01f17 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java @@ -24,7 +24,6 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator; -import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -106,14 +105,7 @@ public TokenStream create(TokenStream tokenStream) { @Override public TokenFilterFactory getSynonymFilter() { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter 
[" + name() + "] cannot be used to parse synonyms"); - } - else { - DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - return this; - } + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } private int getFlag(int flag, Settings settings, String key, boolean defaultValue) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java index 85f94a86c6cbf..f33687ee22439 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java @@ -24,7 +24,6 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator; -import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -112,14 +111,7 @@ public TokenStream create(TokenStream tokenStream) { @Override public TokenFilterFactory getSynonymFilter() { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); - } - else { - DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - return this; - } + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } public int getFlag(int flag, Settings settings, String key, boolean defaultValue) { diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java deleted file mode 100644 index f128c07361c45..0000000000000 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.analysis.common; - -import org.apache.lucene.analysis.Analyzer; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.IndexAnalyzers; -import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.VersionUtils; - -import java.io.IOException; - -public class CommonAnalysisPluginTests extends ESTestCase { - - /** - * Check that the deprecated analyzer name "standard_html_strip" throws exception for indices created since 7.0.0 - */ - public void testStandardHtmlStripAnalyzerDeprecationError() throws IOException { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(IndexMetaData.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT)) - .put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip") - .putList("index.analysis.analyzer.custom_analyzer.stopwords", "a", "b") - .build(); - - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin(); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> createTestAnalysis(idxSettings, settings, commonAnalysisPlugin)); - assertEquals("[standard_html_strip] analyzer is not supported for new indices, " + - "use a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter", ex.getMessage()); - } - - /** - * Check that the deprecated analyzer name "standard_html_strip" issues a deprecation warning for indices created since 6.5.0 until 7 - */ - public void testStandardHtmlStripAnalyzerDeprecationWarning() throws IOException { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(IndexMetaData.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, - VersionUtils.getPreviousVersion(Version.V_7_0_0))) - .put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip") - .putList("index.analysis.analyzer.custom_analyzer.stopwords", "a", "b") - .build(); - - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { - IndexAnalyzers analyzers = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).indexAnalyzers; - Analyzer analyzer = analyzers.get("custom_analyzer"); - assertNotNull(((NamedAnalyzer) analyzer).analyzer()); - assertWarnings( - "Deprecated analyzer [standard_html_strip] used, " + - "replace it with a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter"); - } - } -} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactoryTests.java deleted file mode 100644 index e284877978851..0000000000000 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactoryTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to Elasticsearch under one 
or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.analysis.common; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.VersionUtils; - -import java.io.IOException; -import java.io.StringReader; -import java.util.Map; - - -public class HtmlStripCharFilterFactoryTests extends ESTestCase { - - /** - * Check that the deprecated name "htmlStrip" issues a deprecation warning for indices created since 6.3.0 - */ - public void testDeprecationWarning() throws IOException { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_6_3_0, Version.CURRENT)) - .build(); - - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { - Map charFilters = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).charFilter; - CharFilterFactory charFilterFactory = charFilters.get("htmlStrip"); - assertNotNull(charFilterFactory.create(new StringReader("input"))); - assertWarnings("The [htmpStrip] char filter name is deprecated and will be removed in a future version. 
" - + "Please change the filter name to [html_strip] instead."); - } - } - - /** - * Check that the deprecated name "htmlStrip" does NOT issues a deprecation warning for indices created before 6.3.0 - */ - public void testNoDeprecationWarningPre6_3() throws IOException { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(IndexMetaData.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_2_4)) - .build(); - - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { - Map charFilters = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).charFilter; - CharFilterFactory charFilterFactory = charFilters.get("htmlStrip"); - assertNotNull(charFilterFactory.create(new StringReader(""))); - } - } -} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java index 6582188f33c0b..47fb4afae2ca8 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java @@ -42,11 +42,9 @@ import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; -import java.util.List; import java.util.Set; import static org.hamcrest.Matchers.equalTo; @@ -279,27 +277,6 @@ public void testPreconfiguredTokenFilters() throws IOException { tf.get(idxSettings, null, tf.getName(), settings).getSynonymFilter(); } } - - Settings settings2 = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0))) - .put("path.home", createTempDir().toString()) - .putList("common_words", "a", "b") - .put("output_unigrams", "true") - .build(); - IndexSettings idxSettings2 = IndexSettingsModule.newIndexSettings("index", settings2); - - List expectedWarnings = new ArrayList<>(); - for (PreConfiguredTokenFilter tf : plugin.getPreConfiguredTokenFilters()) { - if (disallowedFilters.contains(tf.getName())) { - tf.get(idxSettings2, null, tf.getName(), settings2).getSynonymFilter(); - expectedWarnings.add("Token filter [" + tf.getName() + "] will not be usable to parse synonyms after v7.0"); - } - else { - tf.get(idxSettings2, null, tf.getName(), settings2).getSynonymFilter(); - } - } - assertWarnings(expectedWarnings.toArray(new String[0])); } public void testDisallowedTokenFilters() throws IOException { @@ -332,46 +309,6 @@ public void testDisallowedTokenFilters() throws IOException { + "] cannot be used to parse synonyms", e.getMessage()); } - - settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0))) - .put("path.home", createTempDir().toString()) - .putList("common_words", "a", "b") - .put("output_unigrams", "true") - .build(); - idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - - List expectedWarnings = new ArrayList<>(); - for (String factory : disallowedFactories) { - TokenFilterFactory tff = 
plugin.getTokenFilters().get(factory).get(idxSettings, null, factory, settings); - TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings); - SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings); - - stff.buildSynonymAnalyzer(tok, Collections.emptyList(), Collections.singletonList(tff), null); - expectedWarnings.add("Token filter [" + factory - + "] will not be usable to parse synonyms after v7.0"); - } - - assertWarnings(expectedWarnings.toArray(new String[0])); - - settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0))) - .put("path.home", createTempDir().toString()) - .put("preserve_original", "false") - .build(); - idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - TokenFilterFactory tff = plugin.getTokenFilters().get("multiplexer").get(idxSettings, null, "multiplexer", settings); - TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings); - SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings); - - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> stff.buildSynonymAnalyzer(tok, Collections.emptyList(), Collections.singletonList(tff), null)); - - assertEquals("Token filter [multiplexer] cannot be used to parse synonyms unless [preserve_original] is [true]", - e.getMessage()); - } private void match(String analyzerName, String source, String target) throws IOException { diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml index 3ce4b6bc04ed3..3486b9defd9d2 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml @@ -1135,30 +1135,6 @@ - match: { tokens.9.token: 落ち } - match: { tokens.10.token: ちた } ---- -"delimited_payload_filter_error": - - do: - catch: /\[delimited_payload_filter\] is not supported for new indices, use \[delimited_payload\] instead/ - indices.create: - index: test - body: - settings: - analysis: - filter: - my_delimited_payload_filter: - type: delimited_payload_filter - delimiter: ^ - encoding: identity - - # Test pre-configured token filter too: - - do: - catch: /\[delimited_payload_filter\] is not supported for new indices, use \[delimited_payload\] instead/ - indices.analyze: - body: - text: foo|5 - tokenizer: keyword - filter: [delimited_payload_filter] - --- "delimited_payload": - do: diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml index baa32662b577f..7017b0c5a0859 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml @@ -14,56 +14,6 @@ - match: { error.type: illegal_argument_exception } - match: { error.reason: "Custom normalizer may not use filter [word_delimiter]" } ---- -"htmlStrip_deprecated": - - skip: - reason: deprecated - features: "warnings" - - - do: - indices.create: - index: 
test_deprecated_htmlstrip - body: - settings: - index: - analysis: - analyzer: - my_htmlStripWithCharfilter: - tokenizer: keyword - char_filter: ["htmlStrip"] - mappings: - properties: - name: - type: text - analyzer: my_htmlStripWithCharfilter - - - do: - warnings: - - 'The [htmpStrip] char filter name is deprecated and will be removed in a future version. Please change the filter name to [html_strip] instead.' - index: - index: test_deprecated_htmlstrip - id: 1 - body: { "name": "foo bar" } - - - do: - warnings: - - 'The [htmpStrip] char filter name is deprecated and will be removed in a future version. Please change the filter name to [html_strip] instead.' - index: - index: test_deprecated_htmlstrip - id: 2 - body: { "name": "foo baz" } - - - do: - warnings: - - 'The [htmpStrip] char filter name is deprecated and will be removed in a future version. Please change the filter name to [html_strip] instead.' - indices.analyze: - index: test_deprecated_htmlstrip - body: - analyzer: "my_htmlStripWithCharfilter" - text: "foo" - - length: { tokens: 1 } - - match: { tokens.0.token: "\nfoo\n" } - --- "Synonym filter with tokenizer": - do: diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/HtmlStripProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/HtmlStripProcessor.java new file mode 100644 index 0000000000000..aaeb5b3310b53 --- /dev/null +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/HtmlStripProcessor.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.common; + +import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter; +import org.elasticsearch.ElasticsearchException; + +import java.io.IOException; +import java.io.StringReader; +import java.util.Map; + +public final class HtmlStripProcessor extends AbstractStringProcessor { + + public static final String TYPE = "html_strip"; + + HtmlStripProcessor(String tag, String field, boolean ignoreMissing, String targetField) { + super(tag, field, ignoreMissing, targetField); + } + + @Override + protected String process(String value) { + // shortcut, no need to create a string builder and go through each char + if (value.contains("<") == false || value.contains(">") == false) { + return value; + } + + HTMLStripCharFilter filter = new HTMLStripCharFilter(new StringReader(value)); + + StringBuilder builder = new StringBuilder(); + int ch; + try { + while ((ch = filter.read()) != -1) { + builder.append((char)ch); + } + } catch (IOException e) { + throw new ElasticsearchException(e); + } + + return builder.toString(); + } + + @Override + public String getType() { + return TYPE; + } + + public static final class Factory extends AbstractStringProcessor.Factory { + + public Factory() { + super(TYPE); + } + + @Override + protected HtmlStripProcessor newProcessor(String tag, Map config, String field, + boolean ignoreMissing, String targetField) { + return new HtmlStripProcessor(tag, field, ignoreMissing, targetField); + } + } +} diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java index c3b1328d0b276..f194fe0c3d6c0 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java @@ -87,7 +87,8 @@ public Map getProcessors(Processor.Parameters paramet entry(BytesProcessor.TYPE, new BytesProcessor.Factory()), entry(PipelineProcessor.TYPE, new PipelineProcessor.Factory(parameters.ingestService)), entry(DissectProcessor.TYPE, new DissectProcessor.Factory()), - entry(DropProcessor.TYPE, new DropProcessor.Factory())); + entry(DropProcessor.TYPE, new DropProcessor.Factory()), + entry(HtmlStripProcessor.TYPE, new HtmlStripProcessor.Factory())); } @Override diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/HtmlStripProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/HtmlStripProcessorFactoryTests.java new file mode 100644 index 0000000000000..ccadcd2770f4a --- /dev/null +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/HtmlStripProcessorFactoryTests.java @@ -0,0 +1,27 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.common; + +public class HtmlStripProcessorFactoryTests extends AbstractStringProcessorFactoryTestCase { + @Override + protected AbstractStringProcessor.Factory newFactory() { + return new HtmlStripProcessor.Factory(); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapperTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/HtmlStripProcessorTests.java similarity index 53% rename from server/src/test/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapperTests.java rename to modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/HtmlStripProcessorTests.java index 9566e1afa6df0..79ccff84a726e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapperTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/HtmlStripProcessorTests.java @@ -17,25 +17,22 @@ * under the License. */ -package org.elasticsearch.index.mapper; +package org.elasticsearch.ingest.common; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESSingleNodeTestCase; - -public class LegacyTypeFieldMapperTests extends ESSingleNodeTestCase { +public class HtmlStripProcessorTests extends AbstractStringProcessorTestCase { @Override - protected boolean forbidPrivateIndexSettings() { - return false; + protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) { + return new HtmlStripProcessor(randomAlphaOfLength(10), field, ignoreMissing, targetField); } - public void testDocValuesMultipleTypes() throws Exception { - TypeFieldMapperTests.testDocValues(index -> { - final Settings settings = Settings.builder().put(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey(), Version.V_6_0_0).build(); - return this.createIndex(index, settings); - }); + @Override + protected String modifyInput(String input) { + return "

test" + input + "

test"; } + @Override + protected String expectedResult(String input) { + return "\ntest" + input + "\ntest"; + } } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml index f83a9e78cb3fe..8a803eae1fc3d 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml @@ -23,6 +23,7 @@ - contains: { nodes.$master.ingest.processors: { type: foreach } } - contains: { nodes.$master.ingest.processors: { type: grok } } - contains: { nodes.$master.ingest.processors: { type: gsub } } + - contains: { nodes.$master.ingest.processors: { type: html_strip } } - contains: { nodes.$master.ingest.processors: { type: join } } - contains: { nodes.$master.ingest.processors: { type: json } } - contains: { nodes.$master.ingest.processors: { type: kv } } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/40_mutate.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/40_mutate.yml index 11b6a64cd3fdf..9de9d19c0b879 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/40_mutate.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/40_mutate.yml @@ -76,6 +76,11 @@ teardown: "pattern" : "-", "replacement" : "." } + }, + { + "html_strip" : { + "field" : "field_to_html_strip" + } } ] } @@ -96,7 +101,8 @@ teardown: "field_to_split": "127-0-0-1", "field_to_join": ["127","0","0","1"], "field_to_convert": ["127","0","0","1"], - "field_to_gsub": "127-0-0-1" + "field_to_gsub": "127-0-0-1", + "field_to_html_strip": "

this is a test" } - do: @@ -114,6 +120,7 @@ teardown: - match: { _source.field_to_join: "127-0-0-1" } - match: { _source.field_to_convert: [127,0,0,1] } - match: { _source.field_to_gsub: "127.0.0.1" } + - match: { _source.field_to_html_strip: "\nthis \nis\n a test" } --- "Test metadata": diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java index 372b328bbfc1a..a9a44d0471586 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.action.Action; +import org.elasticsearch.common.io.stream.Writeable; public class MultiSearchTemplateAction extends Action { @@ -32,6 +33,11 @@ private MultiSearchTemplateAction() { @Override public MultiSearchTemplateResponse newResponse() { - return new MultiSearchTemplateResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return MultiSearchTemplateResponse::new; } } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java index 70e36ed85a496..68adcffc4c4d0 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -43,11 +43,18 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera /** * A search template response item, holding the actual search template response, or an error message if it failed. 
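For reference, the html_strip processor introduced above does little more than run the field value through Lucene's HTMLStripCharFilter and collect the filtered characters. A rough standalone sketch of that loop follows; it is illustrative only and not part of this change, and the class and method names (HtmlStripSketch, strip) are made up:

import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;

import java.io.IOException;
import java.io.StringReader;

public class HtmlStripSketch {

    // Mirrors HtmlStripProcessor#process: skip values without markup, otherwise
    // drain the char filter into a StringBuilder.
    static String strip(String value) throws IOException {
        if (value.contains("<") == false || value.contains(">") == false) {
            return value;
        }
        try (HTMLStripCharFilter filter = new HTMLStripCharFilter(new StringReader(value))) {
            StringBuilder builder = new StringBuilder();
            int ch;
            while ((ch = filter.read()) != -1) {
                builder.append((char) ch);
            }
            return builder.toString();
        }
    }

    public static void main(String[] args) throws IOException {
        // Tags are removed; block-level tags end up as newlines, which is what the
        // "\nthis \nis\n a test" expectation in the 40_mutate.yml test above reflects.
        System.out.println(strip("<p>this is a test"));
    }
}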
*/ - public static class Item implements Streamable { - private SearchTemplateResponse response; - private Exception exception; + public static class Item implements Writeable { + private final SearchTemplateResponse response; + private final Exception exception; - Item() { + private Item(StreamInput in) throws IOException { + if (in.readBoolean()) { + this.response = new SearchTemplateResponse(in); + this.exception = null; + } else { + exception = in.readException(); + this.response = null; + } } public Item(SearchTemplateResponse response, Exception exception) { @@ -78,22 +85,6 @@ public SearchTemplateResponse getResponse() { return this.response; } - public static Item readItem(StreamInput in) throws IOException { - Item item = new Item(); - item.readFrom(in); - return item; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - if (in.readBoolean()) { - this.response = new SearchTemplateResponse(); - response.readFrom(in); - } else { - exception = in.readException(); - } - } - @Override public void writeTo(StreamOutput out) throws IOException { if (response != null) { @@ -113,17 +104,25 @@ public Exception getFailure() { public String toString() { return "Item [response=" + response + ", exception=" + exception + "]"; } - - } - private Item[] items; - private long tookInMillis; + private final Item[] items; + private final long tookInMillis; - MultiSearchTemplateResponse() { + MultiSearchTemplateResponse(StreamInput in) throws IOException { + super(in); + items = new Item[in.readVInt()]; + for (int i = 0; i < items.length; i++) { + items[i] = new Item(in); + } + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + tookInMillis = in.readVLong(); + } else { + tookInMillis = -1L; + } } - public MultiSearchTemplateResponse(Item[] items, long tookInMillis) { + MultiSearchTemplateResponse(Item[] items, long tookInMillis) { this.items = items; this.tookInMillis = tookInMillis; } @@ -149,14 +148,7 @@ public TimeValue getTook() { @Override public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - items = new Item[in.readVInt()]; - for (int i = 0; i < items.length; i++) { - items[i] = Item.readItem(in); - } - if (in.getVersion().onOrAfter(Version.V_7_0_0)) { - tookInMillis = in.readVLong(); - } + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } @Override diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java index 2c46b6f694add..5195ce9396313 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java @@ -19,9 +19,7 @@ package org.elasticsearch.script.mustache; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -40,10 +38,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestMultiSearchTemplateAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(RestMultiSearchTemplateAction.class)); - static final String 
TYPES_DEPRECATION_MESSAGE = "[types removal]" + - " Specifying types in multi search template requests is deprecated."; private static final Set RESPONSE_PARAMS; @@ -65,10 +59,6 @@ public RestMultiSearchTemplateAction(Settings settings, RestController controlle controller.registerHandler(POST, "/_msearch/template", this); controller.registerHandler(GET, "/{index}/_msearch/template", this); controller.registerHandler(POST, "/{index}/_msearch/template", this); - - // Deprecated typed endpoints. - controller.registerHandler(GET, "/{index}/{type}/_msearch/template", this); - controller.registerHandler(POST, "/{index}/{type}/_msearch/template", this); } @Override @@ -79,14 +69,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { MultiSearchTemplateRequest multiRequest = parseRequest(request, allowExplicitIndex); - - // Emit a single deprecation message if any search template contains types. - for (SearchTemplateRequest searchTemplateRequest : multiRequest.requests()) { - if (searchTemplateRequest.getRequest().types().length > 0) { - deprecationLogger.deprecatedAndMaybeLog("msearch_with_types", TYPES_DEPRECATION_MESSAGE); - break; - } - } return channel -> client.execute(MultiSearchTemplateAction.INSTANCE, multiRequest, new RestToXContentListener<>(channel)); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index 70a12f0c8bf56..f80d6ef43d05e 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -54,10 +54,6 @@ public RestSearchTemplateAction(Settings settings, RestController controller) { controller.registerHandler(POST, "/_search/template", this); controller.registerHandler(GET, "/{index}/_search/template", this); controller.registerHandler(POST, "/{index}/_search/template", this); - - // Deprecated typed endpoints. 
- controller.registerHandler(GET, "/{index}/{type}/_search/template", this); - controller.registerHandler(POST, "/{index}/{type}/_search/template", this); } @Override diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java index a08329f48dcbb..5d905ec39e1ab 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.action.Action; +import org.elasticsearch.common.io.stream.Writeable; public class SearchTemplateAction extends Action { @@ -32,6 +33,11 @@ private SearchTemplateAction() { @Override public SearchTemplateResponse newResponse() { - return new SearchTemplateResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return SearchTemplateResponse::new; } } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index 6d19afbfd6fe6..52a3a8b28826c 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -48,6 +48,12 @@ public class SearchTemplateResponse extends ActionResponse implements StatusToXC SearchTemplateResponse() { } + SearchTemplateResponse(StreamInput in) throws IOException { + super(in); + source = in.readOptionalBytesReference(); + response = in.readOptionalWriteable(SearchResponse::new); + } + public BytesReference getSource() { return source; } @@ -81,10 +87,8 @@ public void writeTo(StreamOutput out) throws IOException { } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - source = in.readOptionalBytesReference(); - response = in.readOptionalStreamable(SearchResponse::new); + public void readFrom(StreamInput in) { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } public static SearchTemplateResponse fromXContent(XContentParser parser) throws IOException { diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateActionTests.java deleted file mode 100644 index eacb1e3c4e803..0000000000000 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateActionTests.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
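The change running through the lang-mustache classes above (and through PainlessExecuteAction further down) is the Streamable-to-Writeable migration: readFrom and newResponse are dead-ended with UnsupportedOperationException, deserialization moves into a constructor that takes a StreamInput, and the Action hands out a Writeable.Reader instead. A minimal sketch of that shape, using a hypothetical ExampleResponse rather than any class from this diff:

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;

public final class ExampleResponse implements Writeable {

    private final long took;

    public ExampleResponse(long took) {
        this.took = took;
    }

    // Deserializing constructor: fields can now be final because they are assigned
    // exactly once while reading from the stream.
    public ExampleResponse(StreamInput in) throws IOException {
        this.took = in.readVLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(took);
    }

    // What an Action exposes via getResponseReader() instead of newResponse():
    public static Writeable.Reader<ExampleResponse> reader() {
        return ExampleResponse::new;
    }
}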
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.script.mustache; - -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.nio.charset.StandardCharsets; - -public class RestMultiSearchTemplateActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - new RestMultiSearchTemplateAction(Settings.EMPTY, controller()); - } - - public void testTypeInPath() { - String content = "{ \"index\": \"some_index\" } \n" + - "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n"; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_msearch/template") - .withContent(bytesContent, XContentType.JSON) - .build(); - - dispatchRequest(request); - assertWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() { - String content = "{ \"index\": \"some_index\", \"type\": \"some_type\" } \n" + - "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n"; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withPath("/some_index/_msearch/template") - .withContent(bytesContent, XContentType.JSON) - .build(); - - dispatchRequest(request); - assertWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java deleted file mode 100644 index 0da8afbae0402..0000000000000 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/RestSearchTemplateActionTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.script.mustache; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.util.HashMap; -import java.util.Map; - -public class RestSearchTemplateActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - new RestSearchTemplateAction(Settings.EMPTY, controller()); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_search/template") - .build(); - - dispatchRequest(request); - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new HashMap<>(); - params.put("type", "some_type"); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/_search/template") - .withParams(params) - .build(); - - dispatchRequest(request); - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextDocGenerator.java b/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextDocGenerator.java index fbea8d91726c9..babc3e10e55db 100644 --- a/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextDocGenerator.java +++ b/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextDocGenerator.java @@ -24,10 +24,12 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.painless.action.PainlessContextClassBindingInfo; import org.elasticsearch.painless.action.PainlessContextClassInfo; import org.elasticsearch.painless.action.PainlessContextConstructorInfo; import org.elasticsearch.painless.action.PainlessContextFieldInfo; import org.elasticsearch.painless.action.PainlessContextInfo; +import org.elasticsearch.painless.action.PainlessContextInstanceBindingInfo; import org.elasticsearch.painless.action.PainlessContextMethodInfo; import java.io.IOException; @@ -69,26 +71,30 @@ public final class ContextDocGenerator { public static void main(String[] args) throws IOException { List contextInfos = getContextInfos(); - Set sharedClassInfos = createShared(contextInfos); + Set sharedStaticInfos = createSharedStatics(contextInfos); + Set sharedClassInfos = createSharedClasses(contextInfos); Path rootDir = resetRootDir(); Path sharedDir = createSharedDir(rootDir); - List classInfos = sortClassInfos(new ArrayList<>(sharedClassInfos), Collections.emptySet()); + List staticInfos = sortStaticInfos(Collections.emptySet(), new ArrayList<>(sharedStaticInfos)); + List classInfos = sortClassInfos(Collections.emptySet(), new ArrayList<>(sharedClassInfos)); Map javaNamesToDisplayNames = getDisplayNames(classInfos); - printSharedIndexPage(sharedDir, javaNamesToDisplayNames, classInfos); + printSharedIndexPage(sharedDir, javaNamesToDisplayNames, staticInfos, classInfos); printSharedPackagesPages(sharedDir, javaNamesToDisplayNames, classInfos); Set isSpecialized = new HashSet<>(); for (PainlessContextInfo contextInfo : contextInfos) { - Path contextDir = createContextDir(rootDir, contextInfo); - classInfos 
= sortClassInfos(new ArrayList<>(contextInfo.getClasses()), sharedClassInfos); + staticInfos = createContextStatics(contextInfo); + staticInfos = sortStaticInfos(sharedStaticInfos, staticInfos); + classInfos = sortClassInfos(sharedClassInfos, new ArrayList<>(contextInfo.getClasses())); - if (classInfos.isEmpty() == false) { + if (staticInfos.isEmpty() == false || classInfos.isEmpty() == false) { + Path contextDir = createContextDir(rootDir, contextInfo); isSpecialized.add(contextInfo); javaNamesToDisplayNames = getDisplayNames(contextInfo.getClasses()); - printContextIndexPage(contextDir, javaNamesToDisplayNames, sharedClassInfos, contextInfo, classInfos); + printContextIndexPage(contextDir, javaNamesToDisplayNames, contextInfo, staticInfos, classInfos); printContextPackagesPages(contextDir, javaNamesToDisplayNames, sharedClassInfos, contextInfo, classInfos); } } @@ -123,12 +129,44 @@ private static List getContextInfos() throws IOException { return contextInfos; } - private static Set createShared(List contextInfos) { + private static Set createSharedStatics(List contextInfos) { + Map staticInfoCounts = new HashMap<>(); + + for (PainlessContextInfo contextInfo : contextInfos) { + for (PainlessContextMethodInfo methodInfo : contextInfo.getImportedMethods()) { + staticInfoCounts.merge(methodInfo, 1, Integer::sum); + } + + for (PainlessContextClassBindingInfo classBindingInfo : contextInfo.getClassBindings()) { + staticInfoCounts.merge(classBindingInfo, 1, Integer::sum); + } + + for (PainlessContextInstanceBindingInfo instanceBindingInfo : contextInfo.getInstanceBindings()) { + staticInfoCounts.merge(instanceBindingInfo, 1, Integer::sum); + } + } + + return staticInfoCounts.entrySet().stream().filter( + e -> e.getValue() == contextInfos.size() + ).map(Map.Entry::getKey).collect(Collectors.toSet()); + } + + private static List createContextStatics(PainlessContextInfo contextInfo) { + List staticInfos = new ArrayList<>(); + + staticInfos.addAll(contextInfo.getImportedMethods()); + staticInfos.addAll(contextInfo.getClassBindings()); + staticInfos.addAll(contextInfo.getInstanceBindings()); + + return staticInfos; + } + + private static Set createSharedClasses(List contextInfos) { Map classInfoCounts = new HashMap<>(); for (PainlessContextInfo contextInfo : contextInfos) { for (PainlessContextClassInfo classInfo : contextInfo.getClasses()) { - classInfoCounts.compute(classInfo, (k, v) -> v == null ? 
1 : v + 1); + classInfoCounts.merge(classInfo, 1, Integer::sum); } } @@ -165,8 +203,8 @@ private static void printAutomatedMessage(PrintStream stream) { stream.println(); } - private static void printSharedIndexPage( - Path sharedDir, Map javaNamesToDisplayNames, List classInfos) throws IOException { + private static void printSharedIndexPage(Path sharedDir, Map javaNamesToDisplayNames, + List staticInfos, List classInfos) throws IOException { Path sharedIndexPath = sharedDir.resolve("index.asciidoc"); @@ -181,13 +219,12 @@ private static void printSharedIndexPage( sharedIndexStream.println(); sharedIndexStream.println("The following API is available in all contexts."); - printIndex(sharedIndexStream, SHARED_HEADER, javaNamesToDisplayNames, Collections.emptySet(), classInfos); + printIndex(sharedIndexStream, SHARED_HEADER, javaNamesToDisplayNames, staticInfos, classInfos); } } private static void printContextIndexPage(Path contextDir, Map javaNamesToDisplayNames, - Set excludes, PainlessContextInfo contextInfo, List classInfos) - throws IOException { + PainlessContextInfo contextInfo, List staticInfos, List classInfos) throws IOException { Path contextIndexPath = contextDir.resolve("index.asciidoc"); @@ -205,34 +242,58 @@ private static void printContextIndexPage(Path contextDir, Map j contextIndexStream.println( "* See the <<" + SHARED_HEADER + ", " + SHARED_NAME + " API>> for further API available in all contexts."); - printIndex(contextIndexStream, getContextHeader(contextInfo), javaNamesToDisplayNames, excludes, classInfos); + printIndex(contextIndexStream, getContextHeader(contextInfo), javaNamesToDisplayNames, staticInfos, classInfos); } } private static void printIndex(PrintStream indexStream, String contextHeader, Map javaNamesToDisplayNames, - Set excludes, List classInfos) { + List staticInfos, List classInfos) { String currentPackageName = null; - for (PainlessContextClassInfo classInfo : classInfos) { - if (excludes.contains(classInfo)) { - continue; + if (staticInfos.isEmpty() == false) { + indexStream.println(); + indexStream.println("==== Static Methods"); + indexStream.println("The following methods are directly callable without a class/instance qualifier. " + + "Note parameters denoted by a (*) are treated as read-only values."); + indexStream.println(); + + for (Object staticInfo : staticInfos) { + if (staticInfo instanceof PainlessContextMethodInfo) { + printMethod(indexStream, javaNamesToDisplayNames, false, (PainlessContextMethodInfo)staticInfo); + } else if (staticInfo instanceof PainlessContextClassBindingInfo) { + printClassBinding(indexStream, javaNamesToDisplayNames, (PainlessContextClassBindingInfo)staticInfo); + } else if (staticInfo instanceof PainlessContextInstanceBindingInfo) { + printInstanceBinding(indexStream, javaNamesToDisplayNames, (PainlessContextInstanceBindingInfo)staticInfo); + } else { + throw new IllegalArgumentException("unexpected static info type"); + } } + } - String classPackageName = classInfo.getName().substring(0, classInfo.getName().lastIndexOf('.')); + if (classInfos.isEmpty() == false) { + indexStream.println(); + indexStream.println("==== Classes By Package"); + indexStream.println("The following classes are available grouped by their respective packages. 
Click on a class " + + "to view details about the available methods and fields."); + indexStream.println(); - if (classPackageName.equals(currentPackageName) == false) { - currentPackageName = classPackageName; + for (PainlessContextClassInfo classInfo : classInfos) { + String classPackageName = classInfo.getName().substring(0, classInfo.getName().lastIndexOf('.')); - indexStream.println(); - indexStream.println("==== " + currentPackageName); - indexStream.println("<<" + getPackageHeader(contextHeader, currentPackageName) + ", " + - "Expand details for " + currentPackageName + ">>"); - indexStream.println(); - } + if (classPackageName.equals(currentPackageName) == false) { + currentPackageName = classPackageName; - String className = getType(javaNamesToDisplayNames, classInfo.getName()); - indexStream.println("* <<" + getClassHeader(contextHeader, className) + ", " + className + ">>"); + indexStream.println(); + indexStream.println("==== " + currentPackageName); + indexStream.println("<<" + getPackageHeader(contextHeader, currentPackageName) + ", " + + "Expand details for " + currentPackageName + ">>"); + indexStream.println(); + } + + String className = getType(javaNamesToDisplayNames, classInfo.getName()); + indexStream.println("* <<" + getClassHeader(contextHeader, className) + ", " + className + ">>"); + } } indexStream.println(); @@ -289,8 +350,8 @@ private static void printPackages(PrintStream packagesStream, String contextName packagesStream.println(); packagesStream.println("[role=\"exclude\",id=\"" + getPackageHeader(contextHeader, currentPackageName) + "\"]"); packagesStream.println("=== " + contextName + " API for package " + currentPackageName); - packagesStream.println( - "See the <<" + contextHeader + ", " + contextName + " API>> for a high-level overview of all packages."); + packagesStream.println("See the <<" + contextHeader + ", " + contextName + " API>> " + + "for a high-level overview of all packages and classes."); } String className = getType(javaNamesToDisplayNames, classInfo.getName()); @@ -421,6 +482,49 @@ private static void printMethod( stream.println(")"); } + private static void printClassBinding( + PrintStream stream, Map javaNamesToDisplayNames, PainlessContextClassBindingInfo classBindingInfo) { + + stream.print("* " + getType(javaNamesToDisplayNames, classBindingInfo.getRtn()) + " " + classBindingInfo.getName() + "("); + + for (int parameterIndex = 0; parameterIndex < classBindingInfo.getParameters().size(); ++parameterIndex) { + // temporary fix to not print org.elasticsearch.script.ScoreScript parameter until + // class instance bindings are created and the information is appropriately added to the context info classes + if ("org.elasticsearch.script.ScoreScript".equals( + getType(javaNamesToDisplayNames, classBindingInfo.getParameters().get(parameterIndex)))) { + continue; + } + + stream.print(getType(javaNamesToDisplayNames, classBindingInfo.getParameters().get(parameterIndex))); + + if (parameterIndex < classBindingInfo.getReadOnly()) { + stream.print(" *"); + } + + if (parameterIndex + 1 < classBindingInfo.getParameters().size()) { + stream.print(", "); + } + } + + stream.println(")"); + } + + private static void printInstanceBinding( + PrintStream stream, Map javaNamesToDisplayNames, PainlessContextInstanceBindingInfo instanceBindingInfo) { + + stream.print("* " + getType(javaNamesToDisplayNames, instanceBindingInfo.getRtn()) + " " + instanceBindingInfo.getName() + "("); + + for (int parameterIndex = 0; parameterIndex < 
instanceBindingInfo.getParameters().size(); ++parameterIndex) { + stream.print(getType(javaNamesToDisplayNames, instanceBindingInfo.getParameters().get(parameterIndex))); + + if (parameterIndex + 1 < instanceBindingInfo.getParameters().size()) { + stream.print(", "); + } + } + + stream.println(")"); + } + private static void printField( PrintStream stream, Map javaNamesToDisplayNames, boolean isStatic, PainlessContextFieldInfo fieldInfo) { @@ -602,15 +706,50 @@ private static String getContextName(PainlessContextInfo contextInfo) { return contextNameBuilder.substring(0, contextNameBuilder.length() - 1); } + private static List sortStaticInfos(Set staticExcludes, List staticInfos) { + staticInfos = new ArrayList<>(staticInfos); + staticInfos.removeIf(staticExcludes::contains); + + staticInfos.sort((si1, si2) -> { + String sv1; + String sv2; + + if (si1 instanceof PainlessContextMethodInfo) { + sv1 = ((PainlessContextMethodInfo)si1).getSortValue(); + } else if (si1 instanceof PainlessContextClassBindingInfo) { + sv1 = ((PainlessContextClassBindingInfo)si1).getSortValue(); + } else if (si1 instanceof PainlessContextInstanceBindingInfo) { + sv1 = ((PainlessContextInstanceBindingInfo)si1).getSortValue(); + } else { + throw new IllegalArgumentException("unexpected static info type"); + } + + if (si2 instanceof PainlessContextMethodInfo) { + sv2 = ((PainlessContextMethodInfo)si2).getSortValue(); + } else if (si2 instanceof PainlessContextClassBindingInfo) { + sv2 = ((PainlessContextClassBindingInfo)si2).getSortValue(); + } else if (si2 instanceof PainlessContextInstanceBindingInfo) { + sv2 = ((PainlessContextInstanceBindingInfo)si2).getSortValue(); + } else { + throw new IllegalArgumentException("unexpected static info type"); + } + + return sv1.compareTo(sv2); + }); + + return staticInfos; + } + private static List sortClassInfos( - List classInfos, Set excludes) { + Set classExcludes, List classInfos) { + classInfos = new ArrayList<>(classInfos); classInfos.removeIf(v -> "void".equals(v.getName()) || "boolean".equals(v.getName()) || "byte".equals(v.getName()) || "short".equals(v.getName()) || "char".equals(v.getName()) || "int".equals(v.getName()) || "long".equals(v.getName()) || "float".equals(v.getName()) || "double".equals(v.getName()) || "org.elasticsearch.painless.lookup.def".equals(v.getName()) || - isInternalClass(v.getName()) || excludes.contains(v) + isInternalClass(v.getName()) || classExcludes.contains(v) ); classInfos.sort((c1, c2) -> { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java index 217b5fc76588a..cb407978da83e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java @@ -101,7 +101,7 @@ private PainlessExecuteAction() { @Override public Response newResponse() { - return new Response(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } public static class Request extends SingleShardRequest implements ToXContentObject { @@ -381,20 +381,22 @@ public static class Response extends ActionResponse implements ToXContentObject private Object result; - Response() {} - Response(Object result) { this.result = result; } + Response(StreamInput in) throws IOException { + super(in); + result = in.readGenericValue(); + } + public 
Object getResult() { return result; } @Override public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - result = in.readGenericValue(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } @Override @@ -469,8 +471,8 @@ public TransportAction(ThreadPool threadPool, TransportService transportService, } @Override - protected Response newResponse() { - return new Response(); + protected Writeable.Reader getResponseReader() { + return Response::new; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/Augmentation.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/Augmentation.java index 0b751b7d2f78f..bbbbc3dfc37cf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/Augmentation.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/Augmentation.java @@ -503,4 +503,53 @@ public static String encodeBase64(String receiver) { public static String decodeBase64(String receiver) { return new String(Base64.getDecoder().decode(receiver.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8); } + + /** + * Split 'receiver' by 'token' as many times as possible.. + */ + public static String[] splitOnToken(String receiver, String token) { + return splitOnToken(receiver, token, -1); + } + + /** + * Split 'receiver' by 'token' up to 'limit' times. Any limit less than 1 is ignored. + */ + public static String[] splitOnToken(String receiver, String token, int limit) { + // Check if it's even possible to perform a split + if (receiver == null || receiver.length() == 0 || token == null || token.length() == 0 || receiver.length() < token.length()) { + return new String[] { receiver }; + } + + // List of string segments we have found + ArrayList result = new ArrayList(); + + // Keep track of where we are in the string + // indexOf(tok, startPos) is faster than creating a new search context ever loop with substring(start, end) + int pos = 0; + + // Loop until we hit the limit or forever if we are passed in less than one (signifying no limit) + // If Integer.MIN_VALUE is passed in, it will still continue to loop down to 1 from MAX_VALUE + // This edge case should be fine as we are limited by receiver length (Integer.MAX_VALUE) even if we split at every char + for(;limit != 1; limit--) { + + // Find the next occurrence of token after current pos + int idx = receiver.indexOf(token, pos); + + // Reached the end of the string without another match + if (idx == -1) { + break; + } + + // Add the found segment to the result list + result.add(receiver.substring(pos, idx)); + + // Move our search position to the next possible location + pos = idx + token.length(); + } + // Add the remaining string to the result list + result.add(receiver.substring(pos)); + + // O(N) or faster depending on implementation + return result.toArray(new String[0]); + } } diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/java.lang.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/java.lang.txt index ef2d462127f36..63ed6d41c676d 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/java.lang.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/java.lang.txt @@ -758,6 +758,8 @@ class java.lang.String { String copyValueOf(char[],int,int) String org.elasticsearch.painless.api.Augmentation decodeBase64() String 
org.elasticsearch.painless.api.Augmentation encodeBase64() + String[] org.elasticsearch.painless.api.Augmentation splitOnToken(String) + String[] org.elasticsearch.painless.api.Augmentation splitOnToken(String, int) boolean endsWith(String) boolean equalsIgnoreCase(String) String format(Locale,String,def[]) diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java index a0d1c5a58917e..70fbb733e2f8f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java @@ -23,6 +23,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.regex.Pattern; public class AugmentationTests extends ScriptTestCase { @@ -199,4 +200,43 @@ public void testFeatureTest() { assertEquals(8, exec("def ft = new org.elasticsearch.painless.FeatureTestObject();" + " ft.setX(3); ft.setY(2); return ft.addToTotal(3)")); } + + private static class SplitCase { + final String input; + final String token; + final int count; + + SplitCase(String input, String token, int count) { + this.input = input; + this.token = token; + this.count = count; + } + SplitCase(String input, String token) { + this(input, token, -1); + } + } + public void testString_SplitOnToken() { + SplitCase[] cases = new SplitCase[] { + new SplitCase("", ""), + new SplitCase("a,b,c", ","), + new SplitCase("a,b,c", "|"), + new SplitCase("a,,b,,c", ","), + new SplitCase("a,,b,,c", ",", 1), + new SplitCase("a,,b,,c", ",", 3), + new SplitCase("a,,b,,c", ",", 300), + new SplitCase("a,b,c", "a,b,c,d"), + new SplitCase("aaaaaaa", "a"), + new SplitCase("aaaaaaa", "a", 2), + new SplitCase("1.1.1.1.111", "1"), + new SplitCase("1.1.1.1.111", "."), + new SplitCase("1\n1.1.\r\n1\r\n111", "\r\n"), + }; + for (SplitCase split : cases) { + //System.out.println(String.format("Splitting '%s' by '%s' %d times", split.input, split.token, split.count)); + assertArrayEquals( + split.input.split(Pattern.quote(split.token), split.count), + (String[])exec("return \""+split.input+"\".splitOnToken(\""+split.token+"\", "+split.count+");") + ); + } + } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/action/PainlessExecuteResponseTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/action/PainlessExecuteResponseTests.java index c75497bd630e5..ed75caff76b50 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/action/PainlessExecuteResponseTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/action/PainlessExecuteResponseTests.java @@ -18,17 +18,57 @@ */ package org.elasticsearch.painless.action; -import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; -public class PainlessExecuteResponseTests extends AbstractStreamableTestCase { +import java.io.IOException; + +public class PainlessExecuteResponseTests extends AbstractSerializingTestCase { @Override - protected PainlessExecuteAction.Response createBlankInstance() { - return new PainlessExecuteAction.Response(); + protected Writeable.Reader instanceReader() { + return PainlessExecuteAction.Response::new; } @Override protected PainlessExecuteAction.Response 
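The splitOnToken augmentation added above is whitelisted for Painless, but since it is a plain public static method it can also be exercised directly from Java, which is the quickest way to see the semantics the AugmentationTests assert: a literal (non-regex) separator, with the limit capping the number of returned segments exactly like String#split with a Pattern.quote'd separator. A small usage sketch, illustrative only (SplitOnTokenExample is a made-up class name):

import org.elasticsearch.painless.api.Augmentation;

import java.util.Arrays;

public class SplitOnTokenExample {
    public static void main(String[] args) {
        // No limit: split on every occurrence of the literal token.
        // Prints [a, , b, , c]
        System.out.println(Arrays.toString(Augmentation.splitOnToken("a,,b,,c", ",")));

        // Limit of 3: at most three segments, the unsplit remainder is kept whole.
        // Prints [a, , b,,c]
        System.out.println(Arrays.toString(Augmentation.splitOnToken("a,,b,,c", ",", 3)));
    }
}

In a Painless script the same call reads as "a,,b,,c".splitOnToken(",", 3), per the java.lang.txt whitelist entries above.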
createTestInstance() { - return new PainlessExecuteAction.Response(randomAlphaOfLength(10)); + Object result; + switch (randomIntBetween(0, 2)) { + case 0: + result = randomAlphaOfLength(10); + break; + case 1: + result = randomBoolean(); + break; + case 2: + result = randomDoubleBetween(-10, 10, true); + break; + default: + throw new IllegalStateException("invalid branch"); + } + return new PainlessExecuteAction.Response(result); + } + + @Override + protected PainlessExecuteAction.Response doParseInstance(XContentParser parser) throws IOException { + parser.nextToken(); // START-OBJECT + parser.nextToken(); // FIELD-NAME + XContentParser.Token token = parser.nextToken(); // result value + Object result; + switch (token) { + case VALUE_STRING: + result = parser.text(); + break; + case VALUE_BOOLEAN: + result = parser.booleanValue(); + break; + case VALUE_NUMBER: + result = parser.doubleValue(); + break; + default: + throw new IOException("invalid response"); + } + return new PainlessExecuteAction.Response(result); } } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index c883bb5893c9f..3021f5b31606e 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -293,17 +293,9 @@ protected void doWriteTo(StreamOutput out) throws IOException { } else { out.writeBoolean(false); } - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - out.writeVInt(documents.size()); - for (BytesReference document : documents) { - out.writeBytesReference(document); - } - } else { - if (documents.size() > 1) { - throw new IllegalArgumentException("Nodes prior to 6.1.0 cannot accept multiple documents"); - } - BytesReference doc = documents.isEmpty() ? 
null : documents.iterator().next(); - out.writeOptionalBytesReference(doc); + out.writeVInt(documents.size()); + for (BytesReference document : documents) { + out.writeBytesReference(document); } if (documents.isEmpty() == false) { out.writeEnum(documentXContentType); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 9ad660b4e548c..d2038e2e2bfde 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -261,7 +261,7 @@ Query percolateQuery(String name, PercolateQuery.QueryStore queryStore, List createCandidateQuery(IndexReader indexReader, Versi } BooleanQuery.Builder candidateQuery = new BooleanQuery.Builder(); - if (canUseMinimumShouldMatchField && indexVersion.onOrAfter(Version.V_6_1_0)) { + if (canUseMinimumShouldMatchField) { LongValuesSource valuesSource = LongValuesSource.fromIntField(minimumShouldMatchField.name()); for (BytesRef extractedTerm : extractedTerms) { subQueries.add(new TermQuery(new Term(queryTermsField.name(), extractedTerm))); @@ -471,9 +471,7 @@ void processQuery(Query query, ParseContext context) { for (IndexableField field : fields) { context.doc().add(field); } - if (indexVersionCreated.onOrAfter(Version.V_6_1_0)) { - doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch)); - } + doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch)); } static Query parseQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, XContentParser parser) throws IOException { diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index fdcc9156b415e..bcec2548de307 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -30,7 +30,6 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSetIterator; -import org.elasticsearch.Version; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.search.SearchHit; @@ -61,7 +60,9 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept innerHitsExecute(context.query(), context.searcher(), hits); } - static void innerHitsExecute(Query mainQuery, IndexSearcher indexSearcher, SearchHit[] hits) throws IOException { + static void innerHitsExecute(Query mainQuery, + IndexSearcher indexSearcher, + SearchHit[] hits) throws IOException { List percolateQueries = locatePercolatorQuery(mainQuery); if (percolateQueries.isEmpty()) { return; @@ -71,12 +72,8 @@ static void innerHitsExecute(Query mainQuery, IndexSearcher indexSearcher, Searc for (PercolateQuery percolateQuery : percolateQueries) { String fieldName = singlePercolateQuery ? FIELD_NAME_PREFIX : FIELD_NAME_PREFIX + "_" + percolateQuery.getName(); IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher(); - // there is a bug in lucene's MemoryIndex that doesn't allow us to use docValues here... 
- // See https://issues.apache.org/jira/browse/LUCENE-8055 - // for now we just use version 6.0 version to find nested parent - final Version version = Version.V_6_0_0; //context.mapperService().getIndexSettings().getIndexVersionCreated(); - Weight weight = percolatorIndexSearcher.createWeight(percolatorIndexSearcher.rewrite(Queries.newNonNestedFilter(version)), - ScoreMode.COMPLETE_NO_SCORES, 1f); + Weight weight = percolatorIndexSearcher.createWeight(percolatorIndexSearcher.rewrite(Queries.newNonNestedFilter()), + ScoreMode.COMPLETE_NO_SCORES, 1f); Scorer s = weight.scorer(percolatorIndexSearcher.getIndexReader().leaves().get(0)); int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc(); BitSet rootDocs = BitSet.of(s.iterator(), memoryIndexMaxDoc); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index 38c6057e1866f..c245e2cb3a20b 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -206,20 +206,8 @@ private static BiFunction phraseQuery() { return new Result(true, Collections.emptySet(), 0); } - if (version.onOrAfter(Version.V_6_1_0)) { - Set extractions = Arrays.stream(terms).map(QueryExtraction::new).collect(toSet()); - return new Result(false, extractions, extractions.size()); - } else { - // the longest term is likely to be the rarest, - // so from a performance perspective it makes sense to extract that - Term longestTerm = terms[0]; - for (Term term : terms) { - if (longestTerm.bytes().length < term.bytes().length) { - longestTerm = term; - } - } - return new Result(false, Collections.singleton(new QueryExtraction(longestTerm)), 1); - } + Set extractions = Arrays.stream(terms).map(QueryExtraction::new).collect(toSet()); + return new Result(false, extractions, extractions.size()); }; } @@ -255,23 +243,14 @@ private static BiFunction spanTermQuery() { private static BiFunction spanNearQuery() { return (query, version) -> { SpanNearQuery spanNearQuery = (SpanNearQuery) query; - if (version.onOrAfter(Version.V_6_1_0)) { - // This has the same problem as boolean queries when it comes to duplicated clauses - // so we rewrite to a boolean query to keep things simple. - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - for (SpanQuery clause : spanNearQuery.getClauses()) { - builder.add(clause, Occur.FILTER); - } - // make sure to unverify the result - return booleanQuery().apply(builder.build(), version).unverify(); - } else { - Result bestClause = null; - for (SpanQuery clause : spanNearQuery.getClauses()) { - Result temp = analyze(clause, version); - bestClause = selectBestResult(temp, bestClause); - } - return bestClause; + // This has the same problem as boolean queries when it comes to duplicated clauses + // so we rewrite to a boolean query to keep things simple. 
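+            // Clauses are added with Occur.FILTER: each one stays required but contributes no score. Because the
+            // boolean rewrite drops the span query's proximity/order constraints, the result is only a candidate
+            // match, which is why it is unverified below.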
+ BooleanQuery.Builder builder = new BooleanQuery.Builder(); + for (SpanQuery clause : spanNearQuery.getClauses()) { + builder.add(clause, Occur.FILTER); } + // make sure to unverify the result + return booleanQuery().apply(builder.build(), version).unverify(); }; } @@ -478,77 +457,69 @@ private static Result handleConjunction(List conjunctions, Version versi if (conjunctions.isEmpty()) { throw new IllegalArgumentException("Must have at least on conjunction sub result"); } - if (version.onOrAfter(Version.V_6_1_0)) { - for (Result subResult : conjunctions) { - if (subResult.isMatchNoDocs()) { - return subResult; - } + for (Result subResult : conjunctions) { + if (subResult.isMatchNoDocs()) { + return subResult; } - int msm = 0; - boolean verified = true; - boolean matchAllDocs = true; - boolean hasDuplicateTerms = false; - Set extractions = new HashSet<>(); - Set seenRangeFields = new HashSet<>(); - for (Result result : conjunctions) { - // In case that there are duplicate query extractions we need to be careful with - // incrementing msm, - // because that could lead to valid matches not becoming candidate matches: - // query: (field:val1 AND field:val2) AND (field:val2 AND field:val3) - // doc: field: val1 val2 val3 - // So lets be protective and decrease the msm: - int resultMsm = result.minimumShouldMatch; - for (QueryExtraction queryExtraction : result.extractions) { - if (queryExtraction.range != null) { - // In case of range queries each extraction does not simply increment the - // minimum_should_match - // for that percolator query like for a term based extraction, so that can lead - // to more false - // positives for percolator queries with range queries than term based queries. - // The is because the way number fields are extracted from the document to be - // percolated. - // Per field a single range is extracted and if a percolator query has two or - // more range queries - // on the same field, then the minimum should match can be higher than clauses - // in the CoveringQuery. - // Therefore right now the minimum should match is incremented once per number - // field when processing - // the percolator query at index time. - if (seenRangeFields.add(queryExtraction.range.fieldName)) { - resultMsm = 1; - } else { - resultMsm = 0; - } - } - - if (extractions.contains(queryExtraction)) { - + } + int msm = 0; + boolean verified = true; + boolean matchAllDocs = true; + boolean hasDuplicateTerms = false; + Set extractions = new HashSet<>(); + Set seenRangeFields = new HashSet<>(); + for (Result result : conjunctions) { + // In case that there are duplicate query extractions we need to be careful with + // incrementing msm, + // because that could lead to valid matches not becoming candidate matches: + // query: (field:val1 AND field:val2) AND (field:val2 AND field:val3) + // doc: field: val1 val2 val3 + // So lets be protective and decrease the msm: + int resultMsm = result.minimumShouldMatch; + for (QueryExtraction queryExtraction : result.extractions) { + if (queryExtraction.range != null) { + // In case of range queries each extraction does not simply increment the + // minimum_should_match + // for that percolator query like for a term based extraction, so that can lead + // to more false + // positives for percolator queries with range queries than term based queries. + // The is because the way number fields are extracted from the document to be + // percolated. 
+ // Per field a single range is extracted and if a percolator query has two or + // more range queries + // on the same field, then the minimum should match can be higher than clauses + // in the CoveringQuery. + // Therefore right now the minimum should match is incremented once per number + // field when processing + // the percolator query at index time. + if (seenRangeFields.add(queryExtraction.range.fieldName)) { + resultMsm = 1; + } else { resultMsm = 0; - verified = false; - break; } } - msm += resultMsm; - if (result.verified == false - // If some inner extractions are optional, the result can't be verified - || result.minimumShouldMatch < result.extractions.size()) { + if (extractions.contains(queryExtraction)) { + + resultMsm = 0; verified = false; + break; } - matchAllDocs &= result.matchAllDocs; - extractions.addAll(result.extractions); } - if (matchAllDocs) { - return new Result(matchAllDocs, verified); - } else { - return new Result(verified, extractions, hasDuplicateTerms ? 1 : msm); + msm += resultMsm; + + if (result.verified == false + // If some inner extractions are optional, the result can't be verified + || result.minimumShouldMatch < result.extractions.size()) { + verified = false; } + matchAllDocs &= result.matchAllDocs; + extractions.addAll(result.extractions); + } + if (matchAllDocs) { + return new Result(matchAllDocs, verified); } else { - Result bestClause = null; - for (Result result : conjunctions) { - bestClause = selectBestResult(result, bestClause); - } - return bestClause; + return new Result(verified, extractions, hasDuplicateTerms ? 1 : msm); } } @@ -565,12 +536,7 @@ private static Result handleDisjunctionQuery(List disjunctions, int requi private static Result handleDisjunction(List disjunctions, int requiredShouldClauses, Version version) { // Keep track of the msm for each clause: List clauses = new ArrayList<>(disjunctions.size()); - boolean verified; - if (version.before(Version.V_6_1_0)) { - verified = requiredShouldClauses <= 1; - } else { - verified = true; - } + boolean verified = true; int numMatchAllClauses = 0; boolean hasRangeExtractions = false; @@ -617,10 +583,9 @@ private static Result handleDisjunction(List disjunctions, int requiredS boolean matchAllDocs = numMatchAllClauses > 0 && numMatchAllClauses >= requiredShouldClauses; int msm = 0; - if (version.onOrAfter(Version.V_6_1_0) && - // Having ranges would mean we need to juggle with the msm and that complicates this logic a lot, - // so for now lets not do it. - hasRangeExtractions == false) { + // Having ranges would mean we need to juggle with the msm and that complicates this logic a lot, + // so for now lets not do it. + if (hasRangeExtractions == false) { // Figure out what the combined msm is for this disjunction: // (sum the lowest required clauses, otherwise we're too strict and queries may not match) clauses = clauses.stream() @@ -648,103 +613,6 @@ private static Result handleDisjunction(List disjunctions, int requiredS } } - /** - * Return an extraction for the conjunction of {@code result1} and {@code result2} - * by picking up clauses that look most restrictive and making it unverified if - * the other clause is not null and doesn't match all documents. This is used by - * 6.0.0 indices which didn't use the terms_set query. 
- */ - static Result selectBestResult(Result result1, Result result2) { - assert result1 != null || result2 != null; - if (result1 == null) { - return result2; - } else if (result2 == null) { - return result1; - } else if (result1.matchAllDocs) { // conjunction with match_all - Result result = result2; - if (result1.verified == false) { - result = result.unverify(); - } - return result; - } else if (result2.matchAllDocs) { // conjunction with match_all - Result result = result1; - if (result2.verified == false) { - result = result.unverify(); - } - return result; - } else { - // Prefer term based extractions over range based extractions: - boolean onlyRangeBasedExtractions = true; - for (QueryExtraction clause : result1.extractions) { - if (clause.term != null) { - onlyRangeBasedExtractions = false; - break; - } - } - for (QueryExtraction clause : result2.extractions) { - if (clause.term != null) { - onlyRangeBasedExtractions = false; - break; - } - } - - if (onlyRangeBasedExtractions) { - BytesRef extraction1SmallestRange = smallestRange(result1.extractions); - BytesRef extraction2SmallestRange = smallestRange(result2.extractions); - if (extraction1SmallestRange == null) { - return result2.unverify(); - } else if (extraction2SmallestRange == null) { - return result1.unverify(); - } - - // Keep the clause with smallest range, this is likely to be the rarest. - if (extraction1SmallestRange.compareTo(extraction2SmallestRange) <= 0) { - return result1.unverify(); - } else { - return result2.unverify(); - } - } else { - int extraction1ShortestTerm = minTermLength(result1.extractions); - int extraction2ShortestTerm = minTermLength(result2.extractions); - // keep the clause with longest terms, this likely to be rarest. - if (extraction1ShortestTerm >= extraction2ShortestTerm) { - return result1.unverify(); - } else { - return result2.unverify(); - } - } - } - } - - private static int minTermLength(Set extractions) { - // In case there are only range extractions, then we return Integer.MIN_VALUE, - // so that selectBestExtraction(...) we are likely to prefer the extractions that contains at least a single extraction - if (extractions.stream().filter(queryExtraction -> queryExtraction.term != null).count() == 0 && - extractions.stream().filter(queryExtraction -> queryExtraction.range != null).count() > 0) { - return Integer.MIN_VALUE; - } - - int min = Integer.MAX_VALUE; - for (QueryExtraction qt : extractions) { - if (qt.term != null) { - min = Math.min(min, qt.bytes().length); - } - } - return min; - } - - private static BytesRef smallestRange(Set terms) { - BytesRef min = null; - for (QueryExtraction qt : terms) { - if (qt.range != null) { - if (min == null || qt.range.interval.compareTo(min) < 0) { - min = qt.range.interval; - } - } - } - return min; - } - /** * Query extraction result. 
A result is a candidate for a given document either if: * - `matchAllDocs` is true diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 07f47df41e60d..b191dd948c574 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -97,6 +97,7 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.VersionUtils; import org.junit.After; import org.junit.Before; @@ -593,7 +594,7 @@ public void testRangeQueries() throws Exception { IndexSearcher shardSearcher = newSearcher(directoryReader); shardSearcher.setQueryCache(null); - Version v = Version.V_6_1_0; + Version v = VersionUtils.randomIndexCompatibleVersion(random()); MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new IntPoint("int_field", 3)), new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); Query query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index f1747d1977561..f5d30a951e68c 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -399,26 +399,6 @@ public void testCreateCandidateQuery() throws Exception { assertThat(t.v1().clauses().get(2).getQuery().toString(), containsString(fieldName + ".extraction_result:failed")); } - public void testCreateCandidateQuery_oldIndex() throws Exception { - addQueryFieldMappings(); - - MemoryIndex memoryIndex = new MemoryIndex(false); - memoryIndex.addField("field1", "value1", new WhitespaceAnalyzer()); - IndexReader indexReader = memoryIndex.createSearcher().getIndexReader(); - - Tuple t = fieldType.createCandidateQuery(indexReader, Version.CURRENT); - assertTrue(t.v2()); - assertEquals(2, t.v1().clauses().size()); - assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); - assertThat(t.v1().clauses().get(1).getQuery(), instanceOf(TermQuery.class)); - - t = fieldType.createCandidateQuery(indexReader, Version.V_6_0_0); - assertTrue(t.v2()); - assertEquals(2, t.v1().clauses().size()); - assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); - assertThat(t.v1().clauses().get(1).getQuery(), instanceOf(TermQuery.class)); - } - public void testExtractTermsAndRanges_numberFields() throws Exception { addQueryFieldMappings(); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java index 89356bf274d8d..7d81df9323fbc 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import 
org.apache.lucene.document.Document; +import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; @@ -33,6 +34,7 @@ import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.Directory; import org.apache.lucene.util.FixedBitSet; +import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESTestCase; @@ -58,6 +60,7 @@ public void testHitsExecute() throws Exception { PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value")); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); + memoryIndex.addField(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, 0), null); PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(), new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery()); @@ -72,6 +75,7 @@ public void testHitsExecute() throws Exception { PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value")); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value1", new WhitespaceAnalyzer()); + memoryIndex.addField(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, 0), null); PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(), new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery()); @@ -85,6 +89,7 @@ public void testHitsExecute() throws Exception { PercolateQuery.QueryStore queryStore = ctx -> docId -> null; MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); + memoryIndex.addField(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, 0), null); PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(), new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery()); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index 712d5688827f2..c07467187f05f 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -75,7 +75,6 @@ import static org.elasticsearch.percolator.QueryAnalyzer.UnsupportedQueryException; import static org.elasticsearch.percolator.QueryAnalyzer.analyze; -import static org.elasticsearch.percolator.QueryAnalyzer.selectBestResult; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; @@ -148,22 +147,6 @@ public void testExtractQueryMetadata_multiPhraseQuery() { assertThat(terms.get(5).bytes().utf8ToString(), equalTo("_term6")); } - public void testExtractQueryMetadata_multiPhraseQuery_pre6dot1() { - MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder() - .add(new Term("_field", "_long_term")) - .add(new Term[] {new Term("_field", "_long_term"), new Term("_field", "_term")}) - .add(new Term[] {new Term("_field", "_long_term"), new Term("_field", "_very_long_term")}) - .add(new Term[] {new Term("_field", "_very_long_term")}) - .build(); - Result result = 
analyze(multiPhraseQuery, Version.V_6_0_0); - assertThat(result.verified, is(false)); - assertThat(result.minimumShouldMatch, equalTo(1)); - List terms = new ArrayList<>(result.extractions); - assertThat(terms.size(), equalTo(1)); - assertThat(terms.get(0).field(), equalTo("_field")); - assertThat(terms.get(0).bytes().utf8ToString(), equalTo("_very_long_term")); - } - public void testExtractQueryMetadata_multiPhraseQuery_dups() { MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder() .add(new Term("_field", "_term1")) @@ -211,35 +194,6 @@ public void testExtractQueryMetadata_booleanQuery() { assertThat(terms.get(4).bytes(), equalTo(termQuery3.getTerm().bytes())); } - public void testExtractQueryMetadata_booleanQuery_pre6dot1() { - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); - builder.add(termQuery1, BooleanClause.Occur.SHOULD); - PhraseQuery phraseQuery = new PhraseQuery("_field", "_term1", "term2"); - builder.add(phraseQuery, BooleanClause.Occur.SHOULD); - - BooleanQuery.Builder subBuilder = new BooleanQuery.Builder(); - TermQuery termQuery2 = new TermQuery(new Term("_field1", "_term")); - subBuilder.add(termQuery2, BooleanClause.Occur.MUST); - TermQuery termQuery3 = new TermQuery(new Term("_field3", "_long_term")); - subBuilder.add(termQuery3, BooleanClause.Occur.MUST); - builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD); - - BooleanQuery booleanQuery = builder.build(); - Result result = analyze(booleanQuery, Version.V_6_0_0); - assertThat("Should clause with phrase query isn't verified, so entire query can't be verified", result.verified, is(false)); - assertThat(result.minimumShouldMatch, equalTo(1)); - List terms = new ArrayList<>(result.extractions); - terms.sort(Comparator.comparing(qt -> qt.term)); - assertThat(terms.size(), equalTo(3)); - assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); - assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes())); - assertThat(terms.get(1).field(), equalTo(phraseQuery.getTerms()[0].field())); - assertThat(terms.get(1).bytes(), equalTo(phraseQuery.getTerms()[0].bytes())); - assertThat(terms.get(2).field(), equalTo(termQuery3.getTerm().field())); - assertThat(terms.get(2).bytes(), equalTo(termQuery3.getTerm().bytes())); - } - public void testExtractQueryMetadata_booleanQuery_msm() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); builder.setMinimumNumberShouldMatch(2); @@ -326,28 +280,6 @@ public void testExtractQueryMetadata_booleanQuery_msm() { assertFalse(result.verified); } - public void testExtractQueryMetadata_booleanQuery_msm_pre6dot1() { - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.setMinimumNumberShouldMatch(2); - TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1")); - builder.add(termQuery1, BooleanClause.Occur.SHOULD); - TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2")); - builder.add(termQuery2, BooleanClause.Occur.SHOULD); - TermQuery termQuery3 = new TermQuery(new Term("_field", "_term3")); - builder.add(termQuery3, BooleanClause.Occur.SHOULD); - - BooleanQuery booleanQuery = builder.build(); - Result result = analyze(booleanQuery, Version.V_6_0_0); - assertThat(result.verified, is(false)); - assertThat(result.minimumShouldMatch, equalTo(1)); - List extractions = new ArrayList<>(result.extractions); - extractions.sort(Comparator.comparing(extraction -> extraction.term)); - assertThat(extractions.size(), equalTo(3)); - 
assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term1"))); - assertThat(extractions.get(1).term, equalTo(new Term("_field", "_term2"))); - assertThat(extractions.get(2).term, equalTo(new Term("_field", "_term3"))); - } - public void testExtractQueryMetadata_booleanQuery_onlyShould() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1")); @@ -401,12 +333,6 @@ public void testExtractQueryMetadata_booleanQueryWithMustNot() { assertThat(result.verified, is(false)); assertThat(result.minimumShouldMatch, equalTo(0)); assertTermsEqual(result.extractions); - - result = analyze(booleanQuery, Version.V_6_0_0); - assertThat(result.matchAllDocs, is(true)); - assertThat(result.verified, is(false)); - assertThat(result.minimumShouldMatch, equalTo(0)); - assertTermsEqual(result.extractions); } public void testExactMatch_booleanQuery() { @@ -657,18 +583,6 @@ public void testExtractQueryMetadata_spanNearQuery() { assertTermsEqual(result.extractions, spanTermQuery1.getTerm(), spanTermQuery2.getTerm()); } - public void testExtractQueryMetadata_spanNearQuery_pre6dot1() { - SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); - SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); - SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true) - .addClause(spanTermQuery1).addClause(spanTermQuery2).build(); - - Result result = analyze(spanNearQuery, Version.V_6_0_0); - assertThat(result.verified, is(false)); - assertThat(result.minimumShouldMatch, equalTo(1)); - assertTermsEqual(result.extractions, spanTermQuery2.getTerm()); - } - public void testExtractQueryMetadata_spanOrQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); @@ -966,138 +880,6 @@ public void testFunctionScoreQuery_withMatchAll() { assertThat(result.extractions.isEmpty(), is(true)); } - public void testSelectBestResult() { - Set queryTerms1 = terms(new int[0], "12", "1234", "12345"); - Result result1 = new Result(true, queryTerms1, 1); - Set queryTerms2 = terms(new int[0], "123", "1234", "12345"); - Result result2 = new Result(true, queryTerms2, 1); - Result result = selectBestResult(result1, result2); - assertSame(queryTerms2, result.extractions); - assertFalse(result.verified); - - queryTerms1 = terms(new int[]{1, 2, 3}); - result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{2, 3, 4}); - result2 = new Result(true, queryTerms2, 1); - result = selectBestResult(result1, result2); - assertSame(queryTerms1, result.extractions); - assertFalse(result.verified); - - queryTerms1 = terms(new int[]{4, 5, 6}); - result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{1, 2, 3}); - result2 = new Result(true, queryTerms2, 1); - result = selectBestResult(result1, result2); - assertSame(queryTerms2, result.extractions); - assertFalse(result.verified); - - queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456"); - result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{2, 3, 4}, "123", "456"); - result2 = new Result(true, queryTerms2, 1); - result = selectBestResult(result1, result2); - assertSame(queryTerms1, result.extractions); - assertFalse(result.verified); - - queryTerms1 = terms(new int[]{10}); - result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{1}); - result2 = new 
Result(true, queryTerms2, 1); - result = selectBestResult(result1, result2); - assertSame(queryTerms2, result.extractions); - - queryTerms1 = terms(new int[]{10}, "123"); - result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{1}); - result2 = new Result(true, queryTerms2, 1); - result = selectBestResult(result1, result2); - assertSame(queryTerms1, result.extractions); - assertFalse(result.verified); - - queryTerms1 = terms(new int[]{10}, "1", "123"); - result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{1}, "1", "2"); - result2 = new Result(true, queryTerms2, 1); - result = selectBestResult(result1, result2); - assertSame(queryTerms1, result.extractions); - assertFalse(result.verified); - - queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456"); - result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{2, 3, 4}, "1", "456"); - result2 = new Result(true, queryTerms2, 1); - result = selectBestResult(result1, result2); - assertSame("Ignoring ranges, so then prefer queryTerms1, because it has the longest shortest term", - queryTerms1, result.extractions); - assertFalse(result.verified); - - queryTerms1 = terms(new int[]{}); - result1 = new Result(false, queryTerms1, 0); - queryTerms2 = terms(new int[]{}); - result2 = new Result(false, queryTerms2, 0); - result = selectBestResult(result1, result2); - assertSame("In case query extractions are empty", queryTerms2, result.extractions); - assertFalse(result.verified); - - queryTerms1 = terms(new int[]{1}); - result1 = new Result(true, queryTerms1, 1); - queryTerms2 = terms(new int[]{}); - result2 = new Result(false, queryTerms2, 0); - result = selectBestResult(result1, result2); - assertSame("In case query a single extraction is empty", queryTerms1, result.extractions); - assertFalse(result.verified); - - queryTerms1 = terms(new int[]{}); - result1 = new Result(false, queryTerms1, 0); - queryTerms2 = terms(new int[]{1}); - result2 = new Result(true, queryTerms2, 1); - result = selectBestResult(result1, result2); - assertSame("In case query a single extraction is empty", queryTerms2, result.extractions); - assertFalse(result.verified); - - result1 = new Result(true, true); - queryTerms2 = terms(new int[]{1}); - result2 = new Result(true, queryTerms2, 1); - result = selectBestResult(result1, result2); - assertSame("Conjunction with a match_all", result2, result); - assertTrue(result.verified); - - queryTerms1 = terms(new int[]{1}); - result1 = new Result(true, queryTerms2, 1); - result2 = new Result(true, true); - result = selectBestResult(result1, result2); - assertSame("Conjunction with a match_all", result1, result); - assertTrue(result.verified); - } - - public void testselectBestResult_random() { - Set terms1 = new HashSet<>(); - int shortestTerms1Length = Integer.MAX_VALUE; - int sumTermLength = randomIntBetween(1, 128); - while (sumTermLength > 0) { - int length = randomInt(sumTermLength); - shortestTerms1Length = Math.min(shortestTerms1Length, length); - terms1.add(new QueryExtraction(new Term("field", randomAlphaOfLength(length)))); - sumTermLength -= length; - } - - Set terms2 = new HashSet<>(); - int shortestTerms2Length = Integer.MAX_VALUE; - sumTermLength = randomIntBetween(1, 128); - while (sumTermLength > 0) { - int length = randomInt(sumTermLength); - shortestTerms2Length = Math.min(shortestTerms2Length, length); - terms2.add(new QueryExtraction(new Term("field", randomAlphaOfLength(length)))); - sumTermLength -= length; - } - - Result result1 = new Result(true, terms1, 
1); - Result result2 = new Result(true, terms2, 1); - Result result = selectBestResult(result1, result2); - Set expected = shortestTerms1Length >= shortestTerms2Length ? terms1 : terms2; - assertThat(result.extractions, sameInstance(expected)); - } - public void testPointRangeQuery() { // int ranges get converted to long ranges: Query query = IntPoint.newRangeQuery("_field", 10, 20); @@ -1213,53 +995,6 @@ public void testToParentBlockJoinQuery() { assertEquals(new Term("field", "value"), result.extractions.toArray(new QueryExtraction[0])[0].term); } - public void testPointRangeQuerySelectShortestRange() { - BooleanQuery.Builder boolQuery = new BooleanQuery.Builder(); - boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); - boolQuery.add(LongPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER); - Result result = analyze(boolQuery.build(), Version.V_6_0_0); - assertFalse(result.verified); - assertThat(result.minimumShouldMatch, equalTo(1)); - assertEquals(1, result.extractions.size()); - assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); - - boolQuery = new BooleanQuery.Builder(); - boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); - boolQuery.add(IntPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER); - result = analyze(boolQuery.build(), Version.V_6_0_0); - assertFalse(result.verified); - assertThat(result.minimumShouldMatch, equalTo(1)); - assertEquals(1, result.extractions.size()); - assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); - - boolQuery = new BooleanQuery.Builder(); - boolQuery.add(DoublePoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); - boolQuery.add(DoublePoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER); - result = analyze(boolQuery.build(), Version.V_6_0_0); - assertFalse(result.verified); - assertThat(result.minimumShouldMatch, equalTo(1)); - assertEquals(1, result.extractions.size()); - assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); - - boolQuery = new BooleanQuery.Builder(); - boolQuery.add(DoublePoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); - boolQuery.add(FloatPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER); - result = analyze(boolQuery.build(), Version.V_6_0_0); - assertFalse(result.verified); - assertThat(result.minimumShouldMatch, equalTo(1)); - assertEquals(1, result.extractions.size()); - assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); - - boolQuery = new BooleanQuery.Builder(); - boolQuery.add(HalfFloatPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); - boolQuery.add(HalfFloatPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER); - result = analyze(boolQuery.build(), Version.V_6_0_0); - assertFalse(result.verified); - assertThat(result.minimumShouldMatch, equalTo(1)); - assertEquals(1, result.extractions.size()); - assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); - } - public void testPointRangeQuerySelectRanges() { BooleanQuery.Builder boolQuery = new BooleanQuery.Builder(); boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.SHOULD); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java index 
be232ca7c402f..5d4d140131a14 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java @@ -35,7 +35,6 @@ public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler threads = new ArrayList<>(); final Settings settings = Settings.builder() .put("path.home", createTempDir()) + .put("reindex.ssl.supported_protocols", "TLSv1.2") .build(); final Environment environment = TestEnvironment.newEnvironment(settings); final ReindexSslConfig ssl = new ReindexSslConfig(settings, environment, mock(ResourceWatcherService.class)); @@ -134,6 +135,7 @@ public void testClientSucceedsWithCertificateAuthorities() throws IOException { final Settings settings = Settings.builder() .put("path.home", createTempDir()) .putList("reindex.ssl.certificate_authorities", ca.toString()) + .put("reindex.ssl.supported_protocols", "TLSv1.2") .build(); final Environment environment = TestEnvironment.newEnvironment(settings); final ReindexSslConfig ssl = new ReindexSslConfig(settings, environment, mock(ResourceWatcherService.class)); @@ -149,6 +151,7 @@ public void testClientSucceedsWithVerificationDisabled() throws IOException { final Settings settings = Settings.builder() .put("path.home", createTempDir()) .put("reindex.ssl.verification_mode", "NONE") + .put("reindex.ssl.supported_protocols", "TLSv1.2") .build(); final Environment environment = TestEnvironment.newEnvironment(settings); final ReindexSslConfig ssl = new ReindexSslConfig(settings, environment, mock(ResourceWatcherService.class)); @@ -169,6 +172,7 @@ public void testClientPassesClientCertificate() throws IOException { .put("reindex.ssl.certificate", cert) .put("reindex.ssl.key", key) .put("reindex.ssl.key_passphrase", "client-password") + .put("reindex.ssl.supported_protocols", "TLSv1.2") .build(); AtomicReference clientCertificates = new AtomicReference<>(); handler = https -> { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java index 19c5739bbc6ce..8264d4342c993 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -48,6 +49,9 @@ public class ReindexSourceTargetValidationTests extends ESTestCase { private static final ClusterState STATE = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder() .put(index("target", "target_alias", "target_multi"), true) .put(index("target2", "target_multi"), true) + .put(index("target_with_write_index", true, "target_multi_with_write_index"), true) + .put(index("target2_without_write_index", "target_multi_with_write_index"), true) + .put(index("qux", false, "target_alias_with_write_index_disabled"), true) .put(index("foo"), true) .put(index("bar"), true) .put(index("baz"), true) @@ -78,12 +82,26 @@ public void 
testAliasesContainTarget() { succeeds("target", "source", "source2", "source_multi"); } - public void testTargetIsAlias() { + public void testTargetIsAliasToMultipleIndicesWithoutWriteAlias() { Exception e = expectThrows(IllegalArgumentException.class, () -> succeeds("target_multi", "foo")); - assertThat(e.getMessage(), containsString("Alias [target_multi] has more than one indices associated with it [[")); - // The index names can come in either order - assertThat(e.getMessage(), containsString("target")); - assertThat(e.getMessage(), containsString("target2")); + assertThat(e.getMessage(), containsString("no write index is defined for alias [target_multi]. The write index may be explicitly " + + "disabled using is_write_index=false or the alias points to multiple indices without one being designated as a " + + "write index")); + } + + public void testTargetIsAliasWithWriteIndexDisabled() { + Exception e = expectThrows(IllegalArgumentException.class, () -> succeeds("target_alias_with_write_index_disabled", "foo")); + assertThat(e.getMessage(), containsString("no write index is defined for alias [target_alias_with_write_index_disabled]. " + + "The write index may be explicitly disabled using is_write_index=false or the alias points to multiple indices without one " + + "being designated as a write index")); + succeeds("qux", "foo"); // writing directly into the index of which this is the alias works though + } + + public void testTargetIsWriteAlias() { + succeeds("target_multi_with_write_index", "foo"); + succeeds("target_multi_with_write_index", "target2_without_write_index"); + fails("target_multi_with_write_index", "target_multi_with_write_index"); + fails("target_multi_with_write_index", "target_with_write_index"); } public void testRemoteInfoSkipsValidation() { @@ -97,7 +115,7 @@ public void testRemoteInfoSkipsValidation() { private void fails(String target, String... sources) { Exception e = expectThrows(ActionRequestValidationException.class, () -> succeeds(target, sources)); - assertThat(e.getMessage(), containsString("reindex cannot write into an index its reading from [target]")); + assertThat(e.getMessage(), containsString("reindex cannot write into an index its reading from")); } private void succeeds(String target, String... sources) { @@ -110,12 +128,16 @@ private void succeeds(RemoteInfo remoteInfo, String target, String... sources) { } private static IndexMetaData index(String name, String... aliases) { + return index(name, null, aliases); + } + + private static IndexMetaData index(String name, @Nullable Boolean writeIndex, String... aliases) { IndexMetaData.Builder builder = IndexMetaData.builder(name).settings(Settings.builder() .put("index.version.created", Version.CURRENT.id) .put("index.number_of_shards", 1) .put("index.number_of_replicas", 1)); for (String alias: aliases) { - builder.putAlias(AliasMetaData.builder(alias).build()); + builder.putAlias(AliasMetaData.builder(alias).writeIndex(writeIndex).build()); } return builder.build(); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestDeleteByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestDeleteByQueryActionTests.java deleted file mode 100644 index 788fad3210037..0000000000000 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestDeleteByQueryActionTests.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.reindex; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; - -import org.junit.Before; -import java.io.IOException; - -import static java.util.Collections.emptyList; - -public class RestDeleteByQueryActionTests extends RestActionTestCase { - private RestDeleteByQueryAction action; - - @Before - public void setUpAction() { - action = new RestDeleteByQueryAction(Settings.EMPTY, controller()); - } - - public void testTypeInPath() throws IOException { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.POST) - .withPath("/some_index/some_type/_delete_by_query") - .build(); - dispatchRequest(request); - - // checks the type in the URL is propagated correctly to the request object - // only works after the request is dispatched, so its params are filled from url. - DeleteByQueryRequest dbqRequest = action.buildRequest(request); - assertArrayEquals(new String[]{"some_type"}, dbqRequest.getDocTypes()); - - // RestDeleteByQueryAction itself doesn't check for a deprecated type usage - // checking here for a deprecation from its internal search request - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testParseEmpty() throws IOException { - DeleteByQueryRequest request = action.buildRequest(new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build()); - assertEquals(AbstractBulkByScrollRequest.SIZE_ALL_MATCHES, request.getSize()); - assertEquals(AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE, request.getSearchRequest().source().size()); - } -} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestUpdateByQueryActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestUpdateByQueryActionTests.java deleted file mode 100644 index b5333226bb92e..0000000000000 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestUpdateByQueryActionTests.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.reindex; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; - -import org.junit.Before; -import java.io.IOException; - -import static java.util.Collections.emptyList; - -public class RestUpdateByQueryActionTests extends RestActionTestCase { - - private RestUpdateByQueryAction action; - - @Before - public void setUpAction() { - action = new RestUpdateByQueryAction(Settings.EMPTY, controller()); - } - - public void testTypeInPath() throws IOException { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.POST) - .withPath("/some_index/some_type/_update_by_query") - .build(); - dispatchRequest(request); - - // checks the type in the URL is propagated correctly to the request object - // only works after the request is dispatched, so its params are filled from url. - UpdateByQueryRequest ubqRequest = action.buildRequest(request); - assertArrayEquals(new String[]{"some_type"}, ubqRequest.getDocTypes()); - - // RestUpdateByQueryAction itself doesn't check for a deprecated type usage - // checking here for a deprecation from its internal search request - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testParseEmpty() throws IOException { - UpdateByQueryRequest request = action.buildRequest(new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build()); - assertEquals(AbstractBulkByScrollRequest.SIZE_ALL_MATCHES, request.getSize()); - assertEquals(AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE, request.getSearchRequest().source().size()); - } -} diff --git a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java index a7042b8bfee2b..8f5ce9b0ffe4f 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java @@ -57,9 +57,6 @@ public URLBlobStore(Settings settings, URL path) { new ByteSizeValue(100, ByteSizeUnit.KB)).getBytes(); } - /** - * {@inheritDoc} - */ @Override public String toString() { return path.toString(); @@ -83,9 +80,6 @@ public int bufferSizeInBytes() { return this.bufferSizeInBytes; } - /** - * {@inheritDoc} - */ @Override public BlobContainer blobContainer(BlobPath path) { try { @@ -95,17 +89,6 @@ public BlobContainer blobContainer(BlobPath path) { } } - /** - * This operation is not supported by URL Blob Store - */ - @Override - public void delete(BlobPath path) { - throw new UnsupportedOperationException("URL repository is read only"); - } - - /** - * {@inheritDoc} - */ @Override public void close() { // nothing to do here... 
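(Context for the URLBlobStore hunk above: the removed delete(BlobPath) override is what made this read-only repository reject deletes outright. A minimal, purely illustrative sketch of that guard follows; the helper class name is hypothetical and is not part of this change.)

    // Hypothetical helper, shown only to illustrate the behaviour of the removed
    // URLBlobStore#delete(BlobPath) override; it is not introduced by this diff.
    final class UrlRepositoryReadOnlyGuard {
        static void rejectDelete() {
            // Same message the removed override threw.
            throw new UnsupportedOperationException("URL repository is read only");
        }
    }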
diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java index 200c9aa4bbeb4..71585ea7a4e8e 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java @@ -59,7 +59,7 @@ public void testLoggingHandler() throws IllegalAccessException { ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]" + " WRITE: \\d+B"; final MockLogAppender.LoggingExpectation writeExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, writePattern); final MockLogAppender.LoggingExpectation flushExpectation = @@ -74,7 +74,7 @@ public void testLoggingHandler() throws IllegalAccessException { " READ: \\d+B"; final MockLogAppender.LoggingExpectation readExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, readPattern); appender.addExpectation(writeExpectation); diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java index 66746854c61f4..31484c8ade76e 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java @@ -22,10 +22,8 @@ import com.ibm.icu.text.FilteredNormalizer2; import com.ibm.icu.text.Normalizer2; import com.ibm.icu.text.UnicodeSet; - import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenStream; -import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -59,14 +57,7 @@ public TokenStream create(TokenStream tokenStream) { static Normalizer2 wrapWithUnicodeSetFilter(final IndexSettings indexSettings, final Normalizer2 normalizer, final Settings settings) { - String unicodeSetFilter = settings.get("unicodeSetFilter"); - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - if (unicodeSetFilter != null) { - deprecationLogger.deprecated("[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]"); - } else { - unicodeSetFilter = settings.get("unicode_set_filter"); - } - } + String unicodeSetFilter = settings.get("unicode_set_filter"); if (unicodeSetFilter != null) { UnicodeSet unicodeSet = new UnicodeSet(unicodeSetFilter); diff --git a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yml b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yml index da95501c05d21..a4642078941ac 100644 --- a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yml +++ b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yml @@ -103,45 +103,3 @@ - match: { tokens.1.token: foo } - match: { tokens.2.token: bâr } - match: { tokens.3.token: russ } - ---- -"Normalization with deprecated unicodeSetFilter": - - skip: - 
reason: unicodeSetFilter deprecated in 7.0.0, replaced by unicode_set_filter - features: "warnings" - - - do: - warnings: - - "[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]" - indices.create: - index: test - body: - settings: - index: - analysis: - char_filter: - charfilter_icu_normalizer: - type: icu_normalizer - unicodeSetFilter: "[^ß]" - filter: - tokenfilter_icu_normalizer: - type: icu_normalizer - unicodeSetFilter: "[^ßB]" - tokenfilter_icu_folding: - type: icu_folding - unicodeSetFilter: "[^â]" - - do: - warnings: - - "[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]" - indices.analyze: - index: test - body: - char_filter: ["charfilter_icu_normalizer"] - tokenizer: standard - text: charfilter Föo Bâr Ruß - - length: { tokens: 4 } - - match: { tokens.0.token: charfilter } - - match: { tokens.1.token: föo } - - match: { tokens.2.token: bâr } - - match: { tokens.3.token: ruß } - diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java index 1022b826475be..ca0f315e7a4b8 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java @@ -36,7 +36,6 @@ import org.apache.lucene.analysis.phonetic.DaitchMokotoffSoundexFilter; import org.apache.lucene.analysis.phonetic.DoubleMetaphoneFilter; import org.apache.lucene.analysis.phonetic.PhoneticFilter; -import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -148,13 +147,6 @@ public TokenStream create(TokenStream tokenStream) { @Override public TokenFilterFactory getSynonymFilter() { - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); - } - else { - DEPRECATION_LOGGER.deprecatedAndMaybeLog("synonym_tokenfilters", "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - return this; - } + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } } diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticFactoryTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticFactoryTests.java index 2092d63fd23fc..da117d018b84d 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticFactoryTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticFactoryTests.java @@ -60,18 +60,6 @@ public void testDisallowedWithSynonyms() throws IOException { = plugin.getTokenFilters().get("phonetic").get(idxSettings, null, "phonetic", settings); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, tff::getSynonymFilter); assertEquals("Token filter [phonetic] cannot be used to parse synonyms", e.getMessage()); - - settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), - Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0))) - .put("path.home", createTempDir().toString()) - .build(); - 
idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - - tff = plugin.getTokenFilters().get("phonetic").get(idxSettings, null, "phonetic", settings); - tff.getSynonymFilter(); - - assertWarnings("Token filter [phonetic] will not be usable to parse synonyms after v7.0"); } } diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureSeedHostsProvider.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureSeedHostsProvider.java index d6b5a85b51f14..4c527264e23fc 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureSeedHostsProvider.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureSeedHostsProvider.java @@ -208,8 +208,7 @@ public List getSeedAddresses(HostsResolver hostsResolver) { } try { - // we only limit to 1 port per address, makes no sense to ping 100 ports - TransportAddress[] addresses = transportService.addressesFromString(networkAddress, 1); + TransportAddress[] addresses = transportService.addressesFromString(networkAddress); for (TransportAddress address : addresses) { logger.trace("adding {}, transport_address {}", networkAddress, address); dynamicHosts.add(address); diff --git a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java index 1ee2e2490727b..82347ca2bf1e4 100644 --- a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java @@ -25,6 +25,7 @@ import com.sun.net.httpserver.HttpsConfigurator; import com.sun.net.httpserver.HttpsServer; import org.apache.logging.log4j.LogManager; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.cloud.azure.classic.management.AzureComputeService; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.FileSystemUtils; @@ -59,7 +60,9 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; +import java.security.AccessController; import java.security.KeyStore; +import java.security.PrivilegedAction; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -262,11 +265,28 @@ private static SSLContext getSSLContext() throws Exception { kmf.init(ks, passphrase); TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509"); tmf.init(ks); - SSLContext ssl = SSLContext.getInstance("TLS"); + SSLContext ssl = SSLContext.getInstance(getProtocol()); ssl.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null); return ssl; } + /** + * The {@link HttpsServer} in the JDK has issues with TLSv1.3 when running in a JDK prior to + * 12.0.1 so we pin to TLSv1.2 when running on an earlier JDK + */ + private static String getProtocol() { + if (JavaVersion.current().compareTo(JavaVersion.parse("12")) < 0) { + return "TLSv1.2"; + } else { + JavaVersion full = + AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + if (full.compareTo(JavaVersion.parse("12.0.1")) < 0) { + return "TLSv1.2"; + } + } + return "TLS"; + } + @AfterClass public static void 
stopHttpd() throws IOException { for (int i = 0; i < internalCluster().size(); i++) { diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 16750a9788ae9..50dc6ac5d85b5 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -23,7 +23,7 @@ esplugin { } versions << [ - 'aws': '1.11.187' + 'aws': '1.11.505' ] dependencies { diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.187.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.187.jar.sha1 deleted file mode 100644 index a5293a9bf6580..0000000000000 --- a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.187.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6f47fcd3c2917bef69dc36aba203c5ea4af9bf24 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.505.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.505.jar.sha1 new file mode 100644 index 0000000000000..add5db290e8a8 --- /dev/null +++ b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.505.jar.sha1 @@ -0,0 +1 @@ +d19328c227b2b5ad81d137361ebc9cbcd0396465 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.187.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.187.jar.sha1 deleted file mode 100644 index 4602436e08182..0000000000000 --- a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.187.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f3e5a8601f3105624674b1a12ca34f453a4b5895 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.505.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.505.jar.sha1 new file mode 100644 index 0000000000000..857f0888de3aa --- /dev/null +++ b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.505.jar.sha1 @@ -0,0 +1 @@ +b669b3c90ea9bf73734ab26f0cb30c5c66addf55 \ No newline at end of file diff --git a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle index 209ab3278398d..8397549f384fe 100644 --- a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle +++ b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle @@ -20,6 +20,7 @@ import org.elasticsearch.gradle.MavenFilteringHack import org.elasticsearch.gradle.test.AntFixture +import org.elasticsearch.gradle.test.RestIntegTestTask apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' @@ -30,14 +31,6 @@ dependencies { final int ec2NumberOfNodes = 3 -/** A task to start the AmazonEC2Fixture which emulates an EC2 service **/ -task ec2Fixture(type: AntFixture) { - dependsOn compileTestJava - env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" - executable = new File(project.runtimeJavaHome, 'bin/java') - args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, "${buildDir}/testclusters/integTest-1/config/unicast_hosts.txt" -} - Map expansions = [ 'expected_nodes': ec2NumberOfNodes ] @@ -47,20 +40,71 @@ processTestResources { MavenFilteringHack.filter(it, expansions) } -integTest { - dependsOn ec2Fixture, project(':plugins:discovery-ec2').bundlePlugin -} +// disable default test task, use specialized ones below +integTest.enabled = false + +/* + * Test using various credential providers (see also https://docs.aws.amazon.com/sdk-for-java/v2/developer-guide/credentials.html): + * - Elasticsearch Keystore (secure settings discovery.ec2.access_key and discovery.ec2.secret_key) + * - Java system properties (aws.accessKeyId and aws.secretAccessKey) + * - 
Environment variables (AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY) + * - ECS container credentials (loaded from ECS if the environment variable AWS_CONTAINER_CREDENTIALS_RELATIVE_URI is set) + * - Instance profile credentials (delivered through the EC2 metadata service) + * + * Notably missing is a test for the default credential profiles file, which is located at ~/.aws/credentials and would at least require a + * custom Java security policy to work. + */ +['KeyStore', 'EnvVariables', 'SystemProperties', 'ContainerCredentials', 'InstanceProfile'].forEach { action -> + AntFixture fixture = tasks.create(name: "ec2Fixture${action}", type: AntFixture) { + dependsOn compileTestJava + env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" + executable = new File(project.runtimeJavaHome, 'bin/java') + args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, "${buildDir}/testclusters/integTest${action}-1/config/unicast_hosts.txt" + } + + tasks.create(name: "integTest${action}", type: RestIntegTestTask) { + dependsOn fixture, project(':plugins:discovery-ec2').bundlePlugin + } + + check.dependsOn("integTest${action}") + + testClusters."integTest${action}" { + numberOfNodes = ec2NumberOfNodes + plugin file(project(':plugins:discovery-ec2').bundlePlugin.archiveFile) -testClusters.integTest { - numberOfNodes = ec2NumberOfNodes - plugin file(project(':plugins:discovery-ec2').bundlePlugin.archiveFile) + setting 'discovery.seed_providers', 'ec2' + setting 'network.host', '_ec2_' + setting 'discovery.ec2.endpoint', { "http://${-> fixture.addressAndPort}" } + systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> fixture.addressAndPort}" } + } +} + +// Extra config for KeyStore +testClusters.integTestKeyStore { keystore 'discovery.ec2.access_key', 'ec2_integration_test_access_key' keystore 'discovery.ec2.secret_key', 'ec2_integration_test_secret_key' - - setting 'discovery.seed_providers', 'ec2' - setting 'network.host', '_ec2_' - setting 'discovery.ec2.endpoint', { "http://${ec2Fixture.addressAndPort}" } - - systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${ec2Fixture.addressAndPort}" } } + +// Extra config for EnvVariables +testClusters.integTestEnvVariables { + environment 'AWS_ACCESS_KEY_ID', 'ec2_integration_test_access_key' + environment 'AWS_SECRET_ACCESS_KEY', 'ec2_integration_test_secret_key' +} + +// Extra config for SystemProperties +testClusters.integTestSystemProperties { + systemProperty 'aws.accessKeyId', 'ec2_integration_test_access_key' + systemProperty 'aws.secretKey', 'ec2_integration_test_secret_key' +} + +// Extra config for ContainerCredentials +ec2FixtureContainerCredentials.env 'ACTIVATE_CONTAINER_CREDENTIALS', true + +testClusters.integTestContainerCredentials { + environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', + { "http://${-> tasks.findByName("ec2FixtureContainerCredentials").addressAndPort}/ecs_credentials_endpoint" } +} + +// Extra config for InstanceProfile +ec2FixtureInstanceProfile.env 'ACTIVATE_INSTANCE_PROFILE', true diff --git a/plugins/discovery-ec2/qa/amazon-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Fixture.java b/plugins/discovery-ec2/qa/amazon-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Fixture.java index 6027bd861590e..32abcdc43e645 100644 --- a/plugins/discovery-ec2/qa/amazon-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Fixture.java +++ 
b/plugins/discovery-ec2/qa/amazon-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Fixture.java @@ -18,10 +18,12 @@ */ package org.elasticsearch.discovery.ec2; +import com.amazonaws.util.DateUtils; import org.apache.http.NameValuePair; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.utils.URLEncodedUtils; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.fixture.AbstractHttpFixture; @@ -34,8 +36,12 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; import java.util.Objects; import java.util.UUID; +import java.util.concurrent.TimeUnit; import static java.nio.charset.StandardCharsets.UTF_8; @@ -45,10 +51,14 @@ public class AmazonEC2Fixture extends AbstractHttpFixture { private final Path nodes; + private final boolean instanceProfile; + private final boolean containerCredentials; - private AmazonEC2Fixture(final String workingDir, final String nodesUriPath) { + private AmazonEC2Fixture(final String workingDir, final String nodesUriPath, boolean instanceProfile, boolean containerCredentials) { super(workingDir); this.nodes = toPath(Objects.requireNonNull(nodesUriPath)); + this.instanceProfile = instanceProfile; + this.containerCredentials = containerCredentials; } public static void main(String[] args) throws Exception { @@ -56,7 +66,10 @@ public static void main(String[] args) throws Exception { throw new IllegalArgumentException("AmazonEC2Fixture "); } - final AmazonEC2Fixture fixture = new AmazonEC2Fixture(args[0], args[1]); + boolean instanceProfile = Booleans.parseBoolean(System.getenv("ACTIVATE_INSTANCE_PROFILE"), false); + boolean containerCredentials = Booleans.parseBoolean(System.getenv("ACTIVATE_CONTAINER_CREDENTIALS"), false); + + final AmazonEC2Fixture fixture = new AmazonEC2Fixture(args[0], args[1], instanceProfile, containerCredentials); fixture.listen(); } @@ -65,6 +78,12 @@ protected Response handle(final Request request) throws IOException { if ("/".equals(request.getPath()) && (HttpPost.METHOD_NAME.equals(request.getMethod()))) { final String userAgent = request.getHeader("User-Agent"); if (userAgent != null && userAgent.startsWith("aws-sdk-java")) { + + final String auth = request.getHeader("Authorization"); + if (auth == null || auth.contains("ec2_integration_test_access_key") == false) { + throw new IllegalArgumentException("wrong access key: " + auth); + } + // Simulate an EC2 DescribeInstancesResponse byte[] responseBody = EMPTY_BYTE; for (NameValuePair parse : URLEncodedUtils.parse(new String(request.getBody(), UTF_8), UTF_8)) { @@ -79,6 +98,32 @@ protected Response handle(final Request request) throws IOException { if ("/latest/meta-data/local-ipv4".equals(request.getPath()) && (HttpGet.METHOD_NAME.equals(request.getMethod()))) { return new Response(RestStatus.OK.getStatus(), TEXT_PLAIN_CONTENT_TYPE, "127.0.0.1".getBytes(UTF_8)); } + + if (instanceProfile && + "/latest/meta-data/iam/security-credentials/".equals(request.getPath()) && + HttpGet.METHOD_NAME.equals(request.getMethod())) { + final Map headers = new HashMap<>(contentType("text/plain")); + return new Response(RestStatus.OK.getStatus(), headers, "my_iam_profile".getBytes(UTF_8)); + } + + if ((containerCredentials && + "/ecs_credentials_endpoint".equals(request.getPath()) && + 
HttpGet.METHOD_NAME.equals(request.getMethod())) || + ("/latest/meta-data/iam/security-credentials/my_iam_profile".equals(request.getPath()) && + HttpGet.METHOD_NAME.equals(request.getMethod()))) { + final Date expiration = new Date(new Date().getTime() + TimeUnit.DAYS.toMillis(1)); + final String response = "{" + + "\"AccessKeyId\": \"" + "ec2_integration_test_access_key" + "\"," + + "\"Expiration\": \"" + DateUtils.formatISO8601Date(expiration) + "\"," + + "\"RoleArn\": \"" + "test" + "\"," + + "\"SecretAccessKey\": \"" + "test" + "\"," + + "\"Token\": \"" + "test" + "\"" + + "}"; + + final Map headers = new HashMap<>(contentType("application/json")); + return new Response(RestStatus.OK.getStatus(), headers, response.getBytes(UTF_8)); + } + return null; } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2SeedHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2SeedHostsProvider.java index 97b7ade49f00c..515aef8408b01 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2SeedHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2SeedHostsProvider.java @@ -174,8 +174,7 @@ && disjoint(securityGroupIds, groups)) { } if (address != null) { try { - // we only limit to 1 port per address, makes no sense to ping 100 ports - final TransportAddress[] addresses = transportService.addressesFromString(address, 1); + final TransportAddress[] addresses = transportService.addressesFromString(address); for (int i = 0; i < addresses.length; i++) { logger.trace("adding {}, address {}, transport_address {}", instance.getInstanceId(), address, addresses[i]); dynamicHosts.add(addresses[i]); diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java index 5f384c049124e..739b964925c3e 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java @@ -22,13 +22,12 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.http.IdleConnectionReaper; -import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.retry.RetryPolicy; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.AmazonEC2Client; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; @@ -97,11 +96,11 @@ static ClientConfiguration buildConfiguration(Logger logger, Ec2ClientSettings c static AWSCredentialsProvider buildCredentials(Logger logger, Ec2ClientSettings clientSettings) { final AWSCredentials credentials = clientSettings.credentials; if (credentials == null) { - logger.debug("Using either environment variables, system properties or instance profile credentials"); - return new DefaultAWSCredentialsProviderChain(); + logger.debug("Using default provider chain"); + return DefaultAWSCredentialsProviderChain.getInstance(); } else { logger.debug("Using basic key/secret credentials"); - return new StaticCredentialsProvider(credentials); + return new 
AWSStaticCredentialsProvider(credentials); } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java index f1c373ee33a5a..3135769df5f46 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java @@ -20,37 +20,48 @@ package org.elasticsearch.discovery.ec2; import com.amazonaws.AmazonClientException; -import com.amazonaws.AmazonServiceException; import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.ClientConfiguration; import com.amazonaws.ResponseMetadata; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.regions.Region; import com.amazonaws.services.ec2.AmazonEC2; +import com.amazonaws.services.ec2.model.AcceptReservedInstancesExchangeQuoteRequest; +import com.amazonaws.services.ec2.model.AcceptReservedInstancesExchangeQuoteResult; +import com.amazonaws.services.ec2.model.AcceptTransitGatewayVpcAttachmentRequest; +import com.amazonaws.services.ec2.model.AcceptTransitGatewayVpcAttachmentResult; +import com.amazonaws.services.ec2.model.AcceptVpcEndpointConnectionsRequest; +import com.amazonaws.services.ec2.model.AcceptVpcEndpointConnectionsResult; import com.amazonaws.services.ec2.model.AcceptVpcPeeringConnectionRequest; import com.amazonaws.services.ec2.model.AcceptVpcPeeringConnectionResult; +import com.amazonaws.services.ec2.model.AdvertiseByoipCidrRequest; +import com.amazonaws.services.ec2.model.AdvertiseByoipCidrResult; import com.amazonaws.services.ec2.model.AllocateAddressRequest; import com.amazonaws.services.ec2.model.AllocateAddressResult; import com.amazonaws.services.ec2.model.AllocateHostsRequest; import com.amazonaws.services.ec2.model.AllocateHostsResult; -import com.amazonaws.services.ec2.model.AssignPrivateIpAddressesRequest; -import com.amazonaws.services.ec2.model.AssignPrivateIpAddressesResult; +import com.amazonaws.services.ec2.model.ApplySecurityGroupsToClientVpnTargetNetworkRequest; +import com.amazonaws.services.ec2.model.ApplySecurityGroupsToClientVpnTargetNetworkResult; import com.amazonaws.services.ec2.model.AssignIpv6AddressesRequest; import com.amazonaws.services.ec2.model.AssignIpv6AddressesResult; +import com.amazonaws.services.ec2.model.AssignPrivateIpAddressesRequest; +import com.amazonaws.services.ec2.model.AssignPrivateIpAddressesResult; import com.amazonaws.services.ec2.model.AssociateAddressRequest; import com.amazonaws.services.ec2.model.AssociateAddressResult; -import com.amazonaws.services.ec2.model.AssociateVpcCidrBlockRequest; -import com.amazonaws.services.ec2.model.AssociateVpcCidrBlockResult; -import com.amazonaws.services.ec2.model.AssociateSubnetCidrBlockRequest; -import com.amazonaws.services.ec2.model.AssociateSubnetCidrBlockResult; -import com.amazonaws.services.ec2.model.AssociateIamInstanceProfileRequest; -import com.amazonaws.services.ec2.model.AssociateIamInstanceProfileResult; -import com.amazonaws.services.ec2.model.AcceptReservedInstancesExchangeQuoteRequest; -import com.amazonaws.services.ec2.model.AcceptReservedInstancesExchangeQuoteResult; +import com.amazonaws.services.ec2.model.AssociateClientVpnTargetNetworkRequest; +import com.amazonaws.services.ec2.model.AssociateClientVpnTargetNetworkResult; import com.amazonaws.services.ec2.model.AssociateDhcpOptionsRequest; import com.amazonaws.services.ec2.model.AssociateDhcpOptionsResult; 
+import com.amazonaws.services.ec2.model.AssociateIamInstanceProfileRequest; +import com.amazonaws.services.ec2.model.AssociateIamInstanceProfileResult; import com.amazonaws.services.ec2.model.AssociateRouteTableRequest; import com.amazonaws.services.ec2.model.AssociateRouteTableResult; +import com.amazonaws.services.ec2.model.AssociateSubnetCidrBlockRequest; +import com.amazonaws.services.ec2.model.AssociateSubnetCidrBlockResult; +import com.amazonaws.services.ec2.model.AssociateTransitGatewayRouteTableRequest; +import com.amazonaws.services.ec2.model.AssociateTransitGatewayRouteTableResult; +import com.amazonaws.services.ec2.model.AssociateVpcCidrBlockRequest; +import com.amazonaws.services.ec2.model.AssociateVpcCidrBlockResult; import com.amazonaws.services.ec2.model.AttachClassicLinkVpcRequest; import com.amazonaws.services.ec2.model.AttachClassicLinkVpcResult; import com.amazonaws.services.ec2.model.AttachInternetGatewayRequest; @@ -61,6 +72,8 @@ import com.amazonaws.services.ec2.model.AttachVolumeResult; import com.amazonaws.services.ec2.model.AttachVpnGatewayRequest; import com.amazonaws.services.ec2.model.AttachVpnGatewayResult; +import com.amazonaws.services.ec2.model.AuthorizeClientVpnIngressRequest; +import com.amazonaws.services.ec2.model.AuthorizeClientVpnIngressResult; import com.amazonaws.services.ec2.model.AuthorizeSecurityGroupEgressRequest; import com.amazonaws.services.ec2.model.AuthorizeSecurityGroupEgressResult; import com.amazonaws.services.ec2.model.AuthorizeSecurityGroupIngressRequest; @@ -69,6 +82,8 @@ import com.amazonaws.services.ec2.model.BundleInstanceResult; import com.amazonaws.services.ec2.model.CancelBundleTaskRequest; import com.amazonaws.services.ec2.model.CancelBundleTaskResult; +import com.amazonaws.services.ec2.model.CancelCapacityReservationRequest; +import com.amazonaws.services.ec2.model.CancelCapacityReservationResult; import com.amazonaws.services.ec2.model.CancelConversionTaskRequest; import com.amazonaws.services.ec2.model.CancelConversionTaskResult; import com.amazonaws.services.ec2.model.CancelExportTaskRequest; @@ -83,24 +98,34 @@ import com.amazonaws.services.ec2.model.CancelSpotInstanceRequestsResult; import com.amazonaws.services.ec2.model.ConfirmProductInstanceRequest; import com.amazonaws.services.ec2.model.ConfirmProductInstanceResult; +import com.amazonaws.services.ec2.model.CopyFpgaImageRequest; +import com.amazonaws.services.ec2.model.CopyFpgaImageResult; import com.amazonaws.services.ec2.model.CopyImageRequest; import com.amazonaws.services.ec2.model.CopyImageResult; import com.amazonaws.services.ec2.model.CopySnapshotRequest; import com.amazonaws.services.ec2.model.CopySnapshotResult; +import com.amazonaws.services.ec2.model.CreateCapacityReservationRequest; +import com.amazonaws.services.ec2.model.CreateCapacityReservationResult; +import com.amazonaws.services.ec2.model.CreateClientVpnEndpointRequest; +import com.amazonaws.services.ec2.model.CreateClientVpnEndpointResult; +import com.amazonaws.services.ec2.model.CreateClientVpnRouteRequest; +import com.amazonaws.services.ec2.model.CreateClientVpnRouteResult; import com.amazonaws.services.ec2.model.CreateCustomerGatewayRequest; -import com.amazonaws.services.ec2.model.CreateDefaultVpcResult; -import com.amazonaws.services.ec2.model.CreateDefaultVpcRequest; import com.amazonaws.services.ec2.model.CreateCustomerGatewayResult; +import com.amazonaws.services.ec2.model.CreateDefaultSubnetRequest; +import com.amazonaws.services.ec2.model.CreateDefaultSubnetResult; +import 
com.amazonaws.services.ec2.model.CreateDefaultVpcRequest; +import com.amazonaws.services.ec2.model.CreateDefaultVpcResult; import com.amazonaws.services.ec2.model.CreateDhcpOptionsRequest; import com.amazonaws.services.ec2.model.CreateDhcpOptionsResult; import com.amazonaws.services.ec2.model.CreateEgressOnlyInternetGatewayRequest; import com.amazonaws.services.ec2.model.CreateEgressOnlyInternetGatewayResult; -import com.amazonaws.services.ec2.model.CreateFpgaImageRequest; -import com.amazonaws.services.ec2.model.CreateFpgaImageResult; -import com.amazonaws.services.ec2.model.CreateNetworkInterfacePermissionRequest; -import com.amazonaws.services.ec2.model.CreateNetworkInterfacePermissionResult; +import com.amazonaws.services.ec2.model.CreateFleetRequest; +import com.amazonaws.services.ec2.model.CreateFleetResult; import com.amazonaws.services.ec2.model.CreateFlowLogsRequest; import com.amazonaws.services.ec2.model.CreateFlowLogsResult; +import com.amazonaws.services.ec2.model.CreateFpgaImageRequest; +import com.amazonaws.services.ec2.model.CreateFpgaImageResult; import com.amazonaws.services.ec2.model.CreateImageRequest; import com.amazonaws.services.ec2.model.CreateImageResult; import com.amazonaws.services.ec2.model.CreateInstanceExportTaskRequest; @@ -109,12 +134,18 @@ import com.amazonaws.services.ec2.model.CreateInternetGatewayResult; import com.amazonaws.services.ec2.model.CreateKeyPairRequest; import com.amazonaws.services.ec2.model.CreateKeyPairResult; +import com.amazonaws.services.ec2.model.CreateLaunchTemplateRequest; +import com.amazonaws.services.ec2.model.CreateLaunchTemplateResult; +import com.amazonaws.services.ec2.model.CreateLaunchTemplateVersionRequest; +import com.amazonaws.services.ec2.model.CreateLaunchTemplateVersionResult; import com.amazonaws.services.ec2.model.CreateNatGatewayRequest; import com.amazonaws.services.ec2.model.CreateNatGatewayResult; import com.amazonaws.services.ec2.model.CreateNetworkAclEntryRequest; import com.amazonaws.services.ec2.model.CreateNetworkAclEntryResult; import com.amazonaws.services.ec2.model.CreateNetworkAclRequest; import com.amazonaws.services.ec2.model.CreateNetworkAclResult; +import com.amazonaws.services.ec2.model.CreateNetworkInterfacePermissionRequest; +import com.amazonaws.services.ec2.model.CreateNetworkInterfacePermissionResult; import com.amazonaws.services.ec2.model.CreateNetworkInterfaceRequest; import com.amazonaws.services.ec2.model.CreateNetworkInterfaceResult; import com.amazonaws.services.ec2.model.CreatePlacementGroupRequest; @@ -135,10 +166,22 @@ import com.amazonaws.services.ec2.model.CreateSubnetResult; import com.amazonaws.services.ec2.model.CreateTagsRequest; import com.amazonaws.services.ec2.model.CreateTagsResult; +import com.amazonaws.services.ec2.model.CreateTransitGatewayRequest; +import com.amazonaws.services.ec2.model.CreateTransitGatewayResult; +import com.amazonaws.services.ec2.model.CreateTransitGatewayRouteRequest; +import com.amazonaws.services.ec2.model.CreateTransitGatewayRouteResult; +import com.amazonaws.services.ec2.model.CreateTransitGatewayRouteTableRequest; +import com.amazonaws.services.ec2.model.CreateTransitGatewayRouteTableResult; +import com.amazonaws.services.ec2.model.CreateTransitGatewayVpcAttachmentRequest; +import com.amazonaws.services.ec2.model.CreateTransitGatewayVpcAttachmentResult; import com.amazonaws.services.ec2.model.CreateVolumeRequest; import com.amazonaws.services.ec2.model.CreateVolumeResult; +import 
com.amazonaws.services.ec2.model.CreateVpcEndpointConnectionNotificationRequest; +import com.amazonaws.services.ec2.model.CreateVpcEndpointConnectionNotificationResult; import com.amazonaws.services.ec2.model.CreateVpcEndpointRequest; import com.amazonaws.services.ec2.model.CreateVpcEndpointResult; +import com.amazonaws.services.ec2.model.CreateVpcEndpointServiceConfigurationRequest; +import com.amazonaws.services.ec2.model.CreateVpcEndpointServiceConfigurationResult; import com.amazonaws.services.ec2.model.CreateVpcPeeringConnectionRequest; import com.amazonaws.services.ec2.model.CreateVpcPeeringConnectionResult; import com.amazonaws.services.ec2.model.CreateVpcRequest; @@ -149,26 +192,38 @@ import com.amazonaws.services.ec2.model.CreateVpnConnectionRouteResult; import com.amazonaws.services.ec2.model.CreateVpnGatewayRequest; import com.amazonaws.services.ec2.model.CreateVpnGatewayResult; +import com.amazonaws.services.ec2.model.DeleteClientVpnEndpointRequest; +import com.amazonaws.services.ec2.model.DeleteClientVpnEndpointResult; +import com.amazonaws.services.ec2.model.DeleteClientVpnRouteRequest; +import com.amazonaws.services.ec2.model.DeleteClientVpnRouteResult; import com.amazonaws.services.ec2.model.DeleteCustomerGatewayRequest; import com.amazonaws.services.ec2.model.DeleteCustomerGatewayResult; import com.amazonaws.services.ec2.model.DeleteDhcpOptionsRequest; import com.amazonaws.services.ec2.model.DeleteDhcpOptionsResult; import com.amazonaws.services.ec2.model.DeleteEgressOnlyInternetGatewayRequest; import com.amazonaws.services.ec2.model.DeleteEgressOnlyInternetGatewayResult; -import com.amazonaws.services.ec2.model.DeleteNetworkInterfacePermissionRequest; -import com.amazonaws.services.ec2.model.DeleteNetworkInterfacePermissionResult; +import com.amazonaws.services.ec2.model.DeleteFleetsRequest; +import com.amazonaws.services.ec2.model.DeleteFleetsResult; import com.amazonaws.services.ec2.model.DeleteFlowLogsRequest; import com.amazonaws.services.ec2.model.DeleteFlowLogsResult; +import com.amazonaws.services.ec2.model.DeleteFpgaImageRequest; +import com.amazonaws.services.ec2.model.DeleteFpgaImageResult; import com.amazonaws.services.ec2.model.DeleteInternetGatewayRequest; import com.amazonaws.services.ec2.model.DeleteInternetGatewayResult; import com.amazonaws.services.ec2.model.DeleteKeyPairRequest; import com.amazonaws.services.ec2.model.DeleteKeyPairResult; +import com.amazonaws.services.ec2.model.DeleteLaunchTemplateRequest; +import com.amazonaws.services.ec2.model.DeleteLaunchTemplateResult; +import com.amazonaws.services.ec2.model.DeleteLaunchTemplateVersionsRequest; +import com.amazonaws.services.ec2.model.DeleteLaunchTemplateVersionsResult; import com.amazonaws.services.ec2.model.DeleteNatGatewayRequest; import com.amazonaws.services.ec2.model.DeleteNatGatewayResult; import com.amazonaws.services.ec2.model.DeleteNetworkAclEntryRequest; import com.amazonaws.services.ec2.model.DeleteNetworkAclEntryResult; import com.amazonaws.services.ec2.model.DeleteNetworkAclRequest; import com.amazonaws.services.ec2.model.DeleteNetworkAclResult; +import com.amazonaws.services.ec2.model.DeleteNetworkInterfacePermissionRequest; +import com.amazonaws.services.ec2.model.DeleteNetworkInterfacePermissionResult; import com.amazonaws.services.ec2.model.DeleteNetworkInterfaceRequest; import com.amazonaws.services.ec2.model.DeleteNetworkInterfaceResult; import com.amazonaws.services.ec2.model.DeletePlacementGroupRequest; @@ -187,8 +242,20 @@ import 
com.amazonaws.services.ec2.model.DeleteSubnetResult; import com.amazonaws.services.ec2.model.DeleteTagsRequest; import com.amazonaws.services.ec2.model.DeleteTagsResult; +import com.amazonaws.services.ec2.model.DeleteTransitGatewayRequest; +import com.amazonaws.services.ec2.model.DeleteTransitGatewayResult; +import com.amazonaws.services.ec2.model.DeleteTransitGatewayRouteRequest; +import com.amazonaws.services.ec2.model.DeleteTransitGatewayRouteResult; +import com.amazonaws.services.ec2.model.DeleteTransitGatewayRouteTableRequest; +import com.amazonaws.services.ec2.model.DeleteTransitGatewayRouteTableResult; +import com.amazonaws.services.ec2.model.DeleteTransitGatewayVpcAttachmentRequest; +import com.amazonaws.services.ec2.model.DeleteTransitGatewayVpcAttachmentResult; import com.amazonaws.services.ec2.model.DeleteVolumeRequest; import com.amazonaws.services.ec2.model.DeleteVolumeResult; +import com.amazonaws.services.ec2.model.DeleteVpcEndpointConnectionNotificationsRequest; +import com.amazonaws.services.ec2.model.DeleteVpcEndpointConnectionNotificationsResult; +import com.amazonaws.services.ec2.model.DeleteVpcEndpointServiceConfigurationsRequest; +import com.amazonaws.services.ec2.model.DeleteVpcEndpointServiceConfigurationsResult; import com.amazonaws.services.ec2.model.DeleteVpcEndpointsRequest; import com.amazonaws.services.ec2.model.DeleteVpcEndpointsResult; import com.amazonaws.services.ec2.model.DeleteVpcPeeringConnectionRequest; @@ -201,18 +268,36 @@ import com.amazonaws.services.ec2.model.DeleteVpnConnectionRouteResult; import com.amazonaws.services.ec2.model.DeleteVpnGatewayRequest; import com.amazonaws.services.ec2.model.DeleteVpnGatewayResult; +import com.amazonaws.services.ec2.model.DeprovisionByoipCidrRequest; +import com.amazonaws.services.ec2.model.DeprovisionByoipCidrResult; import com.amazonaws.services.ec2.model.DeregisterImageRequest; import com.amazonaws.services.ec2.model.DeregisterImageResult; import com.amazonaws.services.ec2.model.DescribeAccountAttributesRequest; import com.amazonaws.services.ec2.model.DescribeAccountAttributesResult; import com.amazonaws.services.ec2.model.DescribeAddressesRequest; import com.amazonaws.services.ec2.model.DescribeAddressesResult; +import com.amazonaws.services.ec2.model.DescribeAggregateIdFormatRequest; +import com.amazonaws.services.ec2.model.DescribeAggregateIdFormatResult; import com.amazonaws.services.ec2.model.DescribeAvailabilityZonesRequest; import com.amazonaws.services.ec2.model.DescribeAvailabilityZonesResult; import com.amazonaws.services.ec2.model.DescribeBundleTasksRequest; import com.amazonaws.services.ec2.model.DescribeBundleTasksResult; +import com.amazonaws.services.ec2.model.DescribeByoipCidrsRequest; +import com.amazonaws.services.ec2.model.DescribeByoipCidrsResult; +import com.amazonaws.services.ec2.model.DescribeCapacityReservationsRequest; +import com.amazonaws.services.ec2.model.DescribeCapacityReservationsResult; import com.amazonaws.services.ec2.model.DescribeClassicLinkInstancesRequest; import com.amazonaws.services.ec2.model.DescribeClassicLinkInstancesResult; +import com.amazonaws.services.ec2.model.DescribeClientVpnAuthorizationRulesRequest; +import com.amazonaws.services.ec2.model.DescribeClientVpnAuthorizationRulesResult; +import com.amazonaws.services.ec2.model.DescribeClientVpnConnectionsRequest; +import com.amazonaws.services.ec2.model.DescribeClientVpnConnectionsResult; +import com.amazonaws.services.ec2.model.DescribeClientVpnEndpointsRequest; +import 
com.amazonaws.services.ec2.model.DescribeClientVpnEndpointsResult; +import com.amazonaws.services.ec2.model.DescribeClientVpnRoutesRequest; +import com.amazonaws.services.ec2.model.DescribeClientVpnRoutesResult; +import com.amazonaws.services.ec2.model.DescribeClientVpnTargetNetworksRequest; +import com.amazonaws.services.ec2.model.DescribeClientVpnTargetNetworksResult; import com.amazonaws.services.ec2.model.DescribeConversionTasksRequest; import com.amazonaws.services.ec2.model.DescribeConversionTasksResult; import com.amazonaws.services.ec2.model.DescribeCustomerGatewaysRequest; @@ -221,26 +306,34 @@ import com.amazonaws.services.ec2.model.DescribeDhcpOptionsResult; import com.amazonaws.services.ec2.model.DescribeEgressOnlyInternetGatewaysRequest; import com.amazonaws.services.ec2.model.DescribeEgressOnlyInternetGatewaysResult; -import com.amazonaws.services.ec2.model.DescribeExportTasksRequest; -import com.amazonaws.services.ec2.model.DescribeExportTasksResult; import com.amazonaws.services.ec2.model.DescribeElasticGpusRequest; import com.amazonaws.services.ec2.model.DescribeElasticGpusResult; +import com.amazonaws.services.ec2.model.DescribeExportTasksRequest; +import com.amazonaws.services.ec2.model.DescribeExportTasksResult; +import com.amazonaws.services.ec2.model.DescribeFleetHistoryRequest; +import com.amazonaws.services.ec2.model.DescribeFleetHistoryResult; +import com.amazonaws.services.ec2.model.DescribeFleetInstancesRequest; +import com.amazonaws.services.ec2.model.DescribeFleetInstancesResult; +import com.amazonaws.services.ec2.model.DescribeFleetsRequest; +import com.amazonaws.services.ec2.model.DescribeFleetsResult; +import com.amazonaws.services.ec2.model.DescribeFlowLogsRequest; +import com.amazonaws.services.ec2.model.DescribeFlowLogsResult; +import com.amazonaws.services.ec2.model.DescribeFpgaImageAttributeRequest; +import com.amazonaws.services.ec2.model.DescribeFpgaImageAttributeResult; import com.amazonaws.services.ec2.model.DescribeFpgaImagesRequest; import com.amazonaws.services.ec2.model.DescribeFpgaImagesResult; import com.amazonaws.services.ec2.model.DescribeHostReservationOfferingsRequest; import com.amazonaws.services.ec2.model.DescribeHostReservationOfferingsResult; import com.amazonaws.services.ec2.model.DescribeHostReservationsRequest; import com.amazonaws.services.ec2.model.DescribeHostReservationsResult; -import com.amazonaws.services.ec2.model.DescribeIdentityIdFormatRequest; -import com.amazonaws.services.ec2.model.DescribeIdentityIdFormatResult; -import com.amazonaws.services.ec2.model.DescribeFlowLogsRequest; -import com.amazonaws.services.ec2.model.DescribeFlowLogsResult; import com.amazonaws.services.ec2.model.DescribeHostsRequest; import com.amazonaws.services.ec2.model.DescribeHostsResult; import com.amazonaws.services.ec2.model.DescribeIamInstanceProfileAssociationsRequest; import com.amazonaws.services.ec2.model.DescribeIamInstanceProfileAssociationsResult; import com.amazonaws.services.ec2.model.DescribeIdFormatRequest; import com.amazonaws.services.ec2.model.DescribeIdFormatResult; +import com.amazonaws.services.ec2.model.DescribeIdentityIdFormatRequest; +import com.amazonaws.services.ec2.model.DescribeIdentityIdFormatResult; import com.amazonaws.services.ec2.model.DescribeImageAttributeRequest; import com.amazonaws.services.ec2.model.DescribeImageAttributeResult; import com.amazonaws.services.ec2.model.DescribeImagesRequest; @@ -251,6 +344,8 @@ import com.amazonaws.services.ec2.model.DescribeImportSnapshotTasksResult; import 
com.amazonaws.services.ec2.model.DescribeInstanceAttributeRequest; import com.amazonaws.services.ec2.model.DescribeInstanceAttributeResult; +import com.amazonaws.services.ec2.model.DescribeInstanceCreditSpecificationsRequest; +import com.amazonaws.services.ec2.model.DescribeInstanceCreditSpecificationsResult; import com.amazonaws.services.ec2.model.DescribeInstanceStatusRequest; import com.amazonaws.services.ec2.model.DescribeInstanceStatusResult; import com.amazonaws.services.ec2.model.DescribeInstancesRequest; @@ -259,6 +354,10 @@ import com.amazonaws.services.ec2.model.DescribeInternetGatewaysResult; import com.amazonaws.services.ec2.model.DescribeKeyPairsRequest; import com.amazonaws.services.ec2.model.DescribeKeyPairsResult; +import com.amazonaws.services.ec2.model.DescribeLaunchTemplateVersionsRequest; +import com.amazonaws.services.ec2.model.DescribeLaunchTemplateVersionsResult; +import com.amazonaws.services.ec2.model.DescribeLaunchTemplatesRequest; +import com.amazonaws.services.ec2.model.DescribeLaunchTemplatesResult; import com.amazonaws.services.ec2.model.DescribeMovingAddressesRequest; import com.amazonaws.services.ec2.model.DescribeMovingAddressesResult; import com.amazonaws.services.ec2.model.DescribeNatGatewaysRequest; @@ -267,14 +366,18 @@ import com.amazonaws.services.ec2.model.DescribeNetworkAclsResult; import com.amazonaws.services.ec2.model.DescribeNetworkInterfaceAttributeRequest; import com.amazonaws.services.ec2.model.DescribeNetworkInterfaceAttributeResult; -import com.amazonaws.services.ec2.model.DescribeNetworkInterfacesRequest; -import com.amazonaws.services.ec2.model.DescribeNetworkInterfacesResult; import com.amazonaws.services.ec2.model.DescribeNetworkInterfacePermissionsRequest; import com.amazonaws.services.ec2.model.DescribeNetworkInterfacePermissionsResult; +import com.amazonaws.services.ec2.model.DescribeNetworkInterfacesRequest; +import com.amazonaws.services.ec2.model.DescribeNetworkInterfacesResult; import com.amazonaws.services.ec2.model.DescribePlacementGroupsRequest; import com.amazonaws.services.ec2.model.DescribePlacementGroupsResult; import com.amazonaws.services.ec2.model.DescribePrefixListsRequest; import com.amazonaws.services.ec2.model.DescribePrefixListsResult; +import com.amazonaws.services.ec2.model.DescribePrincipalIdFormatRequest; +import com.amazonaws.services.ec2.model.DescribePrincipalIdFormatResult; +import com.amazonaws.services.ec2.model.DescribePublicIpv4PoolsRequest; +import com.amazonaws.services.ec2.model.DescribePublicIpv4PoolsResult; import com.amazonaws.services.ec2.model.DescribeRegionsRequest; import com.amazonaws.services.ec2.model.DescribeRegionsResult; import com.amazonaws.services.ec2.model.DescribeReservedInstancesListingsRequest; @@ -291,12 +394,10 @@ import com.amazonaws.services.ec2.model.DescribeScheduledInstanceAvailabilityResult; import com.amazonaws.services.ec2.model.DescribeScheduledInstancesRequest; import com.amazonaws.services.ec2.model.DescribeScheduledInstancesResult; -import com.amazonaws.services.ec2.model.DescribeSecurityGroupsRequest; -import com.amazonaws.services.ec2.model.DescribeSecurityGroupsResult; -import com.amazonaws.services.ec2.model.DescribeStaleSecurityGroupsRequest; -import com.amazonaws.services.ec2.model.DescribeStaleSecurityGroupsResult; import com.amazonaws.services.ec2.model.DescribeSecurityGroupReferencesRequest; import com.amazonaws.services.ec2.model.DescribeSecurityGroupReferencesResult; +import com.amazonaws.services.ec2.model.DescribeSecurityGroupsRequest; +import 
com.amazonaws.services.ec2.model.DescribeSecurityGroupsResult; import com.amazonaws.services.ec2.model.DescribeSnapshotAttributeRequest; import com.amazonaws.services.ec2.model.DescribeSnapshotAttributeResult; import com.amazonaws.services.ec2.model.DescribeSnapshotsRequest; @@ -313,10 +414,20 @@ import com.amazonaws.services.ec2.model.DescribeSpotInstanceRequestsResult; import com.amazonaws.services.ec2.model.DescribeSpotPriceHistoryRequest; import com.amazonaws.services.ec2.model.DescribeSpotPriceHistoryResult; +import com.amazonaws.services.ec2.model.DescribeStaleSecurityGroupsRequest; +import com.amazonaws.services.ec2.model.DescribeStaleSecurityGroupsResult; import com.amazonaws.services.ec2.model.DescribeSubnetsRequest; import com.amazonaws.services.ec2.model.DescribeSubnetsResult; import com.amazonaws.services.ec2.model.DescribeTagsRequest; import com.amazonaws.services.ec2.model.DescribeTagsResult; +import com.amazonaws.services.ec2.model.DescribeTransitGatewayAttachmentsRequest; +import com.amazonaws.services.ec2.model.DescribeTransitGatewayAttachmentsResult; +import com.amazonaws.services.ec2.model.DescribeTransitGatewayRouteTablesRequest; +import com.amazonaws.services.ec2.model.DescribeTransitGatewayRouteTablesResult; +import com.amazonaws.services.ec2.model.DescribeTransitGatewayVpcAttachmentsRequest; +import com.amazonaws.services.ec2.model.DescribeTransitGatewayVpcAttachmentsResult; +import com.amazonaws.services.ec2.model.DescribeTransitGatewaysRequest; +import com.amazonaws.services.ec2.model.DescribeTransitGatewaysResult; import com.amazonaws.services.ec2.model.DescribeVolumeAttributeRequest; import com.amazonaws.services.ec2.model.DescribeVolumeAttributeResult; import com.amazonaws.services.ec2.model.DescribeVolumeStatusRequest; @@ -331,6 +442,14 @@ import com.amazonaws.services.ec2.model.DescribeVpcClassicLinkDnsSupportResult; import com.amazonaws.services.ec2.model.DescribeVpcClassicLinkRequest; import com.amazonaws.services.ec2.model.DescribeVpcClassicLinkResult; +import com.amazonaws.services.ec2.model.DescribeVpcEndpointConnectionNotificationsRequest; +import com.amazonaws.services.ec2.model.DescribeVpcEndpointConnectionNotificationsResult; +import com.amazonaws.services.ec2.model.DescribeVpcEndpointConnectionsRequest; +import com.amazonaws.services.ec2.model.DescribeVpcEndpointConnectionsResult; +import com.amazonaws.services.ec2.model.DescribeVpcEndpointServiceConfigurationsRequest; +import com.amazonaws.services.ec2.model.DescribeVpcEndpointServiceConfigurationsResult; +import com.amazonaws.services.ec2.model.DescribeVpcEndpointServicePermissionsRequest; +import com.amazonaws.services.ec2.model.DescribeVpcEndpointServicePermissionsResult; import com.amazonaws.services.ec2.model.DescribeVpcEndpointServicesRequest; import com.amazonaws.services.ec2.model.DescribeVpcEndpointServicesResult; import com.amazonaws.services.ec2.model.DescribeVpcEndpointsRequest; @@ -353,6 +472,8 @@ import com.amazonaws.services.ec2.model.DetachVolumeResult; import com.amazonaws.services.ec2.model.DetachVpnGatewayRequest; import com.amazonaws.services.ec2.model.DetachVpnGatewayResult; +import com.amazonaws.services.ec2.model.DisableTransitGatewayRouteTablePropagationRequest; +import com.amazonaws.services.ec2.model.DisableTransitGatewayRouteTablePropagationResult; import com.amazonaws.services.ec2.model.DisableVgwRoutePropagationRequest; import com.amazonaws.services.ec2.model.DisableVgwRoutePropagationResult; import com.amazonaws.services.ec2.model.DisableVpcClassicLinkDnsSupportRequest; 
@@ -361,16 +482,22 @@ import com.amazonaws.services.ec2.model.DisableVpcClassicLinkResult; import com.amazonaws.services.ec2.model.DisassociateAddressRequest; import com.amazonaws.services.ec2.model.DisassociateAddressResult; -import com.amazonaws.services.ec2.model.DisassociateRouteTableRequest; -import com.amazonaws.services.ec2.model.DisassociateRouteTableResult; +import com.amazonaws.services.ec2.model.DisassociateClientVpnTargetNetworkRequest; +import com.amazonaws.services.ec2.model.DisassociateClientVpnTargetNetworkResult; import com.amazonaws.services.ec2.model.DisassociateIamInstanceProfileRequest; import com.amazonaws.services.ec2.model.DisassociateIamInstanceProfileResult; -import com.amazonaws.services.ec2.model.DisassociateVpcCidrBlockRequest; -import com.amazonaws.services.ec2.model.DisassociateVpcCidrBlockResult; +import com.amazonaws.services.ec2.model.DisassociateRouteTableRequest; +import com.amazonaws.services.ec2.model.DisassociateRouteTableResult; import com.amazonaws.services.ec2.model.DisassociateSubnetCidrBlockRequest; import com.amazonaws.services.ec2.model.DisassociateSubnetCidrBlockResult; +import com.amazonaws.services.ec2.model.DisassociateTransitGatewayRouteTableRequest; +import com.amazonaws.services.ec2.model.DisassociateTransitGatewayRouteTableResult; +import com.amazonaws.services.ec2.model.DisassociateVpcCidrBlockRequest; +import com.amazonaws.services.ec2.model.DisassociateVpcCidrBlockResult; import com.amazonaws.services.ec2.model.DryRunResult; import com.amazonaws.services.ec2.model.DryRunSupportedRequest; +import com.amazonaws.services.ec2.model.EnableTransitGatewayRouteTablePropagationRequest; +import com.amazonaws.services.ec2.model.EnableTransitGatewayRouteTablePropagationResult; import com.amazonaws.services.ec2.model.EnableVgwRoutePropagationRequest; import com.amazonaws.services.ec2.model.EnableVgwRoutePropagationResult; import com.amazonaws.services.ec2.model.EnableVolumeIORequest; @@ -379,6 +506,12 @@ import com.amazonaws.services.ec2.model.EnableVpcClassicLinkDnsSupportResult; import com.amazonaws.services.ec2.model.EnableVpcClassicLinkRequest; import com.amazonaws.services.ec2.model.EnableVpcClassicLinkResult; +import com.amazonaws.services.ec2.model.ExportClientVpnClientCertificateRevocationListRequest; +import com.amazonaws.services.ec2.model.ExportClientVpnClientCertificateRevocationListResult; +import com.amazonaws.services.ec2.model.ExportClientVpnClientConfigurationRequest; +import com.amazonaws.services.ec2.model.ExportClientVpnClientConfigurationResult; +import com.amazonaws.services.ec2.model.ExportTransitGatewayRoutesRequest; +import com.amazonaws.services.ec2.model.ExportTransitGatewayRoutesResult; import com.amazonaws.services.ec2.model.Filter; import com.amazonaws.services.ec2.model.GetConsoleOutputRequest; import com.amazonaws.services.ec2.model.GetConsoleOutputResult; @@ -386,10 +519,20 @@ import com.amazonaws.services.ec2.model.GetConsoleScreenshotResult; import com.amazonaws.services.ec2.model.GetHostReservationPurchasePreviewRequest; import com.amazonaws.services.ec2.model.GetHostReservationPurchasePreviewResult; +import com.amazonaws.services.ec2.model.GetLaunchTemplateDataRequest; +import com.amazonaws.services.ec2.model.GetLaunchTemplateDataResult; import com.amazonaws.services.ec2.model.GetPasswordDataRequest; import com.amazonaws.services.ec2.model.GetPasswordDataResult; import com.amazonaws.services.ec2.model.GetReservedInstancesExchangeQuoteRequest; import 
com.amazonaws.services.ec2.model.GetReservedInstancesExchangeQuoteResult; +import com.amazonaws.services.ec2.model.GetTransitGatewayAttachmentPropagationsRequest; +import com.amazonaws.services.ec2.model.GetTransitGatewayAttachmentPropagationsResult; +import com.amazonaws.services.ec2.model.GetTransitGatewayRouteTableAssociationsRequest; +import com.amazonaws.services.ec2.model.GetTransitGatewayRouteTableAssociationsResult; +import com.amazonaws.services.ec2.model.GetTransitGatewayRouteTablePropagationsRequest; +import com.amazonaws.services.ec2.model.GetTransitGatewayRouteTablePropagationsResult; +import com.amazonaws.services.ec2.model.ImportClientVpnClientCertificateRevocationListRequest; +import com.amazonaws.services.ec2.model.ImportClientVpnClientCertificateRevocationListResult; import com.amazonaws.services.ec2.model.ImportImageRequest; import com.amazonaws.services.ec2.model.ImportImageResult; import com.amazonaws.services.ec2.model.ImportInstanceRequest; @@ -403,18 +546,32 @@ import com.amazonaws.services.ec2.model.Instance; import com.amazonaws.services.ec2.model.InstanceState; import com.amazonaws.services.ec2.model.InstanceStateName; +import com.amazonaws.services.ec2.model.ModifyCapacityReservationRequest; +import com.amazonaws.services.ec2.model.ModifyCapacityReservationResult; +import com.amazonaws.services.ec2.model.ModifyClientVpnEndpointRequest; +import com.amazonaws.services.ec2.model.ModifyClientVpnEndpointResult; +import com.amazonaws.services.ec2.model.ModifyFleetRequest; +import com.amazonaws.services.ec2.model.ModifyFleetResult; +import com.amazonaws.services.ec2.model.ModifyFpgaImageAttributeRequest; +import com.amazonaws.services.ec2.model.ModifyFpgaImageAttributeResult; import com.amazonaws.services.ec2.model.ModifyHostsRequest; import com.amazonaws.services.ec2.model.ModifyHostsResult; import com.amazonaws.services.ec2.model.ModifyIdFormatRequest; import com.amazonaws.services.ec2.model.ModifyIdFormatResult; +import com.amazonaws.services.ec2.model.ModifyIdentityIdFormatRequest; +import com.amazonaws.services.ec2.model.ModifyIdentityIdFormatResult; import com.amazonaws.services.ec2.model.ModifyImageAttributeRequest; import com.amazonaws.services.ec2.model.ModifyImageAttributeResult; import com.amazonaws.services.ec2.model.ModifyInstanceAttributeRequest; import com.amazonaws.services.ec2.model.ModifyInstanceAttributeResult; +import com.amazonaws.services.ec2.model.ModifyInstanceCapacityReservationAttributesRequest; +import com.amazonaws.services.ec2.model.ModifyInstanceCapacityReservationAttributesResult; +import com.amazonaws.services.ec2.model.ModifyInstanceCreditSpecificationRequest; +import com.amazonaws.services.ec2.model.ModifyInstanceCreditSpecificationResult; import com.amazonaws.services.ec2.model.ModifyInstancePlacementRequest; import com.amazonaws.services.ec2.model.ModifyInstancePlacementResult; -import com.amazonaws.services.ec2.model.ModifyIdentityIdFormatRequest; -import com.amazonaws.services.ec2.model.ModifyIdentityIdFormatResult; +import com.amazonaws.services.ec2.model.ModifyLaunchTemplateRequest; +import com.amazonaws.services.ec2.model.ModifyLaunchTemplateResult; import com.amazonaws.services.ec2.model.ModifyNetworkInterfaceAttributeRequest; import com.amazonaws.services.ec2.model.ModifyNetworkInterfaceAttributeResult; import com.amazonaws.services.ec2.model.ModifyReservedInstancesRequest; @@ -425,32 +582,48 @@ import com.amazonaws.services.ec2.model.ModifySpotFleetRequestResult; import 
com.amazonaws.services.ec2.model.ModifySubnetAttributeRequest; import com.amazonaws.services.ec2.model.ModifySubnetAttributeResult; +import com.amazonaws.services.ec2.model.ModifyTransitGatewayVpcAttachmentRequest; +import com.amazonaws.services.ec2.model.ModifyTransitGatewayVpcAttachmentResult; import com.amazonaws.services.ec2.model.ModifyVolumeAttributeRequest; import com.amazonaws.services.ec2.model.ModifyVolumeAttributeResult; import com.amazonaws.services.ec2.model.ModifyVolumeRequest; import com.amazonaws.services.ec2.model.ModifyVolumeResult; import com.amazonaws.services.ec2.model.ModifyVpcAttributeRequest; import com.amazonaws.services.ec2.model.ModifyVpcAttributeResult; +import com.amazonaws.services.ec2.model.ModifyVpcEndpointConnectionNotificationRequest; +import com.amazonaws.services.ec2.model.ModifyVpcEndpointConnectionNotificationResult; import com.amazonaws.services.ec2.model.ModifyVpcEndpointRequest; import com.amazonaws.services.ec2.model.ModifyVpcEndpointResult; +import com.amazonaws.services.ec2.model.ModifyVpcEndpointServiceConfigurationRequest; +import com.amazonaws.services.ec2.model.ModifyVpcEndpointServiceConfigurationResult; +import com.amazonaws.services.ec2.model.ModifyVpcEndpointServicePermissionsRequest; +import com.amazonaws.services.ec2.model.ModifyVpcEndpointServicePermissionsResult; +import com.amazonaws.services.ec2.model.ModifyVpcPeeringConnectionOptionsRequest; +import com.amazonaws.services.ec2.model.ModifyVpcPeeringConnectionOptionsResult; +import com.amazonaws.services.ec2.model.ModifyVpcTenancyRequest; +import com.amazonaws.services.ec2.model.ModifyVpcTenancyResult; import com.amazonaws.services.ec2.model.MonitorInstancesRequest; import com.amazonaws.services.ec2.model.MonitorInstancesResult; import com.amazonaws.services.ec2.model.MoveAddressToVpcRequest; import com.amazonaws.services.ec2.model.MoveAddressToVpcResult; +import com.amazonaws.services.ec2.model.ProvisionByoipCidrRequest; +import com.amazonaws.services.ec2.model.ProvisionByoipCidrResult; +import com.amazonaws.services.ec2.model.PurchaseHostReservationRequest; +import com.amazonaws.services.ec2.model.PurchaseHostReservationResult; import com.amazonaws.services.ec2.model.PurchaseReservedInstancesOfferingRequest; import com.amazonaws.services.ec2.model.PurchaseReservedInstancesOfferingResult; import com.amazonaws.services.ec2.model.PurchaseScheduledInstancesRequest; import com.amazonaws.services.ec2.model.PurchaseScheduledInstancesResult; -import com.amazonaws.services.ec2.model.PurchaseHostReservationRequest; -import com.amazonaws.services.ec2.model.PurchaseHostReservationResult; import com.amazonaws.services.ec2.model.RebootInstancesRequest; import com.amazonaws.services.ec2.model.RebootInstancesResult; import com.amazonaws.services.ec2.model.RegisterImageRequest; import com.amazonaws.services.ec2.model.RegisterImageResult; +import com.amazonaws.services.ec2.model.RejectTransitGatewayVpcAttachmentRequest; +import com.amazonaws.services.ec2.model.RejectTransitGatewayVpcAttachmentResult; +import com.amazonaws.services.ec2.model.RejectVpcEndpointConnectionsRequest; +import com.amazonaws.services.ec2.model.RejectVpcEndpointConnectionsResult; import com.amazonaws.services.ec2.model.RejectVpcPeeringConnectionRequest; import com.amazonaws.services.ec2.model.RejectVpcPeeringConnectionResult; -import com.amazonaws.services.ec2.model.ModifyVpcPeeringConnectionOptionsRequest; -import com.amazonaws.services.ec2.model.ModifyVpcPeeringConnectionOptionsResult; import 
com.amazonaws.services.ec2.model.ReleaseAddressRequest; import com.amazonaws.services.ec2.model.ReleaseAddressResult; import com.amazonaws.services.ec2.model.ReleaseHostsRequest; @@ -465,6 +638,8 @@ import com.amazonaws.services.ec2.model.ReplaceRouteResult; import com.amazonaws.services.ec2.model.ReplaceRouteTableAssociationRequest; import com.amazonaws.services.ec2.model.ReplaceRouteTableAssociationResult; +import com.amazonaws.services.ec2.model.ReplaceTransitGatewayRouteRequest; +import com.amazonaws.services.ec2.model.ReplaceTransitGatewayRouteResult; import com.amazonaws.services.ec2.model.ReportInstanceStatusRequest; import com.amazonaws.services.ec2.model.ReportInstanceStatusResult; import com.amazonaws.services.ec2.model.RequestSpotFleetRequest; @@ -472,6 +647,8 @@ import com.amazonaws.services.ec2.model.RequestSpotInstancesRequest; import com.amazonaws.services.ec2.model.RequestSpotInstancesResult; import com.amazonaws.services.ec2.model.Reservation; +import com.amazonaws.services.ec2.model.ResetFpgaImageAttributeRequest; +import com.amazonaws.services.ec2.model.ResetFpgaImageAttributeResult; import com.amazonaws.services.ec2.model.ResetImageAttributeRequest; import com.amazonaws.services.ec2.model.ResetImageAttributeResult; import com.amazonaws.services.ec2.model.ResetInstanceAttributeRequest; @@ -482,6 +659,8 @@ import com.amazonaws.services.ec2.model.ResetSnapshotAttributeResult; import com.amazonaws.services.ec2.model.RestoreAddressToClassicRequest; import com.amazonaws.services.ec2.model.RestoreAddressToClassicResult; +import com.amazonaws.services.ec2.model.RevokeClientVpnIngressRequest; +import com.amazonaws.services.ec2.model.RevokeClientVpnIngressResult; import com.amazonaws.services.ec2.model.RevokeSecurityGroupEgressRequest; import com.amazonaws.services.ec2.model.RevokeSecurityGroupEgressResult; import com.amazonaws.services.ec2.model.RevokeSecurityGroupIngressRequest; @@ -490,11 +669,15 @@ import com.amazonaws.services.ec2.model.RunInstancesResult; import com.amazonaws.services.ec2.model.RunScheduledInstancesRequest; import com.amazonaws.services.ec2.model.RunScheduledInstancesResult; +import com.amazonaws.services.ec2.model.SearchTransitGatewayRoutesRequest; +import com.amazonaws.services.ec2.model.SearchTransitGatewayRoutesResult; import com.amazonaws.services.ec2.model.StartInstancesRequest; import com.amazonaws.services.ec2.model.StartInstancesResult; import com.amazonaws.services.ec2.model.StopInstancesRequest; import com.amazonaws.services.ec2.model.StopInstancesResult; import com.amazonaws.services.ec2.model.Tag; +import com.amazonaws.services.ec2.model.TerminateClientVpnConnectionsRequest; +import com.amazonaws.services.ec2.model.TerminateClientVpnConnectionsResult; import com.amazonaws.services.ec2.model.TerminateInstancesRequest; import com.amazonaws.services.ec2.model.TerminateInstancesResult; import com.amazonaws.services.ec2.model.UnassignIpv6AddressesRequest; @@ -507,9 +690,11 @@ import com.amazonaws.services.ec2.model.UpdateSecurityGroupRuleDescriptionsEgressResult; import com.amazonaws.services.ec2.model.UpdateSecurityGroupRuleDescriptionsIngressRequest; import com.amazonaws.services.ec2.model.UpdateSecurityGroupRuleDescriptionsIngressResult; +import com.amazonaws.services.ec2.model.WithdrawByoipCidrRequest; +import com.amazonaws.services.ec2.model.WithdrawByoipCidrResult; import com.amazonaws.services.ec2.waiters.AmazonEC2Waiters; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; +import 
org.apache.logging.log4j.Logger; import java.util.ArrayList; import java.util.Collection; @@ -563,7 +748,7 @@ public AmazonEC2Mock(int nodes, List> tagsList, AWSCredentialsProvider @Override public DescribeInstancesResult describeInstances(DescribeInstancesRequest describeInstancesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { Collection filteredInstances = new ArrayList<>(); logger.debug("--> mocking describeInstances"); @@ -660,49 +845,77 @@ public void setRegion(Region region) throws IllegalArgumentException { @Override public AcceptReservedInstancesExchangeQuoteResult acceptReservedInstancesExchangeQuote( AcceptReservedInstancesExchangeQuoteRequest acceptReservedInstancesExchangeQuoteRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public AcceptTransitGatewayVpcAttachmentResult acceptTransitGatewayVpcAttachment( + AcceptTransitGatewayVpcAttachmentRequest acceptTransitGatewayVpcAttachmentRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public AcceptVpcEndpointConnectionsResult acceptVpcEndpointConnections( + AcceptVpcEndpointConnectionsRequest acceptVpcEndpointConnectionsRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public RebootInstancesResult rebootInstances(RebootInstancesRequest rebootInstancesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesResult describeReservedInstances( DescribeReservedInstancesRequest describeReservedInstancesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateFlowLogsResult createFlowLogs(CreateFlowLogsRequest createFlowLogsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeAvailabilityZonesResult describeAvailabilityZones(DescribeAvailabilityZonesRequest describeAvailabilityZonesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public RestoreAddressToClassicResult restoreAddressToClassic(RestoreAddressToClassicRequest restoreAddressToClassicRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public RevokeClientVpnIngressResult revokeClientVpnIngress(RevokeClientVpnIngressRequest revokeClientVpnIngressRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DetachVolumeResult detachVolume(DetachVolumeRequest detachVolumeRequest) throws AmazonServiceException, AmazonClientException { + public DetachVolumeResult detachVolume(DetachVolumeRequest detachVolumeRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteKeyPairResult deleteKeyPair(DeleteKeyPairRequest deleteKeyPairRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new 
UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteLaunchTemplateResult deleteLaunchTemplate(DeleteLaunchTemplateRequest deleteLaunchTemplateRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteLaunchTemplateVersionsResult deleteLaunchTemplateVersions( + DeleteLaunchTemplateVersionsRequest deleteLaunchTemplateVersionsRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @@ -713,171 +926,231 @@ public DeleteNatGatewayResult deleteNatGateway(DeleteNatGatewayRequest deleteNat @Override public UnmonitorInstancesResult unmonitorInstances(UnmonitorInstancesRequest unmonitorInstancesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public UpdateSecurityGroupRuleDescriptionsIngressResult updateSecurityGroupRuleDescriptionsIngress( UpdateSecurityGroupRuleDescriptionsIngressRequest updateSecurityGroupRuleDescriptionsIngressRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public WithdrawByoipCidrResult withdrawByoipCidr(WithdrawByoipCidrRequest withdrawByoipCidrRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public UpdateSecurityGroupRuleDescriptionsEgressResult updateSecurityGroupRuleDescriptionsEgress( UpdateSecurityGroupRuleDescriptionsEgressRequest updateSecurityGroupRuleDescriptionsEgressRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AttachVpnGatewayResult attachVpnGateway(AttachVpnGatewayRequest attachVpnGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public AuthorizeClientVpnIngressResult authorizeClientVpnIngress(AuthorizeClientVpnIngressRequest authorizeClientVpnIngressRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public CreateImageResult createImage(CreateImageRequest createImageRequest) throws AmazonServiceException, AmazonClientException { + public CreateImageResult createImage(CreateImageRequest createImageRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteSecurityGroupResult deleteSecurityGroup(DeleteSecurityGroupRequest deleteSecurityGroupRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateInstanceExportTaskResult createInstanceExportTask(CreateInstanceExportTaskRequest createInstanceExportTaskRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AuthorizeSecurityGroupEgressResult authorizeSecurityGroupEgress( - AuthorizeSecurityGroupEgressRequest authorizeSecurityGroupEgressRequest) throws AmazonServiceException, AmazonClientException { + AuthorizeSecurityGroupEgressRequest authorizeSecurityGroupEgressRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in 
mock"); } @Override public AssociateDhcpOptionsResult associateDhcpOptions(AssociateDhcpOptionsRequest associateDhcpOptionsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public GetPasswordDataResult getPasswordData(GetPasswordDataRequest getPasswordDataRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public GetReservedInstancesExchangeQuoteResult getReservedInstancesExchangeQuote( GetReservedInstancesExchangeQuoteRequest getReservedInstancesExchangeQuoteRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public GetTransitGatewayAttachmentPropagationsResult getTransitGatewayAttachmentPropagations( + GetTransitGatewayAttachmentPropagationsRequest getTransitGatewayAttachmentPropagationsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public GetTransitGatewayRouteTableAssociationsResult getTransitGatewayRouteTableAssociations( + GetTransitGatewayRouteTableAssociationsRequest getTransitGatewayRouteTableAssociationsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public GetTransitGatewayRouteTablePropagationsResult getTransitGatewayRouteTablePropagations( + GetTransitGatewayRouteTablePropagationsRequest getTransitGatewayRouteTablePropagationsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ImportClientVpnClientCertificateRevocationListResult importClientVpnClientCertificateRevocationList( + ImportClientVpnClientCertificateRevocationListRequest importClientVpnClientCertificateRevocationListRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public StopInstancesResult stopInstances(StopInstancesRequest stopInstancesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public TerminateClientVpnConnectionsResult terminateClientVpnConnections( + TerminateClientVpnConnectionsRequest terminateClientVpnConnectionsRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ImportKeyPairResult importKeyPair(ImportKeyPairRequest importKeyPairRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteNetworkInterfaceResult deleteNetworkInterface(DeleteNetworkInterfaceRequest deleteNetworkInterfaceRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ModifyVpcAttributeResult modifyVpcAttribute(ModifyVpcAttributeRequest modifyVpcAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSpotFleetInstancesResult describeSpotFleetInstances(DescribeSpotFleetInstancesRequest describeSpotFleetInstancesRequest) - throws AmazonServiceException, AmazonClientException { + throws 
AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateSecurityGroupResult createSecurityGroup(CreateSecurityGroupRequest createSecurityGroupRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSpotPriceHistoryResult describeSpotPriceHistory(DescribeSpotPriceHistoryRequest describeSpotPriceHistoryRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeNetworkInterfacesResult describeNetworkInterfaces(DescribeNetworkInterfacesRequest describeNetworkInterfacesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeNetworkInterfacePermissionsResult describeNetworkInterfacePermissions( DescribeNetworkInterfacePermissionsRequest describeNetworkInterfacePermissionsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeRegionsResult describeRegions(DescribeRegionsRequest describeRegionsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateDhcpOptionsResult createDhcpOptions(CreateDhcpOptionsRequest createDhcpOptionsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateReservedInstancesListingResult createReservedInstancesListing( CreateReservedInstancesListingRequest createReservedInstancesListingRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteVpcEndpointsResult deleteVpcEndpoints(DeleteVpcEndpointsRequest deleteVpcEndpointsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ResetSnapshotAttributeResult resetSnapshotAttribute(ResetSnapshotAttributeRequest resetSnapshotAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DeleteRouteResult deleteRoute(DeleteRouteRequest deleteRouteRequest) throws AmazonServiceException, AmazonClientException { + public DeleteRouteResult deleteRoute(DeleteRouteRequest deleteRouteRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeInternetGatewaysResult describeInternetGateways(DescribeInternetGatewaysRequest describeInternetGatewaysRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ImportVolumeResult importVolume(ImportVolumeRequest importVolumeRequest) throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + 
public ModifyCapacityReservationResult modifyCapacityReservation(ModifyCapacityReservationRequest modifyCapacityReservationRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public ImportVolumeResult importVolume(ImportVolumeRequest importVolumeRequest) throws AmazonServiceException, AmazonClientException { + public ModifyClientVpnEndpointResult modifyClientVpnEndpoint(ModifyClientVpnEndpointRequest modifyClientVpnEndpointRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ModifyFleetResult modifyFleet(ModifyFleetRequest modifyFleetRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ModifyFpgaImageAttributeResult modifyFpgaImageAttribute(ModifyFpgaImageAttributeRequest modifyFpgaImageAttributeRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @@ -893,514 +1166,599 @@ public ModifyIdFormatResult modifyIdFormat(ModifyIdFormatRequest modifyIdFormatR @Override public DescribeSecurityGroupsResult describeSecurityGroups(DescribeSecurityGroupsRequest describeSecurityGroupsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeStaleSecurityGroupsResult describeStaleSecurityGroups( DescribeStaleSecurityGroupsRequest describeStaleSecurityGroupsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSecurityGroupReferencesResult describeSecurityGroupReferences( DescribeSecurityGroupReferencesRequest describeSecurityGroupReferencesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public RejectVpcPeeringConnectionResult rejectVpcPeeringConnection( RejectVpcPeeringConnectionRequest rejectVpcPeeringConnectionRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ModifyVpcPeeringConnectionOptionsResult modifyVpcPeeringConnectionOptions( ModifyVpcPeeringConnectionOptionsRequest modifyVpcPeeringConnectionOptionsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ModifyVpcTenancyResult modifyVpcTenancy(ModifyVpcTenancyRequest modifyVpcTenancyRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteFlowLogsResult deleteFlowLogs(DeleteFlowLogsRequest deleteFlowLogsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteFpgaImageResult deleteFpgaImage(DeleteFpgaImageRequest deleteFpgaImageRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DetachVpnGatewayResult detachVpnGateway(DetachVpnGatewayRequest detachVpnGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public 
DisableTransitGatewayRouteTablePropagationResult disableTransitGatewayRouteTablePropagation( + DisableTransitGatewayRouteTablePropagationRequest disableTransitGatewayRouteTablePropagationRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeregisterImageResult deregisterImage(DeregisterImageRequest deregisterImageRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSpotDatafeedSubscriptionResult describeSpotDatafeedSubscription( DescribeSpotDatafeedSubscriptionRequest describeSpotDatafeedSubscriptionRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteTagsResult deleteTags(DeleteTagsRequest deleteTagsRequest) throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteTransitGatewayResult deleteTransitGateway(DeleteTransitGatewayRequest deleteTransitGatewayRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DeleteTagsResult deleteTags(DeleteTagsRequest deleteTagsRequest) throws AmazonServiceException, AmazonClientException { + public DeleteTransitGatewayRouteResult deleteTransitGatewayRoute(DeleteTransitGatewayRouteRequest deleteTransitGatewayRouteRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteTransitGatewayRouteTableResult deleteTransitGatewayRouteTable( + DeleteTransitGatewayRouteTableRequest deleteTransitGatewayRouteTableRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteTransitGatewayVpcAttachmentResult deleteTransitGatewayVpcAttachment( + DeleteTransitGatewayVpcAttachmentRequest deleteTransitGatewayVpcAttachmentRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteSubnetResult deleteSubnet(DeleteSubnetRequest deleteSubnetRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeAccountAttributesResult describeAccountAttributes(DescribeAccountAttributesRequest describeAccountAttributesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AttachClassicLinkVpcResult attachClassicLinkVpc(AttachClassicLinkVpcRequest attachClassicLinkVpcRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateVpnGatewayResult createVpnGateway(CreateVpnGatewayRequest createVpnGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteClientVpnEndpointResult deleteClientVpnEndpoint(DeleteClientVpnEndpointRequest deleteClientVpnEndpointRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteClientVpnRouteResult deleteClientVpnRoute(DeleteClientVpnRouteRequest deleteClientVpnRouteRequest) { throw new 
UnsupportedOperationException("Not supported in mock"); } @Override public EnableVolumeIOResult enableVolumeIO(EnableVolumeIORequest enableVolumeIORequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public MoveAddressToVpcResult moveAddressToVpc(MoveAddressToVpcRequest moveAddressToVpcRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ProvisionByoipCidrResult provisionByoipCidr(ProvisionByoipCidrRequest provisionByoipCidrRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteVpnGatewayResult deleteVpnGateway(DeleteVpnGatewayRequest deleteVpnGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public AttachVolumeResult attachVolume(AttachVolumeRequest attachVolumeRequest) throws AmazonServiceException, AmazonClientException { + public DeprovisionByoipCidrResult deprovisionByoipCidr(DeprovisionByoipCidrRequest deprovisionByoipCidrRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public AttachVolumeResult attachVolume(AttachVolumeRequest attachVolumeRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVolumeStatusResult describeVolumeStatus(DescribeVolumeStatusRequest describeVolumeStatusRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVolumesModificationsResult describeVolumesModifications( DescribeVolumesModificationsRequest describeVolumesModificationsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeImportSnapshotTasksResult describeImportSnapshotTasks( DescribeImportSnapshotTasksRequest describeImportSnapshotTasksRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpnConnectionsResult describeVpnConnections(DescribeVpnConnectionsRequest describeVpnConnectionsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ResetImageAttributeResult resetImageAttribute(ResetImageAttributeRequest resetImageAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public EnableVgwRoutePropagationResult enableVgwRoutePropagation(EnableVgwRoutePropagationRequest enableVgwRoutePropagationRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateSnapshotResult createSnapshot(CreateSnapshotRequest createSnapshotRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new 
UnsupportedOperationException("Not supported in mock"); } @Override - public DeleteVolumeResult deleteVolume(DeleteVolumeRequest deleteVolumeRequest) throws AmazonServiceException, AmazonClientException { + public DeleteVolumeResult deleteVolume(DeleteVolumeRequest deleteVolumeRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateNetworkInterfaceResult createNetworkInterface(CreateNetworkInterfaceRequest createNetworkInterfaceRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ModifyReservedInstancesResult modifyReservedInstances(ModifyReservedInstancesRequest modifyReservedInstancesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CancelSpotFleetRequestsResult cancelSpotFleetRequests(CancelSpotFleetRequestsRequest cancelSpotFleetRequestsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public UnassignPrivateIpAddressesResult unassignPrivateIpAddresses(UnassignPrivateIpAddressesRequest unassignPrivateIpAddressesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public UnassignIpv6AddressesResult unassignIpv6Addresses(UnassignIpv6AddressesRequest unassignIpv6AddressesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeVpcsResult describeVpcs(DescribeVpcsRequest describeVpcsRequest) throws AmazonServiceException, AmazonClientException { + public DescribeVpcsResult describeVpcs(DescribeVpcsRequest describeVpcsRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CancelConversionTaskResult cancelConversionTask(CancelConversionTaskRequest cancelConversionTaskRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AssociateAddressResult associateAddress(AssociateAddressRequest associateAddressRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public AssociateClientVpnTargetNetworkResult associateClientVpnTargetNetwork( + AssociateClientVpnTargetNetworkRequest associateClientVpnTargetNetworkRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AssociateIamInstanceProfileResult associateIamInstanceProfile(AssociateIamInstanceProfileRequest associateIamInstanceRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AssociateVpcCidrBlockResult associateVpcCidrBlock(AssociateVpcCidrBlockRequest associateVpcCidrBlockRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } 
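Note, not part of the patch: dropping AmazonServiceException from these throws clauses is a pure cleanup. In the AWS SDK for Java 1.x, AmazonServiceException is a subclass of AmazonClientException and both are unchecked, so existing callers that catch AmazonClientException still see service-side errors, and the narrower declaration changes nothing at compile time or run time. The standalone sketch below illustrates that; the class and method names in it are illustrative only and do not appear in the PR.

import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;

// Hedged sketch, assuming only the SDK 1.x exception hierarchy described above.
class ThrowsClauseNote {

    // Declaring only the supertype still allows throwing the service subclass,
    // because both exceptions extend RuntimeException (unchecked).
    static void call() throws AmazonClientException {
        throw new AmazonServiceException("simulated service-side failure");
    }

    public static void main(String[] args) {
        try {
            call();
        } catch (AmazonClientException e) {
            // AmazonServiceException is caught here as well, so callers written
            // against the old, wider throws clause behave exactly as before.
            System.out.println(e.getMessage());
        }
    }
}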
@Override public AssociateSubnetCidrBlockResult associateSubnetCidrBlock(AssociateSubnetCidrBlockRequest associateSubnetCidrBlockRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public AssociateTransitGatewayRouteTableResult associateTransitGatewayRouteTable( + AssociateTransitGatewayRouteTableRequest associateTransitGatewayRouteTableRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteCustomerGatewayResult deleteCustomerGateway(DeleteCustomerGatewayRequest deleteCustomerGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateNetworkAclEntryResult createNetworkAclEntry(CreateNetworkAclEntryRequest createNetworkAclEntryRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AcceptVpcPeeringConnectionResult acceptVpcPeeringConnection(AcceptVpcPeeringConnectionRequest acceptVpcPeeringConnectionRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeExportTasksResult describeExportTasks(DescribeExportTasksRequest describeExportTasksRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeElasticGpusResult describeElasticGpus(DescribeElasticGpusRequest describeElasticGpusRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeFpgaImagesResult describeFpgaImages(DescribeFpgaImagesRequest describeFpgaImagesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeHostReservationOfferingsResult describeHostReservationOfferings( DescribeHostReservationOfferingsRequest describeHostReservationOfferingsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeHostReservationsResult describeHostReservations(DescribeHostReservationsRequest describeHostReservationsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeIdentityIdFormatResult describeIdentityIdFormat(DescribeIdentityIdFormatRequest describeIdentityIdFormatRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DetachInternetGatewayResult detachInternetGateway(DetachInternetGatewayRequest detachInternetGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateVpcPeeringConnectionResult 
createVpcPeeringConnection(CreateVpcPeeringConnectionRequest createVpcPeeringConnectionRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateRouteTableResult createRouteTable(CreateRouteTableRequest createRouteTableRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CancelImportTaskResult cancelImportTask(CancelImportTaskRequest cancelImportTaskRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVolumesResult describeVolumes(DescribeVolumesRequest describeVolumesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesListingsResult describeReservedInstancesListings( DescribeReservedInstancesListingsRequest describeReservedInstancesListingsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ReportInstanceStatusResult reportInstanceStatus(ReportInstanceStatusRequest reportInstanceStatusRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeRouteTablesResult describeRouteTables(DescribeRouteTablesRequest describeRouteTablesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeDhcpOptionsResult describeDhcpOptions(DescribeDhcpOptionsRequest describeDhcpOptionsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeEgressOnlyInternetGatewaysResult describeEgressOnlyInternetGateways( DescribeEgressOnlyInternetGatewaysRequest describeEgressOnlyInternetGatewaysRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public MonitorInstancesResult monitorInstances(MonitorInstancesRequest monitorInstancesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribePrefixListsResult describePrefixLists(DescribePrefixListsRequest describePrefixListsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public RequestSpotFleetResult requestSpotFleet(RequestSpotFleetRequest requestSpotFleetRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeImportImageTasksResult describeImportImageTasks(DescribeImportImageTasksRequest describeImportImageTasksRequest) - throws AmazonServiceException, AmazonClientException { + throws 
AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeNetworkAclsResult describeNetworkAcls(DescribeNetworkAclsRequest describeNetworkAclsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeBundleTasksResult describeBundleTasks(DescribeBundleTasksRequest describeBundleTasksRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ImportInstanceResult importInstance(ImportInstanceRequest importInstanceRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteVpcPeeringConnectionResult deleteVpcPeeringConnection(DeleteVpcPeeringConnectionRequest deleteVpcPeeringConnectionRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public GetConsoleOutputResult getConsoleOutput(GetConsoleOutputRequest getConsoleOutputRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public GetConsoleScreenshotResult getConsoleScreenshot(GetConsoleScreenshotRequest getConsoleScreenshotRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public GetHostReservationPurchasePreviewResult getHostReservationPurchasePreview( GetHostReservationPurchasePreviewRequest getHostReservationPurchasePreviewRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public GetLaunchTemplateDataResult getLaunchTemplateData(GetLaunchTemplateDataRequest getLaunchTemplateDataRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateInternetGatewayResult createInternetGateway(CreateInternetGatewayRequest createInternetGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteVpnConnectionRouteResult deleteVpnConnectionRoute(DeleteVpnConnectionRouteRequest deleteVpnConnectionRouteRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DetachNetworkInterfaceResult detachNetworkInterface(DetachNetworkInterfaceRequest detachNetworkInterfaceRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ModifyImageAttributeResult modifyImageAttribute(ModifyImageAttributeRequest modifyImageAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateCustomerGatewayResult createCustomerGateway(CreateCustomerGatewayRequest 
createCustomerGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateDefaultSubnetResult createDefaultSubnet(CreateDefaultSubnetRequest createDefaultSubnetRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateEgressOnlyInternetGatewayResult createEgressOnlyInternetGateway( CreateEgressOnlyInternetGatewayRequest createEgressOnlyInternetGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateFleetResult createFleet(CreateFleetRequest createFleetRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateFpgaImageResult createFpgaImage(CreateFpgaImageRequest createFpgaImageRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateNetworkInterfacePermissionResult createNetworkInterfacePermission( CreateNetworkInterfacePermissionRequest createNetworkInterfacePermissionRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateDefaultVpcResult createDefaultVpc(CreateDefaultVpcRequest createDefaultVpcRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateSpotDatafeedSubscriptionResult createSpotDatafeedSubscription( CreateSpotDatafeedSubscriptionRequest createSpotDatafeedSubscriptionRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AttachInternetGatewayResult attachInternetGateway(AttachInternetGatewayRequest attachInternetGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteVpnConnectionResult deleteVpnConnection(DeleteVpnConnectionRequest deleteVpnConnectionRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeMovingAddressesResult describeMovingAddresses(DescribeMovingAddressesRequest describeMovingAddressesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeConversionTasksResult describeConversionTasks(DescribeConversionTasksRequest describeConversionTasksRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateVpnConnectionResult createVpnConnection(CreateVpnConnectionRequest createVpnConnectionRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public ImportImageResult importImage(ImportImageRequest 
importImageRequest) throws AmazonServiceException, AmazonClientException { + public ImportImageResult importImage(ImportImageRequest importImageRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DisableVpcClassicLinkResult disableVpcClassicLink(DisableVpcClassicLinkRequest disableVpcClassicLinkRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @@ -1412,31 +1770,37 @@ public DisableVpcClassicLinkDnsSupportResult disableVpcClassicLinkDnsSupport( @Override public DescribeInstanceAttributeResult describeInstanceAttribute(DescribeInstanceAttributeRequest describeInstanceAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeInstanceCreditSpecificationsResult describeInstanceCreditSpecifications( + DescribeInstanceCreditSpecificationsRequest describeInstanceCreditSpecificationsRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeFlowLogsResult describeFlowLogs(DescribeFlowLogsRequest describeFlowLogsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcPeeringConnectionsResult describeVpcPeeringConnections( DescribeVpcPeeringConnectionsRequest describeVpcPeeringConnectionsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribePlacementGroupsResult describePlacementGroups(DescribePlacementGroupsRequest describePlacementGroupsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public RunInstancesResult runInstances(RunInstancesRequest runInstancesRequest) throws AmazonServiceException, AmazonClientException { + public RunInstancesResult runInstances(RunInstancesRequest runInstancesRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @@ -1445,45 +1809,63 @@ public RunScheduledInstancesResult runScheduledInstances(RunScheduledInstancesRe throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public SearchTransitGatewayRoutesResult searchTransitGatewayRoutes( + SearchTransitGatewayRoutesRequest searchTransitGatewayRoutesRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + @Override public DescribeSubnetsResult describeSubnets(DescribeSubnetsRequest describeSubnetsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AssociateRouteTableResult associateRouteTable(AssociateRouteTableRequest associateRouteTableRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ModifyVolumeAttributeResult modifyVolumeAttribute(ModifyVolumeAttributeRequest modifyVolumeAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws 
AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteNetworkAclResult deleteNetworkAcl(DeleteNetworkAclRequest deleteNetworkAclRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeImagesResult describeImages(DescribeImagesRequest describeImagesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public StartInstancesResult startInstances(StartInstancesRequest startInstancesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ModifyInstanceAttributeResult modifyInstanceAttribute(ModifyInstanceAttributeRequest modifyInstanceAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ModifyInstanceCapacityReservationAttributesResult modifyInstanceCapacityReservationAttributes( + ModifyInstanceCapacityReservationAttributesRequest modifyInstanceCapacityReservationAttributesRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ModifyInstanceCreditSpecificationResult modifyInstanceCreditSpecification( + ModifyInstanceCreditSpecificationRequest modifyInstanceCreditSpecificationRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @@ -1492,6 +1874,11 @@ public ModifyInstancePlacementResult modifyInstancePlacement(ModifyInstancePlace throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public ModifyLaunchTemplateResult modifyLaunchTemplate(ModifyLaunchTemplateRequest modifyLaunchTemplateRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + @Override public ModifyIdentityIdFormatResult modifyIdentityIdFormat(ModifyIdentityIdFormatRequest modifyIdentityIdFormatRequest) { throw new UnsupportedOperationException("Not supported in mock"); @@ -1500,175 +1887,224 @@ public ModifyIdentityIdFormatResult modifyIdentityIdFormat(ModifyIdentityIdForma @Override public CancelReservedInstancesListingResult cancelReservedInstancesListing( CancelReservedInstancesListingRequest cancelReservedInstancesListingRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteDhcpOptionsResult deleteDhcpOptions(DeleteDhcpOptionsRequest deleteDhcpOptionsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteEgressOnlyInternetGatewayResult deleteEgressOnlyInternetGateway( DeleteEgressOnlyInternetGatewayRequest deleteEgressOnlyInternetGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteFleetsResult deleteFleets(DeleteFleetsRequest deleteFleetsRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteNetworkInterfacePermissionResult 
deleteNetworkInterfacePermission( DeleteNetworkInterfacePermissionRequest deleteNetworkInterfacePermissionRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AuthorizeSecurityGroupIngressResult authorizeSecurityGroupIngress( AuthorizeSecurityGroupIngressRequest authorizeSecurityGroupIngressRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSpotInstanceRequestsResult describeSpotInstanceRequests( - DescribeSpotInstanceRequestsRequest describeSpotInstanceRequestsRequest) throws AmazonServiceException, AmazonClientException { + DescribeSpotInstanceRequestsRequest describeSpotInstanceRequestsRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public CreateVpcResult createVpc(CreateVpcRequest createVpcRequest) throws AmazonServiceException, AmazonClientException { + public CreateVpcResult createVpc(CreateVpcRequest createVpcRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeCustomerGatewaysResult describeCustomerGateways(DescribeCustomerGatewaysRequest describeCustomerGatewaysRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CancelExportTaskResult cancelExportTask(CancelExportTaskRequest cancelExportTaskRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public CreateRouteResult createRoute(CreateRouteRequest createRouteRequest) throws AmazonServiceException, AmazonClientException { + public CreateRouteResult createRoute(CreateRouteRequest createRouteRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateVpcEndpointResult createVpcEndpoint(CreateVpcEndpointRequest createVpcEndpointRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateVpcEndpointConnectionNotificationResult createVpcEndpointConnectionNotification( + CreateVpcEndpointConnectionNotificationRequest createVpcEndpointConnectionNotificationRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateVpcEndpointServiceConfigurationResult createVpcEndpointServiceConfiguration( + CreateVpcEndpointServiceConfigurationRequest createVpcEndpointServiceConfigurationRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public CopyImageResult copyImage(CopyImageRequest copyImageRequest) throws AmazonServiceException, AmazonClientException { + public CopyImageResult copyImage(CopyImageRequest copyImageRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcClassicLinkResult describeVpcClassicLink(DescribeVpcClassicLinkRequest describeVpcClassicLinkRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not 
supported in mock"); } @Override public ModifyNetworkInterfaceAttributeResult modifyNetworkInterfaceAttribute( ModifyNetworkInterfaceAttributeRequest modifyNetworkInterfaceAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteRouteTableResult deleteRouteTable(DeleteRouteTableRequest deleteRouteTableRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeNetworkInterfaceAttributeResult describeNetworkInterfaceAttribute( DescribeNetworkInterfaceAttributeRequest describeNetworkInterfaceAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeClassicLinkInstancesResult describeClassicLinkInstances( - DescribeClassicLinkInstancesRequest describeClassicLinkInstancesRequest) throws AmazonServiceException, AmazonClientException { + DescribeClassicLinkInstancesRequest describeClassicLinkInstancesRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public RequestSpotInstancesResult requestSpotInstances(RequestSpotInstancesRequest requestSpotInstancesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public CreateTagsResult createTags(CreateTagsRequest createTagsRequest) throws AmazonServiceException, AmazonClientException { + public ResetFpgaImageAttributeResult resetFpgaImageAttribute(ResetFpgaImageAttributeRequest resetFpgaImageAttributeRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateTagsResult createTags(CreateTagsRequest createTagsRequest) throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateTransitGatewayResult createTransitGateway(CreateTransitGatewayRequest createTransitGatewayRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateTransitGatewayRouteResult createTransitGatewayRoute(CreateTransitGatewayRouteRequest createTransitGatewayRouteRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateTransitGatewayRouteTableResult createTransitGatewayRouteTable( + CreateTransitGatewayRouteTableRequest createTransitGatewayRouteTableRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateTransitGatewayVpcAttachmentResult createTransitGatewayVpcAttachment( + CreateTransitGatewayVpcAttachmentRequest createTransitGatewayVpcAttachmentRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVolumeAttributeResult describeVolumeAttribute(DescribeVolumeAttributeRequest describeVolumeAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AttachNetworkInterfaceResult attachNetworkInterface(AttachNetworkInterfaceRequest attachNetworkInterfaceRequest) - throws AmazonServiceException, AmazonClientException { + throws 
AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public ReplaceRouteResult replaceRoute(ReplaceRouteRequest replaceRouteRequest) throws AmazonServiceException, AmazonClientException { + public ReplaceRouteResult replaceRoute(ReplaceRouteRequest replaceRouteRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeTagsResult describeTags(DescribeTagsRequest describeTagsRequest) throws AmazonServiceException, AmazonClientException { + public DescribeTagsResult describeTags(DescribeTagsRequest describeTagsRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CancelBundleTaskResult cancelBundleTask(CancelBundleTaskRequest cancelBundleTaskRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CancelCapacityReservationResult cancelCapacityReservation(CancelCapacityReservationRequest cancelCapacityReservationRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DisableVgwRoutePropagationResult disableVgwRoutePropagation(DisableVgwRoutePropagationRequest disableVgwRoutePropagationRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ImportSnapshotResult importSnapshot(ImportSnapshotRequest importSnapshotRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CancelSpotInstanceRequestsResult cancelSpotInstanceRequests(CancelSpotInstanceRequestsRequest cancelSpotInstanceRequestsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSpotFleetRequestsResult describeSpotFleetRequests(DescribeSpotFleetRequestsRequest describeSpotFleetRequestsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public PurchaseReservedInstancesOfferingResult purchaseReservedInstancesOffering( PurchaseReservedInstancesOfferingRequest purchaseReservedInstancesOfferingRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @@ -1685,243 +2121,323 @@ public PurchaseHostReservationResult purchaseHostReservation(PurchaseHostReserva @Override public ModifySnapshotAttributeResult modifySnapshotAttribute(ModifySnapshotAttributeRequest modifySnapshotAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesModificationsResult describeReservedInstancesModifications( DescribeReservedInstancesModificationsRequest describeReservedInstancesModificationsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public TerminateInstancesResult 
terminateInstances(TerminateInstancesRequest terminateInstancesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ModifyVpcEndpointResult modifyVpcEndpoint(ModifyVpcEndpointRequest modifyVpcEndpointRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ModifyVpcEndpointConnectionNotificationResult modifyVpcEndpointConnectionNotification( + ModifyVpcEndpointConnectionNotificationRequest modifyVpcEndpointConnectionNotificationRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ModifyVpcEndpointServiceConfigurationResult modifyVpcEndpointServiceConfiguration( + ModifyVpcEndpointServiceConfigurationRequest modifyVpcEndpointServiceConfigurationRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ModifyVpcEndpointServicePermissionsResult modifyVpcEndpointServicePermissions( + ModifyVpcEndpointServicePermissionsRequest modifyVpcEndpointServicePermissionsRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteSpotDatafeedSubscriptionResult deleteSpotDatafeedSubscription( DeleteSpotDatafeedSubscriptionRequest deleteSpotDatafeedSubscriptionRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteInternetGatewayResult deleteInternetGateway(DeleteInternetGatewayRequest deleteInternetGatewayRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSnapshotAttributeResult describeSnapshotAttribute(DescribeSnapshotAttributeRequest describeSnapshotAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ReplaceRouteTableAssociationResult replaceRouteTableAssociation( - ReplaceRouteTableAssociationRequest replaceRouteTableAssociationRequest) throws AmazonServiceException, AmazonClientException { + ReplaceRouteTableAssociationRequest replaceRouteTableAssociationRequest) throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ReplaceTransitGatewayRouteResult replaceTransitGatewayRoute( + ReplaceTransitGatewayRouteRequest replaceTransitGatewayRouteRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeAddressesResult describeAddresses(DescribeAddressesRequest describeAddressesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeImageAttributeResult describeImageAttribute(DescribeImageAttributeRequest describeImageAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeKeyPairsResult describeKeyPairs(DescribeKeyPairsRequest describeKeyPairsRequest) - throws AmazonServiceException, AmazonClientException 
{ + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ConfirmProductInstanceResult confirmProductInstance(ConfirmProductInstanceRequest confirmProductInstanceRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CopyFpgaImageResult copyFpgaImage(CopyFpgaImageRequest copyFpgaImageRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DisassociateRouteTableResult disassociateRouteTable(DisassociateRouteTableRequest disassociateRouteTableRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DisassociateIamInstanceProfileResult disassociateIamInstanceProfile( DisassociateIamInstanceProfileRequest disassociateIamInstanceProfileRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DisassociateVpcCidrBlockResult disassociateVpcCidrBlock(DisassociateVpcCidrBlockRequest disassociateVpcCidrBlockRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public EnableTransitGatewayRouteTablePropagationResult enableTransitGatewayRouteTablePropagation( + EnableTransitGatewayRouteTablePropagationRequest enableTransitGatewayRouteTablePropagationRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DisassociateSubnetCidrBlockResult disassociateSubnetCidrBlock( - DisassociateSubnetCidrBlockRequest disassociateSubnetCidrBlockRequest) throws AmazonServiceException, AmazonClientException { + DisassociateSubnetCidrBlockRequest disassociateSubnetCidrBlockRequest) throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DisassociateTransitGatewayRouteTableResult disassociateTransitGatewayRouteTable( + DisassociateTransitGatewayRouteTableRequest disassociateTransitGatewayRouteTableRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcAttributeResult describeVpcAttribute(DescribeVpcAttributeRequest describeVpcAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public RevokeSecurityGroupEgressResult revokeSecurityGroupEgress(RevokeSecurityGroupEgressRequest revokeSecurityGroupEgressRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteNetworkAclEntryResult deleteNetworkAclEntry(DeleteNetworkAclEntryRequest deleteNetworkAclEntryRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public CreateVolumeResult createVolume(CreateVolumeRequest createVolumeRequest) throws AmazonServiceException, AmazonClientException { + public CreateVolumeResult createVolume(CreateVolumeRequest createVolumeRequest) throws AmazonClientException { throw new 
UnsupportedOperationException("Not supported in mock"); } @Override - public ModifyVolumeResult modifyVolume(ModifyVolumeRequest modifyVolumeRequest) throws AmazonServiceException, AmazonClientException { + public ModifyVolumeResult modifyVolume(ModifyVolumeRequest modifyVolumeRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeInstanceStatusResult describeInstanceStatus(DescribeInstanceStatusRequest describeInstanceStatusRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpnGatewaysResult describeVpnGateways(DescribeVpnGatewaysRequest describeVpnGatewaysRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public CreateSubnetResult createSubnet(CreateSubnetRequest createSubnetRequest) throws AmazonServiceException, AmazonClientException { + public CreateSubnetResult createSubnet(CreateSubnetRequest createSubnetRequest) throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesOfferingsResult describeReservedInstancesOfferings( DescribeReservedInstancesOfferingsRequest describeReservedInstancesOfferingsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AssignPrivateIpAddressesResult assignPrivateIpAddresses(AssignPrivateIpAddressesRequest assignPrivateIpAddressesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public AssignIpv6AddressesResult assignIpv6Addresses(AssignIpv6AddressesRequest assignIpv6AddressesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSpotFleetRequestHistoryResult describeSpotFleetRequestHistory( DescribeSpotFleetRequestHistoryRequest describeSpotFleetRequestHistoryRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeleteSnapshotResult deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ReplaceNetworkAclAssociationResult replaceNetworkAclAssociation( ReplaceNetworkAclAssociationRequest replaceNetworkAclAssociationRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DisassociateAddressResult disassociateAddress(DisassociateAddressRequest disassociateAddressRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DisassociateClientVpnTargetNetworkResult disassociateClientVpnTargetNetwork( + DisassociateClientVpnTargetNetworkRequest disassociateClientVpnTargetNetworkRequest) { 
throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreatePlacementGroupResult createPlacementGroup(CreatePlacementGroupRequest createPlacementGroupRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public BundleInstanceResult bundleInstance(BundleInstanceRequest bundleInstanceRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DeletePlacementGroupResult deletePlacementGroup(DeletePlacementGroupRequest deletePlacementGroupRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ModifySubnetAttributeResult modifySubnetAttribute(ModifySubnetAttributeRequest modifySubnetAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DeleteVpcResult deleteVpc(DeleteVpcRequest deleteVpcRequest) throws AmazonServiceException, AmazonClientException { + public ModifyTransitGatewayVpcAttachmentResult modifyTransitGatewayVpcAttachment( + ModifyTransitGatewayVpcAttachmentRequest modifyTransitGatewayVpcAttachmentRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public CopySnapshotResult copySnapshot(CopySnapshotRequest copySnapshotRequest) throws AmazonServiceException, AmazonClientException { + public DeleteVpcResult deleteVpc(DeleteVpcRequest deleteVpcRequest) throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteVpcEndpointConnectionNotificationsResult deleteVpcEndpointConnectionNotifications( + DeleteVpcEndpointConnectionNotificationsRequest deleteVpcEndpointConnectionNotificationsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteVpcEndpointServiceConfigurationsResult deleteVpcEndpointServiceConfigurations( + DeleteVpcEndpointServiceConfigurationsRequest deleteVpcEndpointServiceConfigurationsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CopySnapshotResult copySnapshot(CopySnapshotRequest copySnapshotRequest) throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateCapacityReservationResult createCapacityReservation(CreateCapacityReservationRequest createCapacityReservationRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateClientVpnEndpointResult createClientVpnEndpoint(CreateClientVpnEndpointRequest createClientVpnEndpointRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateClientVpnRouteResult createClientVpnRoute(CreateClientVpnRouteRequest createClientVpnRouteRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcEndpointServicesResult describeVpcEndpointServices( DescribeVpcEndpointServicesRequest describeVpcEndpointServicesRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported 
in mock"); } @Override public AllocateAddressResult allocateAddress(AllocateAddressRequest allocateAddressRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ReleaseAddressResult releaseAddress(ReleaseAddressRequest releaseAddressRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @@ -1938,13 +2454,24 @@ public ReplaceIamInstanceProfileAssociationResult replaceIamInstanceProfileAssoc @Override public ResetInstanceAttributeResult resetInstanceAttribute(ResetInstanceAttributeRequest resetInstanceAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateKeyPairResult createKeyPair(CreateKeyPairRequest createKeyPairRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateLaunchTemplateResult createLaunchTemplate(CreateLaunchTemplateRequest createLaunchTemplateRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateLaunchTemplateVersionResult createLaunchTemplateVersion( + CreateLaunchTemplateVersionRequest createLaunchTemplateVersionRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @@ -1955,38 +2482,50 @@ public CreateNatGatewayResult createNatGateway(CreateNatGatewayRequest createNat @Override public ReplaceNetworkAclEntryResult replaceNetworkAclEntry(ReplaceNetworkAclEntryRequest replaceNetworkAclEntryRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeSnapshotsResult describeSnapshots(DescribeSnapshotsRequest describeSnapshotsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public CreateNetworkAclResult createNetworkAcl(CreateNetworkAclRequest createNetworkAclRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public RegisterImageResult registerImage(RegisterImageRequest registerImageRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public RejectTransitGatewayVpcAttachmentResult rejectTransitGatewayVpcAttachment( + RejectTransitGatewayVpcAttachmentRequest rejectTransitGatewayVpcAttachmentRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public RejectVpcEndpointConnectionsResult rejectVpcEndpointConnections( + RejectVpcEndpointConnectionsRequest rejectVpcEndpointConnectionsRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public ResetNetworkInterfaceAttributeResult resetNetworkInterfaceAttribute( ResetNetworkInterfaceAttributeRequest resetNetworkInterfaceAttributeRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new 
UnsupportedOperationException("Not supported in mock"); } @Override public EnableVpcClassicLinkResult enableVpcClassicLink(EnableVpcClassicLinkRequest enableVpcClassicLinkRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @@ -1996,122 +2535,160 @@ public EnableVpcClassicLinkDnsSupportResult enableVpcClassicLinkDnsSupport( throw new UnsupportedOperationException("Not supported in mock"); } + @Override + public ExportClientVpnClientCertificateRevocationListResult exportClientVpnClientCertificateRevocationList( + ExportClientVpnClientCertificateRevocationListRequest exportClientVpnClientCertificateRevocationListRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ExportClientVpnClientConfigurationResult exportClientVpnClientConfiguration( + ExportClientVpnClientConfigurationRequest exportClientVpnClientConfigurationRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ExportTransitGatewayRoutesResult exportTransitGatewayRoutes( + ExportTransitGatewayRoutesRequest exportTransitGatewayRoutesRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + @Override public CreateVpnConnectionRouteResult createVpnConnectionRoute(CreateVpnConnectionRouteRequest createVpnConnectionRouteRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeVpcEndpointsResult describeVpcEndpoints(DescribeVpcEndpointsRequest describeVpcEndpointsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DetachClassicLinkVpcResult detachClassicLinkVpc(DetachClassicLinkVpcRequest detachClassicLinkVpcRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeReservedInstancesResult describeReservedInstances() throws AmazonServiceException, AmazonClientException { + public DescribeReservedInstancesResult describeReservedInstances() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeAvailabilityZonesResult describeAvailabilityZones() throws AmazonServiceException, AmazonClientException { + public DescribeAvailabilityZonesResult describeAvailabilityZones() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeSpotPriceHistoryResult describeSpotPriceHistory() throws AmazonServiceException, AmazonClientException { + public DescribeSpotPriceHistoryResult describeSpotPriceHistory() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeNetworkInterfacesResult describeNetworkInterfaces() throws AmazonServiceException, AmazonClientException { + public DescribeNetworkInterfacesResult describeNetworkInterfaces() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeRegionsResult describeRegions() throws AmazonServiceException, AmazonClientException { + public DescribeRegionsResult describeRegions() throws 
AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeInternetGatewaysResult describeInternetGateways() throws AmazonServiceException, AmazonClientException { + public DescribeInternetGatewaysResult describeInternetGateways() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeSecurityGroupsResult describeSecurityGroups() throws AmazonServiceException, AmazonClientException { + public DescribeSecurityGroupsResult describeSecurityGroups() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeSpotDatafeedSubscriptionResult describeSpotDatafeedSubscription() throws AmazonServiceException, AmazonClientException { + public DescribeSpotDatafeedSubscriptionResult describeSpotDatafeedSubscription() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeAccountAttributesResult describeAccountAttributes() throws AmazonServiceException, AmazonClientException { + public DescribeAccountAttributesResult describeAccountAttributes() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeVolumeStatusResult describeVolumeStatus() throws AmazonServiceException, AmazonClientException { + public DescribeVolumeStatusResult describeVolumeStatus() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeImportSnapshotTasksResult describeImportSnapshotTasks() throws AmazonServiceException, AmazonClientException { + public DescribeImportSnapshotTasksResult describeImportSnapshotTasks() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeVpnConnectionsResult describeVpnConnections() throws AmazonServiceException, AmazonClientException { + public DescribeVpnConnectionsResult describeVpnConnections() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeVpcsResult describeVpcs() throws AmazonServiceException, AmazonClientException { + public DescribeVpcsResult describeVpcs() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public AcceptVpcPeeringConnectionResult acceptVpcPeeringConnection() throws AmazonServiceException, AmazonClientException { + public AcceptVpcPeeringConnectionResult acceptVpcPeeringConnection() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeExportTasksResult describeExportTasks() throws AmazonServiceException, AmazonClientException { + public AdvertiseByoipCidrResult advertiseByoipCidr(AdvertiseByoipCidrRequest advertiseByoipCidrRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public CreateVpcPeeringConnectionResult createVpcPeeringConnection() throws AmazonServiceException, AmazonClientException { + public DescribeExportTasksResult describeExportTasks() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public CancelImportTaskResult cancelImportTask() throws AmazonServiceException, AmazonClientException { + public DescribeFleetHistoryResult describeFleetHistory(DescribeFleetHistoryRequest 
describeFleetHistoryRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeVolumesResult describeVolumes() throws AmazonServiceException, AmazonClientException { + public DescribeFleetInstancesResult describeFleetInstances(DescribeFleetInstancesRequest describeFleetInstancesRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeFleetsResult describeFleets(DescribeFleetsRequest describeFleetsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CreateVpcPeeringConnectionResult createVpcPeeringConnection() throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public CancelImportTaskResult cancelImportTask() throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeVolumesResult describeVolumes() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesListingsResult describeReservedInstancesListings() - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeRouteTablesResult describeRouteTables() throws AmazonServiceException, AmazonClientException { + public DescribeRouteTablesResult describeRouteTables() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @@ -2128,48 +2705,69 @@ public DescribeScheduledInstancesResult describeScheduledInstances( } @Override - public DescribeDhcpOptionsResult describeDhcpOptions() throws AmazonServiceException, AmazonClientException { + public DescribeDhcpOptionsResult describeDhcpOptions() throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribePrefixListsResult describePrefixLists() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribePrefixListsResult describePrefixLists() throws AmazonServiceException, AmazonClientException { + public DescribePrincipalIdFormatResult describePrincipalIdFormat(DescribePrincipalIdFormatRequest describePrincipalIdFormatRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeImportImageTasksResult describeImportImageTasks() throws AmazonServiceException, AmazonClientException { + public DescribePublicIpv4PoolsResult describePublicIpv4Pools(DescribePublicIpv4PoolsRequest describePublicIpv4PoolsRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeNetworkAclsResult describeNetworkAcls() throws AmazonServiceException, AmazonClientException { + public DescribeImportImageTasksResult describeImportImageTasks() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeBundleTasksResult describeBundleTasks() throws AmazonServiceException, AmazonClientException { + public DescribeNetworkAclsResult describeNetworkAcls() throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeBundleTasksResult describeBundleTasks() throws AmazonClientException { + throw new UnsupportedOperationException("Not 
supported in mock"); + } + + @Override + public DescribeByoipCidrsResult describeByoipCidrs(DescribeByoipCidrsRequest describeByoipCidrsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeCapacityReservationsResult describeCapacityReservations( + DescribeCapacityReservationsRequest describeCapacityReservationsRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override public RevokeSecurityGroupIngressResult revokeSecurityGroupIngress(RevokeSecurityGroupIngressRequest revokeSecurityGroupIngressRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public RevokeSecurityGroupIngressResult revokeSecurityGroupIngress() throws AmazonServiceException, AmazonClientException { + public RevokeSecurityGroupIngressResult revokeSecurityGroupIngress() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public CreateInternetGatewayResult createInternetGateway() throws AmazonServiceException, AmazonClientException { + public CreateInternetGatewayResult createInternetGateway() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeMovingAddressesResult describeMovingAddresses() throws AmazonServiceException, AmazonClientException { + public DescribeMovingAddressesResult describeMovingAddresses() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @@ -2179,17 +2777,23 @@ public DescribeNatGatewaysResult describeNatGateways(DescribeNatGatewaysRequest } @Override - public DescribeConversionTasksResult describeConversionTasks() throws AmazonServiceException, AmazonClientException { + public DescribeConversionTasksResult describeConversionTasks() throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ImportImageResult importImage() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public ImportImageResult importImage() throws AmazonServiceException, AmazonClientException { + public DescribeFlowLogsResult describeFlowLogs() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeFlowLogsResult describeFlowLogs() throws AmazonServiceException, AmazonClientException { + public DescribeFpgaImageAttributeResult describeFpgaImageAttribute( + DescribeFpgaImageAttributeRequest describeFpgaImageAttributeRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @@ -2206,7 +2810,7 @@ public DescribeHostsResult describeHosts() { @Override public DescribeIamInstanceProfileAssociationsResult describeIamInstanceProfileAssociations( DescribeIamInstanceProfileAssociationsRequest describeIamInstanceProfileAssociationsRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @@ -2221,42 +2825,42 @@ public DescribeIdFormatResult describeIdFormat() { } @Override - public DescribeVpcPeeringConnectionsResult describeVpcPeeringConnections() throws AmazonServiceException, AmazonClientException { + public DescribeVpcPeeringConnectionsResult describeVpcPeeringConnections() throws AmazonClientException { throw new 
UnsupportedOperationException("Not supported in mock"); } @Override - public DescribePlacementGroupsResult describePlacementGroups() throws AmazonServiceException, AmazonClientException { + public DescribePlacementGroupsResult describePlacementGroups() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeSubnetsResult describeSubnets() throws AmazonServiceException, AmazonClientException { + public DescribeSubnetsResult describeSubnets() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeInstancesResult describeInstances() throws AmazonServiceException, AmazonClientException { + public DescribeInstancesResult describeInstances() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeImagesResult describeImages() throws AmazonServiceException, AmazonClientException { + public DescribeImagesResult describeImages() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeSpotInstanceRequestsResult describeSpotInstanceRequests() throws AmazonServiceException, AmazonClientException { + public DescribeSpotInstanceRequestsResult describeSpotInstanceRequests() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeCustomerGatewaysResult describeCustomerGateways() throws AmazonServiceException, AmazonClientException { + public DescribeCustomerGatewaysResult describeCustomerGateways() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeVpcClassicLinkResult describeVpcClassicLink() throws AmazonServiceException, AmazonClientException { + public DescribeVpcClassicLinkResult describeVpcClassicLink() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @@ -2267,69 +2871,162 @@ public DescribeVpcClassicLinkDnsSupportResult describeVpcClassicLinkDnsSupport( } @Override - public DescribeClassicLinkInstancesResult describeClassicLinkInstances() throws AmazonServiceException, AmazonClientException { + public DescribeVpcEndpointConnectionNotificationsResult describeVpcEndpointConnectionNotifications( + DescribeVpcEndpointConnectionNotificationsRequest describeVpcEndpointConnectionNotificationsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeVpcEndpointConnectionsResult describeVpcEndpointConnections( + DescribeVpcEndpointConnectionsRequest describeVpcEndpointConnectionsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeVpcEndpointServiceConfigurationsResult describeVpcEndpointServiceConfigurations( + DescribeVpcEndpointServiceConfigurationsRequest describeVpcEndpointServiceConfigurationsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeVpcEndpointServicePermissionsResult describeVpcEndpointServicePermissions( + DescribeVpcEndpointServicePermissionsRequest describeVpcEndpointServicePermissionsRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeTagsResult describeTags() throws AmazonServiceException, AmazonClientException { + public DescribeClassicLinkInstancesResult describeClassicLinkInstances() 
throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public ImportSnapshotResult importSnapshot() throws AmazonServiceException, AmazonClientException { + public DescribeClientVpnAuthorizationRulesResult describeClientVpnAuthorizationRules( + DescribeClientVpnAuthorizationRulesRequest describeClientVpnAuthorizationRulesRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeSpotFleetRequestsResult describeSpotFleetRequests() throws AmazonServiceException, AmazonClientException { + public DescribeClientVpnConnectionsResult describeClientVpnConnections( + DescribeClientVpnConnectionsRequest describeClientVpnConnectionsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeClientVpnEndpointsResult describeClientVpnEndpoints( + DescribeClientVpnEndpointsRequest describeClientVpnEndpointsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeClientVpnRoutesResult describeClientVpnRoutes( + DescribeClientVpnRoutesRequest describeClientVpnRoutesRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeClientVpnTargetNetworksResult describeClientVpnTargetNetworks( + DescribeClientVpnTargetNetworksRequest describeClientVpnTargetNetworksRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeTagsResult describeTags() throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeTransitGatewayAttachmentsResult describeTransitGatewayAttachments( + DescribeTransitGatewayAttachmentsRequest describeTransitGatewayAttachmentsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeTransitGatewayRouteTablesResult describeTransitGatewayRouteTables( + DescribeTransitGatewayRouteTablesRequest describeTransitGatewayRouteTablesRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeTransitGatewayVpcAttachmentsResult describeTransitGatewayVpcAttachments( + DescribeTransitGatewayVpcAttachmentsRequest describeTransitGatewayVpcAttachmentsRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeTransitGatewaysResult describeTransitGateways(DescribeTransitGatewaysRequest describeTransitGatewaysRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public ImportSnapshotResult importSnapshot() throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeSpotFleetRequestsResult describeSpotFleetRequests() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesModificationsResult describeReservedInstancesModifications() - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DeleteSpotDatafeedSubscriptionResult deleteSpotDatafeedSubscription() throws AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeAddressesResult describeAddresses() throws 
AmazonClientException { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeAggregateIdFormatResult describeAggregateIdFormat(DescribeAggregateIdFormatRequest describeAggregateIdFormatRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DeleteSpotDatafeedSubscriptionResult deleteSpotDatafeedSubscription() throws AmazonServiceException, AmazonClientException { + public DescribeKeyPairsResult describeKeyPairs() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeAddressesResult describeAddresses() throws AmazonServiceException, AmazonClientException { + public DescribeLaunchTemplateVersionsResult describeLaunchTemplateVersions( + DescribeLaunchTemplateVersionsRequest describeLaunchTemplateVersionsRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeKeyPairsResult describeKeyPairs() throws AmazonServiceException, AmazonClientException { + public DescribeLaunchTemplatesResult describeLaunchTemplates(DescribeLaunchTemplatesRequest describeLaunchTemplatesRequest) { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeInstanceStatusResult describeInstanceStatus() throws AmazonServiceException, AmazonClientException { + public DescribeInstanceStatusResult describeInstanceStatus() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeVpnGatewaysResult describeVpnGateways() throws AmazonServiceException, AmazonClientException { + public DescribeVpnGatewaysResult describeVpnGateways() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override public DescribeReservedInstancesOfferingsResult describeReservedInstancesOfferings() - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeVpcEndpointServicesResult describeVpcEndpointServices() throws AmazonServiceException, AmazonClientException { + public DescribeVpcEndpointServicesResult describeVpcEndpointServices() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public AllocateAddressResult allocateAddress() throws AmazonServiceException, AmazonClientException { + public AllocateAddressResult allocateAddress() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @@ -2339,18 +3036,24 @@ public AllocateHostsResult allocateHosts(AllocateHostsRequest allocateHostsReque } @Override - public DescribeSnapshotsResult describeSnapshots() throws AmazonServiceException, AmazonClientException { + public ApplySecurityGroupsToClientVpnTargetNetworkResult applySecurityGroupsToClientVpnTargetNetwork( + ApplySecurityGroupsToClientVpnTargetNetworkRequest applySecurityGroupsToClientVpnTargetNetworkRequest) { + throw new UnsupportedOperationException("Not supported in mock"); + } + + @Override + public DescribeSnapshotsResult describeSnapshots() throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @Override - public DescribeVpcEndpointsResult describeVpcEndpoints() throws AmazonServiceException, AmazonClientException { + public DescribeVpcEndpointsResult describeVpcEndpoints() throws AmazonClientException { throw 
new UnsupportedOperationException("Not supported in mock"); } @Override public DryRunResult dryRun(DryRunSupportedRequest request) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } @@ -2370,7 +3073,7 @@ public ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest reques @Override public ModifySpotFleetRequestResult modifySpotFleetRequest(ModifySpotFleetRequestRequest modifySpotFleetRequestRequest) - throws AmazonServiceException, AmazonClientException { + throws AmazonClientException { throw new UnsupportedOperationException("Not supported in mock"); } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index 9d7d7e0eb0677..6703812a4ec0c 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -77,7 +77,7 @@ public void createTransportService() { new NetworkService(Collections.emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE, namedWriteableRegistry, new NoneCircuitBreakerService()) { @Override - public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException { + public TransportAddress[] addressesFromString(String address) throws UnknownHostException { // we just need to ensure we don't resolve DNS here return new TransportAddress[] {poorMansDNS.getOrDefault(address, buildNewFakeTransportAddress())}; } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceSeedHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceSeedHostsProvider.java index fded7c2445d2a..d193cb25c6e31 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceSeedHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceSeedHostsProvider.java @@ -233,8 +233,7 @@ public List getSeedAddresses(HostsResolver hostsResolver) { // ip_private is a single IP Address. We need to build a TransportAddress from it // If user has set `es_port` metadata, we don't need to ping all ports - // we only limit to 1 addresses, makes no sense to ping 100 ports - TransportAddress[] addresses = transportService.addressesFromString(address, 1); + TransportAddress[] addresses = transportService.addressesFromString(address); for (TransportAddress transportAddress : addresses) { logger.trace("adding {}, type {}, address {}, transport_address {}, status {}", name, type, diff --git a/plugins/examples/custom-settings/build.gradle b/plugins/examples/custom-settings/build.gradle index 3caf29c8513b5..b750018fefe87 100644 --- a/plugins/examples/custom-settings/build.gradle +++ b/plugins/examples/custom-settings/build.gradle @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.esplugin' esplugin { @@ -26,7 +27,7 @@ esplugin { noticeFile rootProject.file('NOTICE.txt') } -integTestCluster { +testClusters.integTest { // Adds a setting in the Elasticsearch keystore before running the integration tests - keystoreSetting 'custom.secured', 'password' -} \ No newline at end of file + keystore 'custom.secured', 'password' +} diff --git a/plugins/examples/custom-suggester/build.gradle b/plugins/examples/custom-suggester/build.gradle index 977e467391d8b..a6861c8be6370 100644 --- a/plugins/examples/custom-suggester/build.gradle +++ b/plugins/examples/custom-suggester/build.gradle @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ - +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.esplugin' esplugin { @@ -27,8 +27,8 @@ esplugin { noticeFile rootProject.file('NOTICE.txt') } -integTestCluster { - numNodes = 2 +testClusters.integTest { + numberOfNodes = 2 } // this plugin has no unit tests, only rest tests diff --git a/plugins/examples/painless-whitelist/build.gradle b/plugins/examples/painless-whitelist/build.gradle index 95928c472ca0d..738a3be86afab 100644 --- a/plugins/examples/painless-whitelist/build.gradle +++ b/plugins/examples/painless-whitelist/build.gradle @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. */ +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.esplugin' esplugin { @@ -31,8 +32,8 @@ dependencies { compileOnly "org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${versions.elasticsearch}" } -if (System.getProperty('tests.distribution') == null) { - integTestCluster.distribution = 'oss' +testClusters.integTest { + distribution = 'oss' } test.enabled = false diff --git a/plugins/examples/rescore/build.gradle b/plugins/examples/rescore/build.gradle index cdecd760c81e8..e18805bc5477c 100644 --- a/plugins/examples/rescore/build.gradle +++ b/plugins/examples/rescore/build.gradle @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. */ +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.esplugin' esplugin { diff --git a/plugins/examples/rest-handler/build.gradle b/plugins/examples/rest-handler/build.gradle index 98dd093ac17a3..14a6189f9ad4e 100644 --- a/plugins/examples/rest-handler/build.gradle +++ b/plugins/examples/rest-handler/build.gradle @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.esplugin' esplugin { @@ -36,11 +37,11 @@ task exampleFixture(type: org.elasticsearch.gradle.test.AntFixture) { args 'org.elasticsearch.example.resthandler.ExampleFixture', baseDir, 'TEST' } -integTestCluster { +integTest { dependsOn exampleFixture -} -integTestRunner { - nonInputProperties.systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }" + runner { + nonInputProperties.systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }" + } } testingConventions.naming { diff --git a/plugins/examples/script-expert-scoring/build.gradle b/plugins/examples/script-expert-scoring/build.gradle index e9da62acdcff4..6f88baccefc3d 100644 --- a/plugins/examples/script-expert-scoring/build.gradle +++ b/plugins/examples/script-expert-scoring/build.gradle @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. */ +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.esplugin' esplugin { diff --git a/plugins/examples/security-authorization-engine/build.gradle b/plugins/examples/security-authorization-engine/build.gradle index f869e4872ddc3..fba9580525bcc 100644 --- a/plugins/examples/security-authorization-engine/build.gradle +++ b/plugins/examples/security-authorization-engine/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.esplugin' esplugin { @@ -14,15 +15,14 @@ dependencies { testCompile "org.elasticsearch.client:x-pack-transport:${versions.elasticsearch}" } - -integTestRunner { +integTest { + dependsOn buildZip + runner { systemProperty 'tests.security.manager', 'false' + } } -integTestCluster { - dependsOn buildZip - distribution = 'default' - +testClusters.integTest { setting 'xpack.security.enabled', 'true' setting 'xpack.ilm.enabled', 'false' setting 'xpack.ml.enabled', 'false' @@ -34,17 +34,7 @@ integTestCluster { // processors are being used that are in ingest-common module. 
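
The example-plugin build scripts in the hunks around this point all follow the same migration: the legacy integTestCluster / integTestRunner DSL is replaced by the elasticsearch.testclusters plugin, with keystoreSetting becoming keystore, numNodes becoming numberOfNodes, and runner configuration nested inside integTest. Consolidating those renames into one before/after sketch (the settings are merged from several of the example plugins above purely for illustration; exampleFixture is the AntFixture task defined in the rest-handler example):

    // Before: legacy cluster DSL (removed in these hunks)
    integTestCluster {
      numNodes = 2
      keystoreSetting 'custom.secured', 'password'
    }
    integTestRunner {
      nonInputProperties.systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }"
    }

    // After: testclusters plugin (as applied in these hunks)
    apply plugin: 'elasticsearch.testclusters'
    apply plugin: 'elasticsearch.esplugin'

    testClusters.integTest {
      numberOfNodes = 2
      keystore 'custom.secured', 'password'
    }
    integTest {
      runner {
        nonInputProperties.systemProperty 'external.address', "${ -> exampleFixture.addressAndPort }"
      }
    }

The security-authorization-engine hunk just below follows the same pattern and additionally swaps its hand-rolled setupCommand / waitCondition block for the plugin's user helper (user role: 'custom_superuser').
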
distribution = 'default' - setupCommand 'setupDummyUser', - 'bin/elasticsearch-users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'custom_superuser' - waitCondition = { node, ant -> - File tmpFile = new File(node.cwd, 'wait.success') - ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow", - dest: tmpFile.toString(), - username: 'test_user', - password: 'x-pack-test-password', - ignoreerrors: true, - retries: 10) - return tmpFile.exists() - } + user role: 'custom_superuser' } + check.dependsOn integTest diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java index 2ba7838b90950..6b1a1c9254cf2 100644 --- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java @@ -39,8 +39,8 @@ public class AnnotatedTextHighlighter extends UnifiedHighlighter { public static final String NAME = "annotated"; @Override - protected Analyzer getAnalyzer(DocumentMapper docMapper, MappedFieldType type, HitContext hitContext) { - return new AnnotatedHighlighterAnalyzer(super.getAnalyzer(docMapper, type, hitContext), hitContext); + protected Analyzer getAnalyzer(DocumentMapper docMapper, HitContext hitContext) { + return new AnnotatedHighlighterAnalyzer(super.getAnalyzer(docMapper, hitContext), hitContext); } // Convert the marked-up values held on-disk to plain-text versions for highlighting diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java index 654836ea0fbef..697125fbd537d 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java @@ -22,8 +22,6 @@ import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; @@ -40,8 +38,6 @@ import static java.util.Collections.emptyMap; public class AzureBlobStore implements BlobStore { - - private static final Logger logger = LogManager.getLogger(AzureBlobStore.class); private final AzureStorageService service; @@ -82,17 +78,6 @@ public BlobContainer blobContainer(BlobPath path) { return new AzureBlobContainer(path, this); } - @Override - public void delete(BlobPath path) throws IOException { - final String keyPath = path.buildAsString(); - try { - service.deleteFiles(clientName, container, keyPath); - } catch (URISyntaxException | StorageException e) { - logger.warn("cannot access [{}] in container {{}}: {}", keyPath, container, e.getMessage()); - throw new IOException(e); - } - } - @Override public void close() { } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java 
b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index dab7c9627e6dc..4214e5d408210 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -50,11 +50,11 @@ import java.nio.channels.WritableByteChannel; import java.nio.file.FileAlreadyExistsException; import java.nio.file.NoSuchFileException; +import java.util.Map; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; @@ -91,11 +91,6 @@ public BlobContainer blobContainer(BlobPath path) { return new GoogleCloudStorageBlobContainer(path, this); } - @Override - public void delete(BlobPath path) throws IOException { - deleteBlobsByPrefix(path.buildAsString()); - } - @Override public void close() { } @@ -291,15 +286,6 @@ void deleteBlob(String blobName) throws IOException { } } - /** - * Deletes multiple blobs from the specific bucket all of which have prefixed names - * - * @param prefix prefix of the blobs to delete - */ - private void deleteBlobsByPrefix(String prefix) throws IOException { - deleteBlobsIgnoringIfNotExists(listBlobsByPrefix("", prefix).keySet()); - } - /** * Deletes multiple blobs from the specific bucket using a batch request * diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java index fde7657fe31d0..ad0e663058554 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobStore.java @@ -66,14 +66,6 @@ private void mkdirs(Path path) throws IOException { }); } - @Override - public void delete(BlobPath path) throws IOException { - execute((Operation) fc -> { - fc.delete(translateToHdfsPath(path), true); - return null; - }); - } - @Override public String toString() { return root.toUri().toString(); diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index d4df4094fcf92..fcded00553580 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -20,10 +20,6 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.services.s3.model.CannedAccessControlList; -import com.amazonaws.services.s3.model.DeleteObjectsRequest; -import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; -import com.amazonaws.services.s3.model.ObjectListing; -import com.amazonaws.services.s3.model.S3ObjectSummary; import com.amazonaws.services.s3.model.StorageClass; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.blobstore.BlobContainer; @@ -33,7 +29,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import java.io.IOException; -import java.util.ArrayList; import java.util.Locale; class S3BlobStore implements BlobStore { @@ -90,50 +85,6 @@ public BlobContainer blobContainer(BlobPath path) { return new S3BlobContainer(path, this); } - @Override - 
public void delete(BlobPath path) { - try (AmazonS3Reference clientReference = clientReference()) { - ObjectListing prevListing = null; - // From - // http://docs.amazonwebservices.com/AmazonS3/latest/dev/DeletingMultipleObjectsUsingJava.html - // we can do at most 1K objects per delete - // We don't know the bucket name until first object listing - DeleteObjectsRequest multiObjectDeleteRequest = null; - final ArrayList keys = new ArrayList<>(); - while (true) { - ObjectListing list; - if (prevListing != null) { - final ObjectListing finalPrevListing = prevListing; - list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(finalPrevListing)); - } else { - list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(bucket, path.buildAsString())); - multiObjectDeleteRequest = new DeleteObjectsRequest(list.getBucketName()); - } - for (final S3ObjectSummary summary : list.getObjectSummaries()) { - keys.add(new KeyVersion(summary.getKey())); - // Every 500 objects batch the delete request - if (keys.size() > 500) { - multiObjectDeleteRequest.setKeys(keys); - final DeleteObjectsRequest finalMultiObjectDeleteRequest = multiObjectDeleteRequest; - SocketAccess.doPrivilegedVoid(() -> clientReference.client().deleteObjects(finalMultiObjectDeleteRequest)); - multiObjectDeleteRequest = new DeleteObjectsRequest(list.getBucketName()); - keys.clear(); - } - } - if (list.isTruncated()) { - prevListing = list; - } else { - break; - } - } - if (!keys.isEmpty()) { - multiObjectDeleteRequest.setKeys(keys); - final DeleteObjectsRequest finalMultiObjectDeleteRequest = multiObjectDeleteRequest; - SocketAccess.doPrivilegedVoid(() -> clientReference.client().deleteObjects(finalMultiObjectDeleteRequest)); - } - } - } - @Override public void close() throws IOException { this.service.close(); diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportLoggingIT.java b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportLoggingIT.java index 826bfd6585f42..fac509a0e868a 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportLoggingIT.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportLoggingIT.java @@ -57,7 +57,7 @@ public void testLoggingHandler() throws IllegalAccessException { ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]" + " WRITE: \\d+B"; final MockLogAppender.LoggingExpectation writeExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, writePattern); final String readPattern = @@ -69,7 +69,7 @@ public void testLoggingHandler() throws IllegalAccessException { " READ: \\d+B"; final MockLogAppender.LoggingExpectation readExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, readPattern); appender.addExpectation(writeExpectation); diff --git a/qa/build.gradle b/qa/build.gradle index cbcb1d4580704..f1727f115155d 100644 --- a/qa/build.gradle +++ b/qa/build.gradle @@ -12,9 +12,11 @@ subprojects { Project subproj -> */ repositories { maven { + name "elastic" url "https://artifacts.elastic.co/maven" } maven { + name "elastic-snapshots" url "https://snapshots.elastic.co/maven" } } diff --git 
a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java index c664e28931087..245234baf2012 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/PackageTestCase.java @@ -56,6 +56,7 @@ import static org.elasticsearch.packaging.util.Packages.stopElasticsearch; import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation; import static org.elasticsearch.packaging.util.Platforms.getOsRelease; +import static org.elasticsearch.packaging.util.Platforms.isDPKG; import static org.elasticsearch.packaging.util.Platforms.isSystemd; import static org.elasticsearch.packaging.util.ServerUtils.makeRequest; import static org.elasticsearch.packaging.util.ServerUtils.runElasticsearchTests; @@ -78,6 +79,11 @@ public void onlyCompatibleDistributions() throws Exception { sh = newShell(); } + public void test05CheckLintian() throws Exception { + assumeTrue(isDPKG()); + sh.run("lintian --fail-on-warnings " + FileUtils.getDistributionFile(distribution())); + } + public void test10InstallPackage() throws Exception { assertRemoved(distribution()); installation = install(distribution()); @@ -163,6 +169,9 @@ public void test40StartServer() throws Exception { public void test50Remove() throws Exception { assumeThat(installation, is(notNullValue())); + // add fake bin directory as if a plugin was installed + Files.createDirectories(installation.bin.resolve("myplugin")); + remove(distribution()); // removing must stop the service diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java index e557b47fb8912..2eb3a288fbcc2 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java @@ -222,7 +222,6 @@ private static void verifyDefaultInstallation(Installation es, Distribution dist "elasticsearch-certgen", "elasticsearch-certutil", "elasticsearch-croneval", - "elasticsearch-migrate", "elasticsearch-saml-metadata", "elasticsearch-setup-passwords", "elasticsearch-sql-cli", diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java index 70ac89dc3b7f5..4d528b96c32e9 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Packages.java @@ -244,7 +244,6 @@ private static void verifyDefaultInstallation(Installation es) { "elasticsearch-certgen", "elasticsearch-certutil", "elasticsearch-croneval", - "elasticsearch-migrate", "elasticsearch-saml-metadata", "elasticsearch-setup-passwords", "elasticsearch-sql-cli", diff --git a/qa/wildfly/build.gradle b/qa/wildfly/build.gradle index 6ffdf24cd5dfc..bcb55079b8269 100644 --- a/qa/wildfly/build.gradle +++ b/qa/wildfly/build.gradle @@ -37,6 +37,7 @@ int managementPort repositories { // the Wildfly distribution is not available via a repository, so we fake an Ivy repository on top of the download site ivy { + name "wildfly" url "https://download.jboss.org" metadataSources { artifact() @@ -69,7 +70,7 @@ dependencies { compile "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}" compile "org.apache.logging.log4j:log4j-api:${versions.log4j}" compile 
"org.apache.logging.log4j:log4j-core:${versions.log4j}" - compile project(path: ':client:transport', configuration: 'runtime') + compile project(path: ':client:rest-high-level') wildfly "org.jboss:wildfly:${wildflyVersion}@zip" testCompile "org.elasticsearch.test:framework:${VersionProperties.elasticsearch}" } @@ -92,8 +93,7 @@ task writeElasticsearchProperties { final File elasticsearchProperties = file("${wildflyInstall}/standalone/configuration/elasticsearch.properties") elasticsearchProperties.write( [ - "transport.uri=${-> integTest.getNodes().get(0).transportUri()}", - "cluster.name=${-> integTest.getNodes().get(0).clusterName}" + "http.uri=${-> integTest.getNodes().get(0).httpUri()}" ].join("\n")) } } @@ -166,7 +166,7 @@ task startWildfly { } } -task configureTransportClient(type: LoggedExec) { +task configureClient(type: LoggedExec) { dependsOn startWildfly // we skip these tests on Windows so we do not need to worry about compatibility here commandLine "${wildflyInstall}/bin/jboss-cli.sh", @@ -181,7 +181,7 @@ task stopWildfly(type: LoggedExec) { } if (!Os.isFamily(Os.FAMILY_WINDOWS)) { - integTestRunner.dependsOn(configureTransportClient) + integTestRunner.dependsOn(configureClient) final TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() { @Override void afterExecute(final Task task, final TaskState state) { diff --git a/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportClientActivator.java b/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelClientActivator.java similarity index 87% rename from qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportClientActivator.java rename to qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelClientActivator.java index 881b263f35b97..c860f9e5e1bf7 100644 --- a/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportClientActivator.java +++ b/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelClientActivator.java @@ -26,11 +26,11 @@ import java.util.Set; @ApplicationPath("/transport") -public class TransportClientActivator extends Application { +public class RestHighLevelClientActivator extends Application { @Override public Set> getClasses() { - return Collections.singleton(TransportClientEmployeeResource.class); + return Collections.singleton(RestHighLevelClientEmployeeResource.class); } } diff --git a/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportClientEmployeeResource.java b/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelClientEmployeeResource.java similarity index 84% rename from qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportClientEmployeeResource.java rename to qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelClientEmployeeResource.java index 4008bf8801a55..d99810a9638cc 100644 --- a/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportClientEmployeeResource.java +++ b/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelClientEmployeeResource.java @@ -19,9 +19,12 @@ package org.elasticsearch.wildfly.transport; +import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.client.transport.TransportClient; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestHighLevelClient; 
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.wildfly.model.Employee; @@ -33,7 +36,6 @@ import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; - import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; @@ -44,17 +46,17 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @Path("/employees") -public class TransportClientEmployeeResource { +public class RestHighLevelClientEmployeeResource { @Inject - private TransportClient client; + private RestHighLevelClient client; @GET @Path("/{id}") @Produces(MediaType.APPLICATION_JSON) - public Response getEmployeeById(final @PathParam("id") Long id) { + public Response getEmployeeById(final @PathParam("id") Long id) throws IOException { Objects.requireNonNull(id); - final GetResponse response = client.prepareGet("megacorp", "employee", Long.toString(id)).get(); + final GetResponse response = client.get(new GetRequest("megacorp", Long.toString(id)), RequestOptions.DEFAULT); if (response.isExists()) { final Map source = response.getSource(); final Employee employee = new Employee(); @@ -94,7 +96,10 @@ public Response putEmployeeById(final @PathParam("id") Long id, final Employee e } } builder.endObject(); - final IndexResponse response = client.prepareIndex("megacorp", "employee", Long.toString(id)).setSource(builder).get(); + final IndexRequest request = new IndexRequest("megacorp"); + request.id(Long.toString(id)); + request.source(builder); + final IndexResponse response = client.index(request, RequestOptions.DEFAULT); if (response.status().getStatus() == 201) { return Response.created(new URI("/employees/" + id)).build(); } else { diff --git a/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportClientProducer.java b/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelClientProducer.java similarity index 58% rename from qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportClientProducer.java rename to qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelClientProducer.java index 7c234bce6cdb7..5d924192342ef 100644 --- a/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportClientProducer.java +++ b/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelClientProducer.java @@ -19,46 +19,34 @@ package org.elasticsearch.wildfly.transport; -import org.elasticsearch.client.transport.TransportClient; +import org.apache.http.HttpHost; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.transport.client.PreBuiltTransportClient; import javax.enterprise.inject.Produces; - import java.io.IOException; import java.io.InputStream; -import java.net.InetAddress; import java.nio.file.Files; import java.nio.file.Path; -import java.util.Collections; import java.util.Properties; @SuppressWarnings("unused") -public final class TransportClientProducer { +public final class RestHighLevelClientProducer { @Produces - public TransportClient createTransportClient() throws IOException { + public RestHighLevelClient createRestHighLevelClient() throws IOException { final String elasticsearchProperties = System.getProperty("elasticsearch.properties"); final 
Properties properties = new Properties(); - final String transportUri; - final String clusterName; + final String httpUri; try (InputStream is = Files.newInputStream(getPath(elasticsearchProperties))) { properties.load(is); - transportUri = properties.getProperty("transport.uri"); - clusterName = properties.getProperty("cluster.name"); + httpUri = properties.getProperty("http.uri"); } - final int lastColon = transportUri.lastIndexOf(':'); - final String host = transportUri.substring(0, lastColon); - final int port = Integer.parseInt(transportUri.substring(lastColon + 1)); - final Settings settings = Settings.builder().put("cluster.name", clusterName).build(); - final TransportClient transportClient = new PreBuiltTransportClient(settings, Collections.emptyList()); - transportClient.addTransportAddress(new TransportAddress(InetAddress.getByName(host), port)); - return transportClient; + return new RestHighLevelClient(RestClient.builder(HttpHost.create(httpUri))); } @SuppressForbidden(reason = "get path not configured in environment") diff --git a/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportJacksonJsonProvider.java b/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelJacksonJsonProvider.java similarity index 92% rename from qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportJacksonJsonProvider.java rename to qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelJacksonJsonProvider.java index 07585780c0665..50568790ca064 100644 --- a/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/TransportJacksonJsonProvider.java +++ b/qa/wildfly/src/main/java/org/elasticsearch/wildfly/transport/RestHighLevelJacksonJsonProvider.java @@ -24,5 +24,5 @@ import javax.ws.rs.ext.Provider; @Provider -public class TransportJacksonJsonProvider extends ResteasyJackson2Provider { +public class RestHighLevelJacksonJsonProvider extends ResteasyJackson2Provider { } diff --git a/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java b/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java index 9aebffdc4ce3f..28e11f021a1c7 100644 --- a/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java +++ b/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java @@ -53,7 +53,7 @@ @TestRuleLimitSysouts.Limit(bytes = 14000) public class WildflyIT extends LuceneTestCase { - Logger logger = Logger.getLogger(WildflyIT.class); + private Logger logger = Logger.getLogger(WildflyIT.class); public void testTransportClient() throws URISyntaxException, IOException { try (CloseableHttpClient client = HttpClientBuilder.create().build()) { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/create.json index f21d2606364d1..65fcf02807ba1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/create.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/create.json @@ -33,10 +33,6 @@ "type" : "string", "description" : "Sets the number of shard copies that must be active before proceeding with the index operation. Defaults to 1, meaning the primary shard only. 
Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)" }, - "parent": { - "type" : "string", - "description" : "ID of the parent document" - }, "refresh": { "type" : "enum", "options": ["true", "false", "wait_for"], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json index 792f9d89609bf..0152374028832 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete.json @@ -33,10 +33,6 @@ "type" : "string", "description" : "Sets the number of shard copies that must be active before proceeding with the delete operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)" }, - "parent": { - "type" : "string", - "description" : "ID of parent document" - }, "refresh": { "type" : "enum", "options": ["true", "false", "wait_for"], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json index 37b88e0f24d93..aa1d8b4be4f26 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -5,23 +5,12 @@ "url": { "path": "/{index}/_delete_by_query", "paths": ["/{index}/_delete_by_query"], - "deprecated_paths" : [ - { - "version" : "7.0.0", - "path" : "/{index}/{type}/_delete_by_query", - "description" : "Specifying types in urls has been deprecated" - } - ], "comment": "most things below this are just copied from search.json", "parts": { "index": { "required" : true, "type" : "list", "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to search; leave empty to perform the operation on all types" } }, "params": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json b/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json index 3debd3edce585..2a451344521e4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/exists.json @@ -33,10 +33,6 @@ "type": "list", "description" : "A comma-separated list of stored fields to return in the response" }, - "parent": { - "type" : "string", - "description" : "The ID of the parent document" - }, "preference": { "type" : "string", "description" : "Specify the node or shard the operation should be performed on (default: random)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json b/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json index 89f9c33e5fb44..30e56141ec001 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/exists_source.json @@ -30,10 +30,6 @@ } }, "params": { - "parent": { - "type" : "string", - "description" : "The ID of the parent document" - }, "preference": { "type" : "string", "description" : "Specify the node or shard the operation should be performed on (default: random)" diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json index 12aa7a8dca942..203ef23c9cc10 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json @@ -55,10 +55,6 @@ "type" : "boolean", "description" : "Specify whether format-based query failures (such as providing text to a numeric field) should be ignored" }, - "parent": { - "type" : "string", - "description" : "The ID of the parent document" - }, "preference": { "type" : "string", "description" : "Specify the node or shard the operation should be performed on (default: random)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get.json index 5b2203c94deb9..f4e0fdd5f90ef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get.json @@ -33,10 +33,6 @@ "type": "list", "description" : "A comma-separated list of stored fields to return in the response" }, - "parent": { - "type" : "string", - "description" : "The ID of the parent document" - }, "preference": { "type" : "string", "description" : "Specify the node or shard the operation should be performed on (default: random)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json index a26691edc41fc..d6f6964aa7c36 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get_source.json @@ -30,10 +30,6 @@ } }, "params": { - "parent": { - "type" : "string", - "description" : "The ID of the parent document" - }, "preference": { "type" : "string", "description" : "Specify the node or shard the operation should be performed on (default: random)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json index 2a2053d2250a0..438032980a3c5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/index.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/index.json @@ -43,10 +43,6 @@ "default" : "index", "description" : "Explicit operation type" }, - "parent": { - "type" : "string", - "description" : "ID of the parent document" - }, "refresh": { "type" : "enum", "options": ["true", "false", "wait_for"], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json index 9c416e841362a..647ed9ed3ac77 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch.json @@ -5,21 +5,10 @@ "url": { "path": "/_msearch", "paths": ["/_msearch", "/{index}/_msearch"], - "deprecated_paths" : [ - { - "version" : "7.0.0", - "path" : "/{index}/{type}/_msearch", - "description" : "Specifying types in urls has been deprecated" - } - ], "parts": { "index": { "type" : "list", "description" : "A comma-separated list of index names to use as default" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to use as default" } }, "params": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json index 87aa6b22b8d45..7bc957d98ff29 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json @@ -5,21 +5,10 @@ "url": { "path": "/_msearch/template", "paths": ["/_msearch/template", "/{index}/_msearch/template"], - "deprecated_paths" : [ - { - "version" : "7.0.0", - "path" : "/{index}/{type}/_msearch/template", - "description" : "Specifying types in urls has been deprecated" - } - ], "parts": { "index": { "type" : "list", "description" : "A comma-separated list of index names to use as default" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to use as default" } }, "params": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json b/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json index 8cf4b22e90da9..aaff8e73259cf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/mtermvectors.json @@ -5,21 +5,10 @@ "url" : { "path" : "/_mtermvectors", "paths" : ["/_mtermvectors", "/{index}/_mtermvectors"], - "deprecated_paths" : [ - { - "version" : "7.0.0", - "path" : "/{index}/{type}/_mtermvectors", - "description" : "Specifying types in urls has been deprecated" - } - ], "parts" : { "index" : { "type" : "string", "description" : "The index in which the document resides." - }, - "type" : { - "type" : "string", - "description" : "The type of the document." } }, "params" : { @@ -73,11 +62,6 @@ "description" : "Specific routing value. Applies to all returned documents unless otherwise specified in body \"params\" or \"docs\".", "required" : false }, - "parent" : { - "type" : "string", - "description" : "Parent id of documents. 
Applies to all returned documents unless otherwise specified in body \"params\" or \"docs\".", - "required" : false - }, "realtime": { "type": "boolean", "description": "Specifies if requests are real-time as opposed to near-real-time (default: true).", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index 00ed8d113a00c..08d753ee9d558 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -5,21 +5,10 @@ "url": { "path": "/_search", "paths": ["/_search", "/{index}/_search"], - "deprecated_paths" : [ - { - "version" : "7.0.0", - "path" : "/{index}/{type}/_search", - "description" : "Specifying types in urls has been deprecated" - } - ], "parts": { "index": { "type" : "list", "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to search; leave empty to perform the operation on all types" } }, "params": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json index 5395e4f59c10e..81a293d4a9b34 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json @@ -5,21 +5,10 @@ "url": { "path": "/_search/template", "paths": ["/_search/template", "/{index}/_search/template"], - "deprecated_paths" : [ - { - "version" : "7.0.0", - "path" : "/{index}/{type}/_search/template", - "description" : "Specifying types in urls has been deprecated" - } - ], "parts": { "index": { "type" : "list", "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to search; leave empty to perform the operation on all types" } }, "params" : { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json b/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json index 44b972b355f68..bbbdc7c87ad0b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/termvectors.json @@ -5,29 +5,12 @@ "url" : { "path" : "/{index}/_termvectors/{id}", "paths" : ["/{index}/_termvectors/{id}", "/{index}/_termvectors"], - "deprecated_paths" : [ - { - "version" : "7.0.0", - "path" : "/{index}/{type}/{id}/_termvectors", - "description" : "Specifying types in urls has been deprecated" - }, - { - "version" : "7.0.0", - "path" : "/{index}/{type}/_termvectors", - "description" : "Specifying types in urls has been deprecated" - } - ], "parts" : { "index" : { "type" : "string", "description" : "The index in which the document resides.", "required" : true }, - "type" : { - "type" : "string", - "description" : "The type of the document.", - "required" : false - }, "id" : { "type" : "string", "description" : "The id of the document, when not specified a doc param should be supplied." 
@@ -79,11 +62,6 @@ "description" : "Specific routing value.", "required" : false }, - "parent": { - "type" : "string", - "description" : "Parent id of documents.", - "required" : false - }, "realtime": { "type": "boolean", "description": "Specifies if request is real-time as opposed to near-real-time (default: true).", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json index b85c70be57d9e..02435190674cf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json @@ -49,10 +49,6 @@ "type": "string", "description": "The script language (default: painless)" }, - "parent": { - "type": "string", - "description": "ID of the parent document. Is is only used for routing and when for the upsert request" - }, "refresh": { "type" : "enum", "options": ["true", "false", "wait_for"], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json index bdb4c565bd965..10b6e6d87f277 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json @@ -5,23 +5,12 @@ "url": { "path": "/{index}/_update_by_query", "paths": ["/{index}/_update_by_query"], - "deprecated_paths" : [ - { - "version" : "7.0.0", - "path" : "/{index}/{type}/_update_by_query", - "description" : "Specifying types in urls has been deprecated" - } - ], "comment": "most things below this are just copied from search.json", "parts": { "index": { "required" : true, "type" : "list", "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to search; leave empty to perform the operation on all types" } }, "params": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/30_segments.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/30_segments.yml index 2d4d804063220..e4e820ff2b8bb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/30_segments.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/30_segments.yml @@ -1,9 +1,5 @@ --- setup: - - skip: - version: "all" - reason: "AwaitsFix: https://github.com/elastic/elasticsearch/issues/40331" - - do: indices.create: index: test @@ -47,6 +43,7 @@ setup: - do: indices.close: index: test + wait_for_active_shards: all - do: indices.stats: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/11_basic_with_types.yml deleted file mode 100644 index 0c037eee9ddd2..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/11_basic_with_types.yml +++ /dev/null @@ -1,86 +0,0 @@ -setup: - - do: - indices.create: - include_type_name: true - index: testidx - body: - mappings: - testtype: - properties: - text: - type : "text" - term_vector : "with_positions_offsets" - - do: - index: - index: testidx - type: testtype - id: testing_document - body: {"text" : "The quick brown fox is brown."} - - - do: - indices.refresh: {} - ---- -"Basic tests for multi termvector get": - - - do: - mtermvectors: - "term_statistics" : true - "body" : - "docs": - - - 
"_index" : "testidx" - "_type" : "testtype" - "_id" : "testing_document" - - - match: {docs.0.term_vectors.text.terms.brown.term_freq: 2} - - match: {docs.0.term_vectors.text.terms.brown.ttf: 2} - - - do: - mtermvectors: - "term_statistics" : true - "body" : - "docs": - - - "_index" : "testidx" - "_type" : "testtype" - "_id" : "testing_document" - - - match: {docs.0.term_vectors.text.terms.brown.term_freq: 2} - - match: {docs.0.term_vectors.text.terms.brown.ttf: 2} - - - do: - mtermvectors: - "term_statistics" : true - "index" : "testidx" - "body" : - "docs": - - - "_type" : "testtype" - "_id" : "testing_document" - - - match: {docs.0.term_vectors.text.terms.brown.term_freq: 2} - - match: {docs.0.term_vectors.text.terms.brown.ttf: 2} - - - do: - mtermvectors: - "term_statistics" : true - "index" : "testidx" - "type" : "testtype" - "body" : - "docs": - - - "_id" : "testing_document" - - - match: {docs.0.term_vectors.text.terms.brown.term_freq: 2} - - match: {docs.0.term_vectors.text.terms.brown.ttf: 2} - - - do: - mtermvectors: - "term_statistics" : true - "index" : "testidx" - "type" : "testtype" - "ids" : ["testing_document"] - - - match: {docs.0.term_vectors.text.terms.brown.term_freq: 2} - - match: {docs.0.term_vectors.text.terms.brown.ttf: 2} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/21_deprecated_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/21_deprecated_with_types.yml deleted file mode 100644 index 3e39a33e1061d..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/21_deprecated_with_types.yml +++ /dev/null @@ -1,51 +0,0 @@ - ---- -"Deprecated camel case and _ parameters should fail in Term Vectors query": - - - skip: - features: "warnings" - - - do: - indices.create: - include_type_name: true - index: testidx - body: - mappings: - testtype: - properties: - text: - type : "text" - term_vector : "with_positions_offsets" - - - do: - index: - index: testidx - type: testtype - id: testing_document - body: {"text" : "The quick brown fox is brown."} - - - do: - catch: bad_request - mtermvectors: - "term_statistics" : true - "body" : - "docs": - - - "_index" : "testidx" - "_type" : "testtype" - "_id" : "testing_document" - "version" : 1 - "versionType" : "external" - - - do: - catch: bad_request - mtermvectors: - "term_statistics" : true - "body" : - "docs": - - - "_index" : "testidx" - "_type" : "testtype" - "_id" : "testing_document" - "version" : 1 - "_version_type" : "external" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml deleted file mode 100644 index 51d8e23dbfa62..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mtermvectors/30_mix_typeless_typeful.yml +++ /dev/null @@ -1,29 +0,0 @@ ---- -"mtermvectors without types on an index that has types": - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type : "text" - term_vector : "with_positions_offsets" - - - do: - index: - index: index - id: 1 - body: { foo: bar } - - - do: - mtermvectors: - body: - docs: - - _index: index - _id: 1 - - - match: {docs.0.term_vectors.foo.terms.bar.term_freq: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml index a6b7cae104418..e0183f0c54f66 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml @@ -251,8 +251,20 @@ setup: --- "Bad params": + - skip: + version: " - 7.1.99" + reason: "empty bodies throws exception starting in 7.2" + - do: + catch: /\[filters\] cannot be empty/ + search: + rest_total_hits_as_int: true + body: + aggs: + the_filter: + filters: {} - do: + catch: /\[filters\] cannot be empty/ search: rest_total_hits_as_int: true body: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml index c85ca9ee676f6..9adf6343fbf95 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml @@ -75,6 +75,7 @@ setup: --- "Max bucket": + - do: cluster.put_settings: body: @@ -85,6 +86,7 @@ setup: catch: /.*Trying to create too many buckets.*/ search: rest_total_hits_as_int: true + allow_partial_search_results: false index: test body: aggregations: @@ -102,6 +104,7 @@ setup: catch: /.*Trying to create too many buckets.*/ search: rest_total_hits_as_int: true + allow_partial_search_results: false index: test body: aggregations: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml index b15a48f52a43e..df6664141c3b3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml @@ -165,20 +165,6 @@ setup: docvalue_fields: [ "count" ] - match: { hits.hits.0.fields.count: [1] } ---- -"docvalue_fields with default format": - - skip: - features: warnings - - do: - warnings: - - "[use_field_mapping] is a special format that was only used to ease the transition to 7.x. It has become the default and shouldn't be set explicitly anymore." - search: - body: - docvalue_fields: - - field: "count" - format: "use_field_mapping" - - match: { hits.hits.0.fields.count: [1] } - --- "docvalue_fields with explicit format": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/11_basic_with_types.yml deleted file mode 100644 index 992d6db7ca786..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/11_basic_with_types.yml +++ /dev/null @@ -1,36 +0,0 @@ -setup: - - do: - indices.create: - include_type_name: true - index: testidx - body: - mappings: - testtype: - "properties": - "text": - "type" : "text" - "term_vector" : "with_positions_offsets" - - do: - index: - index: testidx - type: testtype - id: testing_document - body: - "text" : "The quick brown fox is brown." 
- - do: - indices.refresh: {} - ---- -"Basic tests for termvector get": - - - do: - termvectors: - index: testidx - type: testtype - id: testing_document - "term_statistics" : true - - - - match: {term_vectors.text.field_statistics.sum_doc_freq: 5} - - match: {term_vectors.text.terms.brown.doc_freq: 1} - - match: {term_vectors.text.terms.brown.tokens.0.start_offset: 10} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/21_issue7121_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/21_issue7121_with_types.yml deleted file mode 100644 index cf597bf141f61..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/21_issue7121_with_types.yml +++ /dev/null @@ -1,42 +0,0 @@ -"Term vector API should return 'found: false' for docs between index and refresh": - - do: - indices.create: - include_type_name: true - index: testidx - body: - settings: - index: - translog.flush_threshold_size: "512MB" - number_of_shards: 1 - number_of_replicas: 0 - refresh_interval: -1 - mappings: - doc: - properties: - text: - type : "text" - term_vector : "with_positions_offsets" - - - do: - cluster.health: - wait_for_status: green - - - do: - index: - index: testidx - type: doc - id: 1 - body: - text : "foo bar" - - - do: - termvectors: - index: testidx - type: doc - id: 1 - realtime: false - - - match: { _index: "testidx" } - - match: { _type: "doc" } - - match: { _id: "1" } - - is_false: found diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/31_realtime_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/31_realtime_with_types.yml deleted file mode 100644 index 26f441207ace8..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/31_realtime_with_types.yml +++ /dev/null @@ -1,40 +0,0 @@ ---- -"Realtime Term Vectors": - - - do: - indices.create: - index: test_1 - body: - settings: - index: - refresh_interval: -1 - number_of_replicas: 0 - - - do: - cluster.health: - wait_for_status: green - - - do: - index: - index: test_1 - type: test - id: 1 - body: { foo: bar } - - - do: - termvectors: - index: test_1 - type: test - id: 1 - realtime: false - - - is_false: found - - - do: - termvectors: - index: test_1 - type: test - id: 1 - realtime: true - - - is_true: found diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/50_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/50_mix_typeless_typeful.yml deleted file mode 100644 index 5801025654cf6..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/50_mix_typeless_typeful.yml +++ /dev/null @@ -1,42 +0,0 @@ ---- -"Term vectors with typeless API on an index that has types": - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type: "text" - term_vector: "with_positions" - - - do: - index: - index: index - type: not_doc - id: 1 - body: { foo: bar } - - - do: - indices.refresh: {} - - - do: - termvectors: - index: index - type: _doc # todo: remove when termvectors support typeless API - id: 1 - - - is_true: found - - match: {_type: _doc} - - match: {term_vectors.foo.terms.bar.term_freq: 1} - - - do: - termvectors: - index: index - type: some_random_type - id: 1 - - - is_false: found diff --git a/server/licenses/joda-time-2.10.1.jar.sha1 b/server/licenses/joda-time-2.10.1.jar.sha1 deleted file mode 
100644 index 75e809754ecee..0000000000000 --- a/server/licenses/joda-time-2.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9ac3dbf89dbf2ee385185dd0cd3064fe789efee0 \ No newline at end of file diff --git a/server/licenses/joda-time-2.10.2.jar.sha1 b/server/licenses/joda-time-2.10.2.jar.sha1 new file mode 100644 index 0000000000000..9cbac57161c8e --- /dev/null +++ b/server/licenses/joda-time-2.10.2.jar.sha1 @@ -0,0 +1 @@ +a079fc39ccc3de02acdeb7117443e5d9bd431687 \ No newline at end of file diff --git a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java index 1700979c32d64..c696d476bbb43 100644 --- a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java @@ -118,7 +118,9 @@ protected void blend(final TermStates[] contexts, int maxDoc, IndexReader reader // otherwise the statistics don't match minSumTTF = Math.min(minSumTTF, reader.getSumTotalTermFreq(terms[i].field())); } - + } + if (maxDoc > minSumTTF) { + maxDoc = (int)minSumTTF; } if (max == 0) { return; // we are done that term doesn't exist at all diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 49c5509ca1c18..0a6b19444efa7 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -46,40 +46,6 @@ public class Version implements Comparable, ToXContentFragment { */ public static final int V_EMPTY_ID = 0; public static final Version V_EMPTY = new Version(V_EMPTY_ID, org.apache.lucene.util.Version.LATEST); - public static final int V_6_0_0_alpha1_ID = 6000001; - public static final Version V_6_0_0_alpha1 = - new Version(V_6_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); - public static final int V_6_0_0_alpha2_ID = 6000002; - public static final Version V_6_0_0_alpha2 = - new Version(V_6_0_0_alpha2_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); - public static final int V_6_0_0_beta1_ID = 6000026; - public static final Version V_6_0_0_beta1 = - new Version(V_6_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); - public static final int V_6_0_0_beta2_ID = 6000027; - public static final Version V_6_0_0_beta2 = - new Version(V_6_0_0_beta2_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); - public static final int V_6_0_0_rc1_ID = 6000051; - public static final Version V_6_0_0_rc1 = - new Version(V_6_0_0_rc1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); - public static final int V_6_0_0_rc2_ID = 6000052; - public static final Version V_6_0_0_rc2 = - new Version(V_6_0_0_rc2_ID, org.apache.lucene.util.Version.LUCENE_7_0_1); - public static final int V_6_0_0_ID = 6000099; - public static final Version V_6_0_0 = - new Version(V_6_0_0_ID, org.apache.lucene.util.Version.LUCENE_7_0_1); - public static final int V_6_0_1_ID = 6000199; - public static final Version V_6_0_1 = - new Version(V_6_0_1_ID, org.apache.lucene.util.Version.LUCENE_7_0_1); - public static final int V_6_1_0_ID = 6010099; - public static final Version V_6_1_0 = new Version(V_6_1_0_ID, org.apache.lucene.util.Version.LUCENE_7_1_0); - public static final int V_6_1_1_ID = 6010199; - public static final Version V_6_1_1 = new Version(V_6_1_1_ID, org.apache.lucene.util.Version.LUCENE_7_1_0); - public static final int V_6_1_2_ID = 6010299; - public static final Version V_6_1_2 = new Version(V_6_1_2_ID, org.apache.lucene.util.Version.LUCENE_7_1_0); - 
public static final int V_6_1_3_ID = 6010399; - public static final Version V_6_1_3 = new Version(V_6_1_3_ID, org.apache.lucene.util.Version.LUCENE_7_1_0); - public static final int V_6_1_4_ID = 6010499; - public static final Version V_6_1_4 = new Version(V_6_1_4_ID, org.apache.lucene.util.Version.LUCENE_7_1_0); // The below version is missing from the 7.3 JAR private static final org.apache.lucene.util.Version LUCENE_7_2_1 = org.apache.lucene.util.Version.fromBits(7, 2, 1); public static final int V_6_2_0_ID = 6020099; @@ -128,18 +94,18 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_6_7_1 = new Version(V_6_7_1_ID, org.apache.lucene.util.Version.LUCENE_7_7_0); public static final int V_6_7_2_ID = 6070299; public static final Version V_6_7_2 = new Version(V_6_7_2_ID, org.apache.lucene.util.Version.LUCENE_7_7_0); - public static final int V_6_7_3_ID = 6070399; - public static final Version V_6_7_3 = new Version(V_6_7_3_ID, org.apache.lucene.util.Version.LUCENE_7_7_0); public static final int V_6_8_0_ID = 6080099; public static final Version V_6_8_0 = new Version(V_6_8_0_ID, org.apache.lucene.util.Version.LUCENE_7_7_0); + public static final int V_6_8_1_ID = 6080199; + public static final Version V_6_8_1 = new Version(V_6_8_1_ID, org.apache.lucene.util.Version.LUCENE_7_7_0); public static final int V_7_0_0_ID = 7000099; public static final Version V_7_0_0 = new Version(V_7_0_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0); public static final int V_7_0_1_ID = 7000199; public static final Version V_7_0_1 = new Version(V_7_0_1_ID, org.apache.lucene.util.Version.LUCENE_8_0_0); - public static final int V_7_0_2_ID = 7000299; - public static final Version V_7_0_2 = new Version(V_7_0_2_ID, org.apache.lucene.util.Version.LUCENE_8_0_0); public static final int V_7_1_0_ID = 7010099; public static final Version V_7_1_0 = new Version(V_7_1_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0); + public static final int V_7_1_1_ID = 7010199; + public static final Version V_7_1_1 = new Version(V_7_1_1_ID, org.apache.lucene.util.Version.LUCENE_8_0_0); public static final int V_7_2_0_ID = 7020099; public static final Version V_7_2_0 = new Version(V_7_2_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0); public static final int V_8_0_0_ID = 8000099; @@ -162,18 +128,18 @@ public static Version fromId(int id) { return V_8_0_0; case V_7_2_0_ID: return V_7_2_0; + case V_7_1_1_ID: + return V_7_1_1; case V_7_1_0_ID: return V_7_1_0; - case V_7_0_2_ID: - return V_7_0_2; case V_7_0_1_ID: return V_7_0_1; case V_7_0_0_ID: return V_7_0_0; + case V_6_8_1_ID: + return V_6_8_1; case V_6_8_0_ID: return V_6_8_0; - case V_6_7_3_ID: - return V_6_7_3; case V_6_7_1_ID: return V_6_7_1; case V_6_7_2_ID: @@ -220,32 +186,6 @@ public static Version fromId(int id) { return V_6_2_1; case V_6_2_0_ID: return V_6_2_0; - case V_6_1_4_ID: - return V_6_1_4; - case V_6_1_3_ID: - return V_6_1_3; - case V_6_1_2_ID: - return V_6_1_2; - case V_6_1_1_ID: - return V_6_1_1; - case V_6_1_0_ID: - return V_6_1_0; - case V_6_0_1_ID: - return V_6_0_1; - case V_6_0_0_ID: - return V_6_0_0; - case V_6_0_0_rc2_ID: - return V_6_0_0_rc2; - case V_6_0_0_beta2_ID: - return V_6_0_0_beta2; - case V_6_0_0_rc1_ID: - return V_6_0_0_rc1; - case V_6_0_0_beta1_ID: - return V_6_0_0_beta1; - case V_6_0_0_alpha2_ID: - return V_6_0_0_alpha2; - case V_6_0_0_alpha1_ID: - return V_6_0_0_alpha1; case V_EMPTY_ID: return V_EMPTY; default: @@ -445,7 +385,7 @@ public Version minimumCompatibilityVersion() { return bwcVersion == null 
? this : bwcVersion; } - return Version.min(this, fromId((int) major * 1000000 + 0 * 10000 + 99)); + return Version.min(this, fromId(major * 1000000 + 0 * 10000 + 99)); } /** @@ -457,8 +397,6 @@ public Version minimumIndexCompatibilityVersion() { final int bwcMajor; if (major == 5) { bwcMajor = 2; // we jumped from 2 to 5 - } else if (major == 7) { - return V_6_0_0_beta1; } else { bwcMajor = major - 1; } diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java index 465a93914410b..80225c3a60ce4 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -259,7 +259,7 @@ public String getLocation(@Nullable String routing) { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); type = in.readString(); id = in.readString(); version = in.readZLong(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java index fe07a4efe930e..d1d72da544560 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java @@ -203,27 +203,15 @@ void getFinishedTaskFromIndex(Task thisTask, GetTaskRequest request, ActionListe request.getTaskId().toString()); get.setParentTask(clusterService.localNode().getId(), thisTask.getId()); - client.get(get, new ActionListener() { - @Override - public void onResponse(GetResponse getResponse) { - try { - onGetFinishedTaskFromIndex(getResponse, listener); - } catch (Exception e) { - listener.onFailure(e); - } - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrap(e, IndexNotFoundException.class) != null) { - // We haven't yet created the index for the task results so it can't be found. - listener.onFailure(new ResourceNotFoundException("task [{}] isn't running and hasn't stored its results", e, - request.getTaskId())); - } else { - listener.onFailure(e); - } + client.get(get, ActionListener.wrap(r -> onGetFinishedTaskFromIndex(r, listener), e -> { + if (ExceptionsHelper.unwrap(e, IndexNotFoundException.class) != null) { + // We haven't yet created the index for the task results so it can't be found. 
+ listener.onFailure(new ResourceNotFoundException("task [{}] isn't running and hasn't stored its results", e, + request.getTaskId())); + } else { + listener.onFailure(e); } - }); + })); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java index 1c6bf90c4149d..d8948958d7a07 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java @@ -40,7 +40,7 @@ public ClusterSearchShardsGroup(ShardId shardId, ShardRouting[] shards) { } ClusterSearchShardsGroup(StreamInput in) throws IOException { - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); shards = new ShardRouting[in.readVInt()]; for (int i = 0; i < shards.length; i++) { shards[i] = new ShardRouting(shardId, in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index 1737756fe7db9..8f71090cc469f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -214,7 +214,7 @@ public void readFrom(StreamInput in) throws IOException { int numberOfShards = in.readVInt(); Map shardMapBuilder = new HashMap<>(numberOfShards); for (int j = 0; j < numberOfShards; j++) { - ShardId shardId = ShardId.readShardId(in); + ShardId shardId = new ShardId(in); SnapshotIndexShardStatus status = SnapshotIndexShardStatus.readShardSnapshotStatus(in); shardMapBuilder.put(shardId, status); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index 5dfc24d1e280e..c2f0d3dd0c074 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -119,23 +119,11 @@ protected void masterOperation(final SnapshotsStatusRequest request, TransportNodesSnapshotsStatus.Request nodesRequest = new TransportNodesSnapshotsStatus.Request(nodesIds.toArray(new String[nodesIds.size()])) .snapshots(snapshots).timeout(request.masterNodeTimeout()); - transportNodesSnapshotsStatus.execute(nodesRequest, new ActionListener() { - @Override - public void onResponse(TransportNodesSnapshotsStatus.NodesSnapshotStatus nodeSnapshotStatuses) { - try { - List currentSnapshots = - snapshotsService.currentSnapshots(request.repository(), Arrays.asList(request.snapshots())); - listener.onResponse(buildResponse(request, currentSnapshots, nodeSnapshotStatuses)); - } catch (Exception e) { - listener.onFailure(e); - } - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); + transportNodesSnapshotsStatus.execute(nodesRequest, + ActionListener.map( + listener, nodeSnapshotStatuses -> + buildResponse(request, snapshotsService.currentSnapshots(request.repository(), Arrays.asList(request.snapshots())), + 
nodeSnapshotStatuses))); } else { // We don't have any in-progress shards, just return current stats listener.onResponse(buildResponse(request, currentSnapshots, null)); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java index e2bbd655992de..3677cd6cb4e43 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.indices.analyze; import org.elasticsearch.action.Action; +import org.elasticsearch.common.io.stream.Writeable; public class AnalyzeAction extends Action { @@ -30,8 +31,13 @@ private AnalyzeAction() { super(NAME); } + @Override + public Writeable.Reader getResponseReader() { + return AnalyzeResponse::new; + } + @Override public AnalyzeResponse newResponse() { - return new AnalyzeResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java index e571db951cbc1..7e6d525cefb93 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -43,17 +43,14 @@ public class AnalyzeResponse extends ActionResponse implements Iterable, ToXContentObject { - public static class AnalyzeToken implements Streamable, ToXContentObject { - private String term; - private int startOffset; - private int endOffset; - private int position; - private int positionLength = 1; - private Map attributes; - private String type; - - AnalyzeToken() { - } + public static class AnalyzeToken implements Writeable, ToXContentObject { + private final String term; + private final int startOffset; + private final int endOffset; + private final int position; + private final int positionLength; + private final Map attributes; + private final String type; @Override public boolean equals(Object o) { @@ -74,7 +71,7 @@ public int hashCode() { return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type); } - public AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength, + AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength, String type, Map attributes) { this.term = term; this.position = position; @@ -85,6 +82,21 @@ public AnalyzeToken(String term, int position, int startOffset, int endOffset, i this.attributes = attributes; } + AnalyzeToken(StreamInput in) throws IOException { + term = in.readString(); + startOffset = in.readInt(); + endOffset = in.readInt(); + position = in.readVInt(); + Integer len = in.readOptionalVInt(); + if (len != null) { + positionLength = len; + } 
else { + positionLength = 1; + } + type = in.readOptionalString(); + attributes = in.readMap(); + } + public String getTerm() { return this.term; } @@ -134,12 +146,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static AnalyzeToken readAnalyzeToken(StreamInput in) throws IOException { - AnalyzeToken analyzeToken = new AnalyzeToken(); - analyzeToken.readFrom(in); - return analyzeToken; - } - public static AnalyzeToken fromXContent(XContentParser parser) throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); String field = null; @@ -184,22 +190,6 @@ public static AnalyzeToken fromXContent(XContentParser parser) throws IOExceptio return new AnalyzeToken(term, position, startOffset, endOffset, positionLength, type, attributes); } - @Override - public void readFrom(StreamInput in) throws IOException { - term = in.readString(); - startOffset = in.readInt(); - endOffset = in.readInt(); - position = in.readVInt(); - Integer len = in.readOptionalVInt(); - if (len != null) { - positionLength = len; - } else { - positionLength = 1; - } - type = in.readOptionalString(); - attributes = in.readMap(); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(term); @@ -212,18 +202,34 @@ public void writeTo(StreamOutput out) throws IOException { } } - private DetailAnalyzeResponse detail; - - private List tokens; - - AnalyzeResponse() { - } + private final DetailAnalyzeResponse detail; + private final List tokens; public AnalyzeResponse(List tokens, DetailAnalyzeResponse detail) { this.tokens = tokens; this.detail = detail; } + public AnalyzeResponse(StreamInput in) throws IOException { + super.readFrom(in); + int size = in.readVInt(); + if (size > 0) { + tokens = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + tokens.add(new AnalyzeToken(in)); + } + } + else { + tokens = null; + } + detail = in.readOptionalWriteable(DetailAnalyzeResponse::new); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + public List getTokens() { return this.tokens; } @@ -268,20 +274,6 @@ public static AnalyzeResponse fromXContent(XContentParser parser) throws IOExcep return PARSER.parse(parser, null); } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - int size = in.readVInt(); - tokens = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - tokens.add(AnalyzeToken.readAnalyzeToken(in)); - } - if (tokens.size() == 0) { - tokens = null; - } - detail = in.readOptionalStreamable(DetailAnalyzeResponse::new); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -293,7 +285,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeVInt(0); } - out.writeOptionalStreamable(detail); + out.writeOptionalWriteable(detail); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java index b44354047be03..1e84d9e0a2e1a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java @@ -24,7 +24,7 @@ import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -40,16 +40,13 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class DetailAnalyzeResponse implements Streamable, ToXContentFragment { +public class DetailAnalyzeResponse implements Writeable, ToXContentFragment { - private boolean customAnalyzer = false; - private AnalyzeTokenList analyzer; - private CharFilteredText[] charfilters; - private AnalyzeTokenList tokenizer; - private AnalyzeTokenList[] tokenfilters; - - DetailAnalyzeResponse() { - } + private final boolean customAnalyzer; + private final AnalyzeTokenList analyzer; + private final CharFilteredText[] charfilters; + private final AnalyzeTokenList tokenizer; + private final AnalyzeTokenList[] tokenfilters; public DetailAnalyzeResponse(AnalyzeTokenList analyzer) { this(false, analyzer, null, null, null); @@ -71,46 +68,55 @@ public DetailAnalyzeResponse(boolean customAnalyzer, this.tokenfilters = tokenfilters; } - public AnalyzeTokenList analyzer() { - return this.analyzer; + public DetailAnalyzeResponse(StreamInput in) throws IOException { + this.customAnalyzer = in.readBoolean(); + if (customAnalyzer) { + tokenizer = new AnalyzeTokenList(in); + int size = in.readVInt(); + if (size > 0) { + charfilters = new CharFilteredText[size]; + for (int i = 0; i < size; i++) { + charfilters[i] = new CharFilteredText(in); + } + } + else { + charfilters = null; + } + size = in.readVInt(); + if (size > 0) { + tokenfilters = new AnalyzeTokenList[size]; + for (int i = 0; i < size; i++) { + tokenfilters[i] = new AnalyzeTokenList(in); + } + } + else { + tokenfilters = null; + } + analyzer = null; + } else { + analyzer = new AnalyzeTokenList(in); + tokenfilters = null; + tokenizer = null; + charfilters = null; + } } - public DetailAnalyzeResponse analyzer(AnalyzeTokenList analyzer) { - this.customAnalyzer = false; - this.analyzer = analyzer; - return this; + public AnalyzeTokenList analyzer() { + return this.analyzer; } public CharFilteredText[] charfilters() { return this.charfilters; } - public DetailAnalyzeResponse charfilters(CharFilteredText[] charfilters) { - this.customAnalyzer = true; - this.charfilters = charfilters; - return this; - } - public AnalyzeTokenList tokenizer() { return tokenizer; } - public DetailAnalyzeResponse tokenizer(AnalyzeTokenList tokenizer) { - this.customAnalyzer = true; - this.tokenizer = tokenizer; - return this; - } - public AnalyzeTokenList[] tokenfilters() { return tokenfilters; } - public DetailAnalyzeResponse tokenfilters(AnalyzeTokenList[] tokenfilters) { - this.customAnalyzer = true; - this.tokenfilters = tokenfilters; - return this; - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -201,30 +207,6 @@ static final class Fields { static final String TOKENFILTERS = "tokenfilters"; } - @Override - public void readFrom(StreamInput in) throws IOException { - this.customAnalyzer = in.readBoolean(); - if (customAnalyzer) { - tokenizer = AnalyzeTokenList.readAnalyzeTokenList(in); - int size = in.readVInt(); 
- if (size > 0) { - charfilters = new CharFilteredText[size]; - for (int i = 0; i < size; i++) { - charfilters[i] = CharFilteredText.readCharFilteredText(in); - } - } - size = in.readVInt(); - if (size > 0) { - tokenfilters = new AnalyzeTokenList[size]; - for (int i = 0; i < size; i++) { - tokenfilters[i] = AnalyzeTokenList.readAnalyzeTokenList(in); - } - } - } else { - analyzer = AnalyzeTokenList.readAnalyzeTokenList(in); - } - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(customAnalyzer); @@ -251,9 +233,9 @@ public void writeTo(StreamOutput out) throws IOException { } } - public static class AnalyzeTokenList implements Streamable, ToXContentObject { - private String name; - private AnalyzeResponse.AnalyzeToken[] tokens; + public static class AnalyzeTokenList implements Writeable, ToXContentObject { + private final String name; + private final AnalyzeResponse.AnalyzeToken[] tokens; @Override public boolean equals(Object o) { @@ -271,14 +253,25 @@ public int hashCode() { return result; } - AnalyzeTokenList() { - } - public AnalyzeTokenList(String name, AnalyzeResponse.AnalyzeToken[] tokens) { this.name = name; this.tokens = tokens; } + public AnalyzeTokenList(StreamInput in) throws IOException { + name = in.readString(); + int size = in.readVInt(); + if (size > 0) { + tokens = new AnalyzeResponse.AnalyzeToken[size]; + for (int i = 0; i < size; i++) { + tokens[i] = new AnalyzeResponse.AnalyzeToken(in); + } + } + else { + tokens = null; + } + } + public String getName() { return name; } @@ -287,12 +280,6 @@ public AnalyzeResponse.AnalyzeToken[] getTokens() { return tokens; } - public static AnalyzeTokenList readAnalyzeTokenList(StreamInput in) throws IOException { - AnalyzeTokenList list = new AnalyzeTokenList(); - list.readFrom(in); - return list; - } - XContentBuilder toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException { builder.field(Fields.NAME, this.name); builder.startArray(AnalyzeResponse.Fields.TOKENS); @@ -327,18 +314,6 @@ public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOExce return PARSER.parse(parser, null); } - @Override - public void readFrom(StreamInput in) throws IOException { - name = in.readString(); - int size = in.readVInt(); - if (size > 0) { - tokens = new AnalyzeResponse.AnalyzeToken[size]; - for (int i = 0; i < size; i++) { - tokens[i] = AnalyzeResponse.AnalyzeToken.readAnalyzeToken(in); - } - } - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); @@ -353,12 +328,9 @@ public void writeTo(StreamOutput out) throws IOException { } } - public static class CharFilteredText implements Streamable, ToXContentObject { - private String name; - private String[] texts; - - CharFilteredText() { - } + public static class CharFilteredText implements Writeable, ToXContentObject { + private final String name; + private final String[] texts; public CharFilteredText(String name, String[] texts) { this.name = name; @@ -369,6 +341,11 @@ public CharFilteredText(String name, String[] texts) { } } + public CharFilteredText(StreamInput in) throws IOException { + name = in.readString(); + texts = in.readStringArray(); + } + public String getName() { return name; } @@ -398,18 +375,6 @@ public static CharFilteredText fromXContent(XContentParser parser) throws IOExce return PARSER.parse(parser, null); } - public static CharFilteredText readCharFilteredText(StreamInput in) throws IOException { - CharFilteredText text = new CharFilteredText(); - 
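
The stream constructors above all read their arrays the same way: a vInt count followed by one element per slot, with a zero count mapped back to null so a missing array round-trips as missing. A small sketch of that symmetric pair, with illustrative names (ExampleToken stands in for any element type that has a StreamInput constructor and a writeTo method):

    // write side: count first, then each element
    static void writeTokens(StreamOutput out, ExampleToken[] tokens) throws IOException {
        if (tokens != null) {
            out.writeVInt(tokens.length);
            for (ExampleToken token : tokens) {
                token.writeTo(out);
            }
        } else {
            out.writeVInt(0);
        }
    }

    // read side: a zero count means the array was null when written
    static ExampleToken[] readTokens(StreamInput in) throws IOException {
        int size = in.readVInt();
        if (size == 0) {
            return null;
        }
        ExampleToken[] tokens = new ExampleToken[size];
        for (int i = 0; i < size; i++) {
            tokens[i] = new ExampleToken(in);
        }
        return tokens;
    }
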
text.readFrom(in); - return text; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - name = in.readString(); - texts = in.readStringArray(); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 9538bd4b4d22c..55bd593742667 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -40,6 +40,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; @@ -49,9 +50,9 @@ import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.CustomAnalyzer; import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NormalizingCharFilterFactory; import org.elasticsearch.index.analysis.NormalizingTokenFilterFactory; -import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -96,8 +97,8 @@ public TransportAnalyzeAction(Settings settings, ThreadPool threadPool, ClusterS } @Override - protected AnalyzeResponse newResponse() { - return new AnalyzeResponse(); + protected Writeable.Reader getResponseReader() { + return AnalyzeResponse::new; } @Override @@ -140,14 +141,8 @@ protected AnalyzeResponse shardOperation(AnalyzeRequest request, ShardId shardId } MappedFieldType fieldType = indexService.mapperService().fullName(request.field()); if (fieldType != null) { - if (fieldType.tokenized()) { + if (fieldType.tokenized() || fieldType instanceof KeywordFieldMapper.KeywordFieldType) { analyzer = fieldType.indexAnalyzer(); - } else if (fieldType instanceof KeywordFieldMapper.KeywordFieldType) { - analyzer = ((KeywordFieldMapper.KeywordFieldType) fieldType).normalizer(); - if (analyzer == null) { - // this will be KeywordAnalyzer - analyzer = fieldType.indexAnalyzer(); - } } else { throw new IllegalArgumentException("Can't process field [" + request.field() + "], Analysis requests are only supported on tokenized fields"); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java index 5aa19652b676d..d372d8cf93f30 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.indices.mapping.get; import org.elasticsearch.action.Action; +import org.elasticsearch.common.io.stream.Writeable; public class GetFieldMappingsAction extends Action { @@ -32,6 +33,11 @@ private GetFieldMappingsAction() { @Override public GetFieldMappingsResponse newResponse() { 
- return new GetFieldMappingsResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return GetFieldMappingsResponse::new; } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index d3200bc1e1d9a..e3be9e6834287 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -92,9 +92,33 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte this.mappings = mappings; } + GetFieldMappingsResponse() { } + GetFieldMappingsResponse(StreamInput in) throws IOException { + super(in); + int size = in.readVInt(); + Map>> indexMapBuilder = new HashMap<>(size); + for (int i = 0; i < size; i++) { + String index = in.readString(); + int typesSize = in.readVInt(); + Map> typeMapBuilder = new HashMap<>(typesSize); + for (int j = 0; j < typesSize; j++) { + String type = in.readString(); + int fieldSize = in.readVInt(); + Map fieldMapBuilder = new HashMap<>(fieldSize); + for (int k = 0; k < fieldSize; k++) { + fieldMapBuilder.put(in.readString(), new FieldMappingMetaData(in.readString(), in.readBytesReference())); + } + typeMapBuilder.put(type, unmodifiableMap(fieldMapBuilder)); + } + indexMapBuilder.put(index, unmodifiableMap(typeMapBuilder)); + } + mappings = unmodifiableMap(indexMapBuilder); + + } + /** returns the retrieved field mapping. The return map keys are index, type, field (as specified in the request). 
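
GetFieldMappingsResponse reads back a three-level map (index, then type, then field) as nested size-prefixed blocks, wrapping each level in unmodifiableMap so the final field stays effectively immutable. A simplified sketch of a matching write/read pair for a two-level map of strings (the real class nests one level deeper and stores FieldMappingMetaData values; java.util.Map, HashMap and Collections are assumed imported):

    static void writeNested(StreamOutput out, Map<String, Map<String, String>> mappings) throws IOException {
        out.writeVInt(mappings.size());
        for (Map.Entry<String, Map<String, String>> index : mappings.entrySet()) {
            out.writeString(index.getKey());
            out.writeVInt(index.getValue().size());
            for (Map.Entry<String, String> field : index.getValue().entrySet()) {
                out.writeString(field.getKey());
                out.writeString(field.getValue());
            }
        }
    }

    static Map<String, Map<String, String>> readNested(StreamInput in) throws IOException {
        int size = in.readVInt();
        Map<String, Map<String, String>> mappings = new HashMap<>(size);
        for (int i = 0; i < size; i++) {
            String index = in.readString();
            int fields = in.readVInt();
            Map<String, String> perField = new HashMap<>(fields);
            for (int j = 0; j < fields; j++) {
                perField.put(in.readString(), in.readString());
            }
            mappings.put(index, Collections.unmodifiableMap(perField));
        }
        return Collections.unmodifiableMap(mappings);
    }
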
*/ public Map>> mappings() { return mappings; @@ -269,25 +293,7 @@ public int hashCode() { @Override public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - int size = in.readVInt(); - Map>> indexMapBuilder = new HashMap<>(size); - for (int i = 0; i < size; i++) { - String index = in.readString(); - int typesSize = in.readVInt(); - Map> typeMapBuilder = new HashMap<>(typesSize); - for (int j = 0; j < typesSize; j++) { - String type = in.readString(); - int fieldSize = in.readVInt(); - Map fieldMapBuilder = new HashMap<>(fieldSize); - for (int k = 0; k < fieldSize; k++) { - fieldMapBuilder.put(in.readString(), new FieldMappingMetaData(in.readString(), in.readBytesReference())); - } - typeMapBuilder.put(type, unmodifiableMap(fieldMapBuilder)); - } - indexMapBuilder.put(index, unmodifiableMap(typeMapBuilder)); - } - mappings = unmodifiableMap(indexMapBuilder); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index c7415391675fc..61a598c361cc9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -32,6 +32,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentHelper; @@ -123,8 +124,8 @@ protected GetFieldMappingsResponse shardOperation(final GetFieldMappingsIndexReq } @Override - protected GetFieldMappingsResponse newResponse() { - return new GetFieldMappingsResponse(); + protected Writeable.Reader getResponseReader() { + return GetFieldMappingsResponse::new; } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexShardStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexShardStats.java index 6cdcabccbc481..6c1de5b2992c4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexShardStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexShardStats.java @@ -90,7 +90,7 @@ public CommonStats getPrimary() { @Override public void readFrom(StreamInput in) throws IOException { - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); int shardSize = in.readVInt(); shards = new ShardStats[shardSize]; for (int i = 0; i < shardSize; i++) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResult.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResult.java index cca5a812c3e96..7b57c9680bd16 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResult.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResult.java @@ -68,7 +68,7 @@ public boolean primary() { @Override public void readFrom(StreamInput in) throws IOException { - shardId = ShardId.readShardId(in); + shardId = 
new ShardId(in); primary = in.readBoolean(); upgradeVersion = Version.readVersion(in); try { @@ -86,4 +86,4 @@ public void writeTo(StreamOutput out) throws IOException { Version.writeVersion(upgradeVersion, out); out.writeString(oldestLuceneSegment.toString()); } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java index f2d046f3321b2..b122350c3e61d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java @@ -184,26 +184,13 @@ protected ClusterBlockException checkRequestBlock(ClusterState state, UpgradeReq @Override protected void doExecute(Task task, UpgradeRequest request, final ActionListener listener) { - ActionListener settingsUpdateListener = new ActionListener() { - @Override - public void onResponse(UpgradeResponse upgradeResponse) { - try { - if (upgradeResponse.versions().isEmpty()) { - listener.onResponse(upgradeResponse); - } else { - updateSettings(upgradeResponse, listener); - } - } catch (Exception e) { - listener.onFailure(e); - } - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); + super.doExecute(task, request, ActionListener.wrap(upgradeResponse -> { + if (upgradeResponse.versions().isEmpty()) { + listener.onResponse(upgradeResponse); + } else { + updateSettings(upgradeResponse, listener); } - }; - super.doExecute(task, request, settingsUpdateListener); + }, listener::onFailure)); } private void updateSettings(final UpgradeResponse upgradeResponse, final ActionListener listener) { diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java index 2f5db520088e9..7890fb4e83fc1 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java @@ -59,27 +59,20 @@ public void execute(BulkRequest bulkRequest, long executionId) { semaphore.acquire(); toRelease = semaphore::release; CountDownLatch latch = new CountDownLatch(1); - retry.withBackoff(consumer, bulkRequest, new ActionListener() { + retry.withBackoff(consumer, bulkRequest, ActionListener.runAfter(new ActionListener() { @Override public void onResponse(BulkResponse response) { - try { - listener.afterBulk(executionId, bulkRequest, response); - } finally { - semaphore.release(); - latch.countDown(); - } + listener.afterBulk(executionId, bulkRequest, response); } @Override public void onFailure(Exception e) { - try { - listener.afterBulk(executionId, bulkRequest, e); - } finally { - semaphore.release(); - latch.countDown(); - } + listener.afterBulk(executionId, bulkRequest, e); } - }); + }, () -> { + semaphore.release(); + latch.countDown(); + })); bulkRequestSetupSuccessful = true; if (concurrentRequests == 0) { latch.await(); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java index aa368c13fb80e..fc58e620738da 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java @@ -67,7 +67,7 @@ public void 
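
TransportUpgradeAction and BulkRequestHandler above replace hand-written anonymous ActionListener classes with the ActionListener.wrap and ActionListener.runAfter helpers. wrap builds a listener from two lambdas and forwards any exception thrown by the response lambda to the failure handler, which is why the explicit try/catch blocks disappear; runAfter runs a cleanup action after either outcome. A sketch of how the two compose (handle, logger, semaphore and latch are illustrative):

    ActionListener<ExampleResponse> listener = ActionListener.wrap(
        response -> handle(response),            // onResponse; a thrown exception is routed to onFailure
        e -> logger.warn("request failed", e));  // onFailure

    // runAfter guarantees the cleanup runs on both the success and the failure path,
    // which is what BulkRequestHandler needs for its semaphore release and latch countdown
    ActionListener<ExampleResponse> withCleanup = ActionListener.runAfter(listener, () -> {
        semaphore.release();
        latch.countDown();
    });
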
setForcedRefresh(boolean forcedRefresh) { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); responses = new BulkItemResponse[in.readVInt()]; for (int i = 0; i < responses.length; i++) { responses[i] = BulkItemResponse.readBulkItem(in); diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java index 13c9d94e7dbc7..ba5618ce7de21 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.explain; import org.elasticsearch.action.Action; +import org.elasticsearch.common.io.stream.Writeable; /** * Entry point for the explain feature. @@ -35,6 +36,11 @@ private ExplainAction() { @Override public ExplainResponse newResponse() { - return new ExplainResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return ExplainResponse::new; } } diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java index 5cecdf3a8b6b3..8ea7d0f12e341 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java @@ -60,6 +60,7 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO private Explanation explanation; private GetResult getResult; + // TODO(talevy): remove dependency on empty constructor from ExplainResponseTests ExplainResponse() { } @@ -80,6 +81,20 @@ public ExplainResponse(String index, String type, String id, boolean exists, Exp this.getResult = getResult; } + public ExplainResponse(StreamInput in) throws IOException { + super(in); + index = in.readString(); + type = in.readString(); + id = in.readString(); + exists = in.readBoolean(); + if (in.readBoolean()) { + explanation = readExplanation(in); + } + if (in.readBoolean()) { + getResult = GetResult.readGetResult(in); + } + } + public String getIndex() { return index; } @@ -123,17 +138,7 @@ public RestStatus status() { @Override public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - index = in.readString(); - type = in.readString(); - id = in.readString(); - exists = in.readBoolean(); - if (in.readBoolean()) { - explanation = readExplanation(in); - } - if (in.readBoolean()) { - getResult = GetResult.readGetResult(in); - } + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index fe8475322592f..c29da21fe4afe 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -32,6 +32,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lease.Releasables; import 
org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; @@ -152,8 +153,8 @@ protected ExplainResponse shardOperation(ExplainRequest request, ShardId shardId } @Override - protected ExplainResponse newResponse() { - return new ExplainResponse(); + protected Writeable.Reader getResponseReader() { + return ExplainResponse::new; } @Override diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java index 01c21544047ed..274633b12a613 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ObjectMapper; @@ -114,8 +115,8 @@ protected FieldCapabilitiesIndexResponse shardOperation(final FieldCapabilitiesI } @Override - protected FieldCapabilitiesIndexResponse newResponse() { - return new FieldCapabilitiesIndexResponse(); + protected Writeable.Reader getResponseReader() { + return FieldCapabilitiesIndexResponse::new; } @Override diff --git a/server/src/main/java/org/elasticsearch/action/get/GetAction.java b/server/src/main/java/org/elasticsearch/action/get/GetAction.java index a622fd5a8178b..05d1b6c5a4c02 100644 --- a/server/src/main/java/org/elasticsearch/action/get/GetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/GetAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.get; import org.elasticsearch.action.Action; +import org.elasticsearch.common.io.stream.Writeable; public class GetAction extends Action { @@ -32,6 +33,11 @@ private GetAction() { @Override public GetResponse newResponse() { - return new GetResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return GetResponse::new; } } diff --git a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java index b9383785678b7..3d340d455ceb8 100644 --- a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java @@ -48,7 +48,9 @@ public class GetResponse extends ActionResponse implements Iterable responses; - List failures; + final IntArrayList locations; + final List responses; + final List failures; MultiGetShardResponse() { locations = new IntArrayList(); @@ -40,21 +40,8 @@ public class MultiGetShardResponse extends ActionResponse { failures = new ArrayList<>(); } - public void add(int location, GetResponse response) { - locations.add(location); - responses.add(response); - failures.add(null); - } - - public void add(int location, MultiGetResponse.Failure failure) { - locations.add(location); - responses.add(null); - failures.add(failure); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); + MultiGetShardResponse(StreamInput in) throws IOException { + super(in); int size = 
in.readVInt(); locations = new IntArrayList(size); responses = new ArrayList<>(size); @@ -62,9 +49,7 @@ public void readFrom(StreamInput in) throws IOException { for (int i = 0; i < size; i++) { locations.add(in.readVInt()); if (in.readBoolean()) { - GetResponse response = new GetResponse(); - response.readFrom(in); - responses.add(response); + responses.add(new GetResponse(in)); } else { responses.add(null); } @@ -76,6 +61,23 @@ public void readFrom(StreamInput in) throws IOException { } } + public void add(int location, GetResponse response) { + locations.add(location); + responses.add(response); + failures.add(null); + } + + public void add(int location, MultiGetResponse.Failure failure) { + locations.add(location); + responses.add(null); + failures.add(failure); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -96,4 +98,4 @@ public void writeTo(StreamOutput out) throws IOException { } } } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 7bc736b69f38c..65f42835f7374 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.shard.IndexShard; @@ -108,8 +109,8 @@ protected GetResponse shardOperation(GetRequest request, ShardId shardId) { } @Override - protected GetResponse newResponse() { - return new GetResponse(); + protected Writeable.Reader getResponseReader() { + return GetResponse::new; } @Override diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 6c48b3b87c59f..9b8ea6bd6cac5 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.shard.IndexShard; @@ -57,8 +58,8 @@ protected boolean isSubAction() { } @Override - protected MultiGetShardResponse newResponse() { - return new MultiGetShardResponse(); + protected Writeable.Reader getResponseReader() { + return MultiGetShardResponse::new; } @Override diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 97f13bf71d14c..be1528a354bc3 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.TransportMasterNodeAction; @@ -74,25 +73,13 @@ protected void masterOperation(PutPipelineRequest request, ClusterState state, A NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); nodesInfoRequest.clear(); nodesInfoRequest.ingest(true); - client.admin().cluster().nodesInfo(nodesInfoRequest, new ActionListener() { - @Override - public void onResponse(NodesInfoResponse nodeInfos) { - try { - Map ingestInfos = new HashMap<>(); - for (NodeInfo nodeInfo : nodeInfos.getNodes()) { - ingestInfos.put(nodeInfo.getNode(), nodeInfo.getIngest()); - } - ingestService.putPipeline(ingestInfos, request, listener); - } catch (Exception e) { - onFailure(e); - } + client.admin().cluster().nodesInfo(nodesInfoRequest, ActionListener.wrap(nodeInfos -> { + Map ingestInfos = new HashMap<>(); + for (NodeInfo nodeInfo : nodeInfos.getNodes()) { + ingestInfos.put(nodeInfo.getNode(), nodeInfo.getIngest()); } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); + ingestService.putPipeline(ingestInfos, request, listener); + }, listener::onFailure)); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java index 298c7593f6c97..9017a7b94ecb4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.Action; +import org.elasticsearch.common.io.stream.Writeable; public class MultiSearchAction extends Action { @@ -32,6 +33,11 @@ private MultiSearchAction() { @Override public MultiSearchResponse newResponse() { - return new MultiSearchResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return MultiSearchResponse::new; } } diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java index 3ea90073f2b96..10163502391cd 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -59,19 +59,36 @@ public class MultiSearchResponse extends ActionResponse implements Iterable { @@ -32,6 +33,11 @@ private SearchAction() { @Override public SearchResponse 
newResponse() { - return new SearchResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return SearchResponse::new; } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 18c2f529a711d..6b641906d2e32 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -286,12 +286,19 @@ boolean isFinalReduce() { * ensure that the same value, determined by the coordinating node, is used on all nodes involved in the execution of the search * request. When created through {@link #crossClusterSearch(SearchRequest, String[], String, long, boolean)}, this method returns * the provided current time, otherwise it will return {@link System#currentTimeMillis()}. - * */ long getOrCreateAbsoluteStartMillis() { return absoluteStartMillis == DEFAULT_ABSOLUTE_START_MILLIS ? System.currentTimeMillis() : absoluteStartMillis; } + /** + * Returns the provided absoluteStartMillis when created through {@link #crossClusterSearch} and + * -1 otherwise. + */ + long getAbsoluteStartMillis() { + return absoluteStartMillis; + } + /** * Sets the indices the search will be executed on. */ diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index b9b1887be2da8..b93b99ae15998 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -66,23 +66,33 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb private static final ParseField TERMINATED_EARLY = new ParseField("terminated_early"); private static final ParseField NUM_REDUCE_PHASES = new ParseField("num_reduce_phases"); - private SearchResponseSections internalResponse; - - private String scrollId; - - private int totalShards; - - private int successfulShards; - - private int skippedShards; - - private ShardSearchFailure[] shardFailures; - - private Clusters clusters; - - private long tookInMillis; - - public SearchResponse() { + private final SearchResponseSections internalResponse; + private final String scrollId; + private final int totalShards; + private final int successfulShards; + private final int skippedShards; + private final ShardSearchFailure[] shardFailures; + private final Clusters clusters; + private final long tookInMillis; + + public SearchResponse(StreamInput in) throws IOException { + super(in); + internalResponse = new InternalSearchResponse(in); + totalShards = in.readVInt(); + successfulShards = in.readVInt(); + int size = in.readVInt(); + if (size == 0) { + shardFailures = ShardSearchFailure.EMPTY_ARRAY; + } else { + shardFailures = new ShardSearchFailure[size]; + for (int i = 0; i < shardFailures.length; i++) { + shardFailures[i] = readShardSearchFailure(in); + } + } + clusters = new Clusters(in); + scrollId = in.readOptionalString(); + tookInMillis = in.readVLong(); + skippedShards = in.readVInt(); } public SearchResponse(SearchResponseSections internalResponse, String scrollId, int totalShards, int successfulShards, @@ -193,10 +203,6 @@ public String getScrollId() { return scrollId; } - public void scrollId(String scrollId) { - this.scrollId = scrollId; - } - /** * If 
profiling was enabled, this returns an object containing the profile results from * each shard. If profiling was not enabled, this will return null @@ -355,24 +361,8 @@ static SearchResponse innerFromXContent(XContentParser parser) throws IOExceptio } @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - internalResponse = new InternalSearchResponse(in); - totalShards = in.readVInt(); - successfulShards = in.readVInt(); - int size = in.readVInt(); - if (size == 0) { - shardFailures = ShardSearchFailure.EMPTY_ARRAY; - } else { - shardFailures = new ShardSearchFailure[size]; - for (int i = 0; i < shardFailures.length; i++) { - shardFailures[i] = readShardSearchFailure(in); - } - } - clusters = new Clusters(in); - scrollId = in.readOptionalString(); - tookInMillis = in.readVLong(); - skippedShards = in.readVInt(); + public void readFrom(StreamInput in) { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java index ff72a7e5e51f3..0b4adfc1ba55c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.Action; +import org.elasticsearch.common.io.stream.Writeable; public class SearchScrollAction extends Action { @@ -32,6 +33,11 @@ private SearchScrollAction() { @Override public SearchResponse newResponse() { - return new SearchResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return SearchResponse::new; } } diff --git a/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java b/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java index dfcf6445abf7d..ad72ef10139ba 100644 --- a/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java @@ -22,6 +22,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.Settings; @@ -86,21 +87,16 @@ public ThreadedActionListener(Logger logger, ThreadPool threadPool, String execu @Override public void onResponse(final Response response) { - threadPool.executor(executor).execute(new AbstractRunnable() { + threadPool.executor(executor).execute(new ActionRunnable<>(listener) { @Override public boolean isForceExecution() { return forceExecution; } @Override - protected void doRun() throws Exception { + protected void doRun() { listener.onResponse(response); } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } }); } diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java index 3869ab891b837..f61ff4ac748b0 100644 --- 
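
ThreadedActionListener above (and the broadcast and single-shard actions further down) switch from bare AbstractRunnable subclasses to ActionRunnable, which already holds the listener and forwards onFailure to it, so only doRun needs to be supplied. A sketch of the pattern (executorName and computeResponse are illustrative; listener is the ActionListener being completed):

    threadPool.executor(executorName).execute(new ActionRunnable<>(listener) {
        @Override
        protected void doRun() throws Exception {
            // any exception escaping doRun is routed to listener.onFailure by the base classes
            listener.onResponse(computeResponse());
        }
    });
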
a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java @@ -60,7 +60,7 @@ public IndicesOptions indicesOptions() { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); originalIndices = OriginalIndices.readOriginalIndices(in); } diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java index 6845e6ced6cee..51e59a8c18982 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java @@ -49,7 +49,7 @@ public ShardId getShardId() { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); } @Override @@ -57,4 +57,4 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); shardId.writeTo(out); } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index 87c9e15324152..15daaf786b604 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -21,6 +21,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -36,7 +37,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; @@ -287,45 +287,25 @@ class ShardTransportHandler implements TransportRequestHandler { @Override public void messageReceived(ShardRequest request, TransportChannel channel, Task task) throws Exception { - asyncShardOperation(request, task, new ActionListener() { - @Override - public void onResponse(ShardResponse response) { - try { - channel.sendResponse(response); - } catch (Exception e) { - onFailure(e); - } - } - - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (Exception e1) { - logger.warn(() -> new ParameterizedMessage( - "Failed to send error response for action [{}] and request [{}]", actionName, request), e1); + asyncShardOperation(request, task, + ActionListener.wrap(channel::sendResponse, e -> { + try { + channel.sendResponse(e); + } catch (Exception e1) { + logger.warn(() -> new ParameterizedMessage( + "Failed to send error response for action [{}] and request [{}]", actionName, request), e1); + } } - } - }); + )); } } protected void asyncShardOperation(ShardRequest request, Task task, ActionListener listener) { - 
transportService.getThreadPool().executor(getExecutor(request)).execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - + transportService.getThreadPool().executor(shardExecutor).execute(new ActionRunnable(listener) { @Override protected void doRun() throws Exception { listener.onResponse(shardOperation(request, task)); } }); } - - protected String getExecutor(ShardRequest request) { - return shardExecutor; - } - } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java index 31d18d4dc0537..857103071e022 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java @@ -69,7 +69,7 @@ public abstract class ReplicationRequest @Override public void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { - shardOperation(request, new ActionListener() { - @Override - public void onResponse(Response response) { - try { - channel.sendResponse(response); - } catch (Exception e) { - onFailure(e); - } - } - - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (Exception inner) { - inner.addSuppressed(e); - logger.warn("failed to send response for get", inner); + shardOperation(request, + ActionListener.wrap(channel::sendResponse, e -> { + try { + channel.sendResponse(e); + } catch (Exception inner) { + inner.addSuppressed(e); + logger.warn("failed to send response for get", inner); + } } - } - }); - + )); } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java index 6dc11877e7cd7..15b10832b4eff 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java @@ -97,7 +97,7 @@ public IndicesOptions indicesOptions() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); if (in.readBoolean()) { - internalShardId = ShardId.readShardId(in); + internalShardId = new ShardId(in); } index = in.readOptionalString(); // no need to pass threading over the network, they are always false when coming throw a thread pool @@ -106,9 +106,8 @@ public void readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeOptionalStreamable(internalShardId); + out.writeOptionalWriteable(internalShardId); out.writeOptionalString(index); } - } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index 6d7ad085dcd1e..123ed11769aa7 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -22,6 +22,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionRunnable; import 
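
SingleShardRequest above now writes its optional ShardId with writeOptionalWriteable, which emits a boolean presence flag followed by the value, and the read side checks that flag before constructing the ShardId. Where the reader is available, readOptionalWriteable does the same check in one call, as the AnalyzeResponse change earlier does for its detail field. A tiny sketch of the symmetric calls:

    // write: boolean presence flag, then the value's writeTo if present
    out.writeOptionalWriteable(shardId);

    // read: either form consumes the same bytes
    ShardId a = in.readBoolean() ? new ShardId(in) : null;
    ShardId b = in.readOptionalWriteable(ShardId::new);
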
org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ChannelActionListener; @@ -38,8 +39,8 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.LoggerMessageFormat; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -106,19 +107,15 @@ protected void doExecute(Task task, Request request, ActionListener li protected abstract Response shardOperation(Request request, ShardId shardId) throws IOException; protected void asyncShardOperation(Request request, ShardId shardId, ActionListener listener) throws IOException { - threadPool.executor(getExecutor(request, shardId)).execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - + threadPool.executor(getExecutor(request, shardId)).execute(new ActionRunnable<>(listener) { @Override protected void doRun() throws Exception { listener.onResponse(shardOperation(request, shardId)); } }); } - protected abstract Response newResponse(); + + protected abstract Writeable.Reader getResponseReader(); protected abstract boolean resolveIndex(Request request); @@ -182,13 +179,12 @@ private AsyncSingleAction(Request request, ActionListener listener) { public void start() { if (shardIt == null) { // just execute it on the local node + final Writeable.Reader reader = getResponseReader(); transportService.sendRequest(clusterService.localNode(), transportShardAction, internalRequest.request(), new TransportResponseHandler() { @Override public Response read(StreamInput in) throws IOException { - Response response = newResponse(); - response.readFrom(in); - return response; + return reader.read(in); } @Override @@ -251,14 +247,13 @@ private void perform(@Nullable final Exception currentFailure) { node ); } + final Writeable.Reader reader = getResponseReader(); transportService.sendRequest(node, transportShardAction, internalRequest.request(), new TransportResponseHandler() { @Override public Response read(StreamInput in) throws IOException { - Response response = newResponse(); - response.readFrom(in); - return response; + return reader.read(in); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index c2f9872ca5cee..8d80a15beb14b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -329,19 +329,8 @@ class NodeTransportHandler implements TransportRequestHandler { @Override public void messageReceived(final NodeTaskRequest request, final TransportChannel channel, Task task) throws Exception { - nodeOperation(request, new ActionListener() { - @Override - public void onResponse( - TransportTasksAction.NodeTasksResponse response) { - try { - channel.sendResponse(response); - } catch (Exception e) { - onFailure(e); - } - } - - @Override - public void onFailure(Exception e) { + nodeOperation(request, ActionListener.wrap(channel::sendResponse, + e -> { try { channel.sendResponse(e); } catch 
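
On the sending side of the same migration, TransportSingleShardAction above stops materialising an empty response and filling it with readFrom; the handler asks the action for its Writeable.Reader and delegates to it. A condensed sketch of such a handler, reusing the illustrative ExampleResponse from the earlier sketch (TransportResponseHandler, TransportException and ThreadPool are assumed imported; listener is the caller's ActionListener):

    final Writeable.Reader<ExampleResponse> reader = ExampleResponse::new;
    TransportResponseHandler<ExampleResponse> handler = new TransportResponseHandler<ExampleResponse>() {
        @Override
        public ExampleResponse read(StreamInput in) throws IOException {
            return reader.read(in);  // replaces: response = newResponse(); response.readFrom(in);
        }

        @Override
        public void handleResponse(ExampleResponse response) {
            listener.onResponse(response);
        }

        @Override
        public void handleException(TransportException exp) {
            listener.onFailure(exp);
        }

        @Override
        public String executor() {
            return ThreadPool.Names.SAME;
        }
    };
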
(IOException e1) { @@ -349,11 +338,10 @@ public void onFailure(Exception e) { logger.warn("Failed to send failure", e1); } } - }); + )); } } - private class NodeTaskRequest extends TransportRequest { private TasksRequest tasksRequest; diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsItemResponse.java b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsItemResponse.java index 3e32af7f2c250..14ac59cb132bd 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsItemResponse.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsItemResponse.java @@ -105,8 +105,7 @@ public void readFrom(StreamInput in) throws IOException { if (in.readBoolean()) { failure = MultiTermVectorsResponse.Failure.readFailure(in); } else { - response = new TermVectorsResponse(); - response.readFrom(in); + response = new TermVectorsResponse(in); } } @@ -120,4 +119,4 @@ public void writeTo(StreamOutput out) throws IOException { response.writeTo(out); } } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardResponse.java b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardResponse.java index 346274c5925b3..2290ee9f52e42 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardResponse.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardResponse.java @@ -30,9 +30,9 @@ public class MultiTermVectorsShardResponse extends ActionResponse { - IntArrayList locations; - List responses; - List failures; + final IntArrayList locations; + final List responses; + final List failures; MultiTermVectorsShardResponse() { locations = new IntArrayList(); @@ -40,21 +40,8 @@ public class MultiTermVectorsShardResponse extends ActionResponse { failures = new ArrayList<>(); } - public void add(int location, TermVectorsResponse response) { - locations.add(location); - responses.add(response); - failures.add(null); - } - - public void add(int location, MultiTermVectorsResponse.Failure failure) { - locations.add(location); - responses.add(null); - failures.add(failure); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); + MultiTermVectorsShardResponse(StreamInput in) throws IOException { + super(in); int size = in.readVInt(); locations = new IntArrayList(size); responses = new ArrayList<>(size); @@ -62,9 +49,7 @@ public void readFrom(StreamInput in) throws IOException { for (int i = 0; i < size; i++) { locations.add(in.readVInt()); if (in.readBoolean()) { - TermVectorsResponse response = new TermVectorsResponse(); - response.readFrom(in); - responses.add(response); + responses.add(new TermVectorsResponse(in)); } else { responses.add(null); } @@ -76,6 +61,23 @@ public void readFrom(StreamInput in) throws IOException { } } + public void add(int location, TermVectorsResponse response) { + locations.add(location); + responses.add(response); + failures.add(null); + } + + public void add(int location, MultiTermVectorsResponse.Failure failure) { + locations.add(location); + responses.add(null); + failures.add(failure); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -96,4 +98,4 @@ public void 
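
The TransportTasksAction handler above, like the broadcast and single-shard handlers earlier, collapses its anonymous listener into ActionListener.wrap: a channel::sendResponse method reference for the success path and a small lambda that tries to send the exception back, logging if even that fails. A sketch of the shape (nodeOperation, request, actionName and logger are whatever the surrounding handler has in scope):

    nodeOperation(request, ActionListener.wrap(
        channel::sendResponse,               // success: stream the response back over the channel
        e -> {
            try {
                channel.sendResponse(e);     // failure: return the exception to the caller instead
            } catch (IOException inner) {
                inner.addSuppressed(e);
                logger.warn("failed to send failure response for action [" + actionName + "]", inner);
            }
        }));
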
writeTo(StreamOutput out) throws IOException { } } } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java index e701efe93ba7a..9b223eed3a3c8 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.termvectors; import org.elasticsearch.action.Action; +import org.elasticsearch.common.io.stream.Writeable; public class TermVectorsAction extends Action { @@ -32,6 +33,11 @@ private TermVectorsAction() { @Override public TermVectorsResponse newResponse() { - return new TermVectorsResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return TermVectorsResponse::new; } } diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java index 9159a07e83c03..3d0fb75f8d3eb 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java @@ -103,6 +103,20 @@ public TermVectorsResponse(String index, String type, String id) { TermVectorsResponse() { } + TermVectorsResponse(StreamInput in) throws IOException { + index = in.readString(); + type = in.readString(); + id = in.readString(); + docVersion = in.readVLong(); + exists = in.readBoolean(); + artificial = in.readBoolean(); + tookInMillis = in.readVLong(); + if (in.readBoolean()) { + headerRef = in.readBytesReference(); + termVectors = in.readBytesReference(); + } + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(index); @@ -127,17 +141,7 @@ private boolean hasTermVectors() { @Override public void readFrom(StreamInput in) throws IOException { - index = in.readString(); - type = in.readString(); - id = in.readString(); - docVersion = in.readVLong(); - exists = in.readBoolean(); - artificial = in.readBoolean(); - tookInMillis = in.readVLong(); - if (in.readBoolean()) { - headerRef = in.readBytesReference(); - termVectors = in.readBytesReference(); - } + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } public Fields getFields() throws IOException { diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java index e8d6c1bcb4ff6..0292757121eff 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; @@ -58,8 +59,8 @@ protected boolean isSubAction() { } @Override - protected MultiTermVectorsShardResponse 
newResponse() { - return new MultiTermVectorsShardResponse(); + protected Writeable.Reader getResponseReader() { + return MultiTermVectorsShardResponse::new; } @Override diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java index d87a08a0541aa..0e212ab7cce2f 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; @@ -110,8 +111,8 @@ protected TermVectorsResponse shardOperation(TermVectorsRequest request, ShardId } @Override - protected TermVectorsResponse newResponse() { - return new TermVectorsResponse(); + protected Writeable.Reader getResponseReader() { + return TermVectorsResponse::new; } @Override diff --git a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java index b5720c023f095..4c2f4932de2f2 100644 --- a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -184,7 +184,6 @@ private static ClientTemplate buildTemplate(Settings providedSettings, Settings resourcesToClose.add(circuitBreakerService); PageCacheRecycler pageCacheRecycler = new PageCacheRecycler(settings); BigArrays bigArrays = new BigArrays(pageCacheRecycler, circuitBreakerService, CircuitBreaker.REQUEST); - resourcesToClose.add(pageCacheRecycler); modules.add(settingsModule); NetworkModule networkModule = new NetworkModule(settings, true, pluginsService.filterPlugins(NetworkPlugin.class), threadPool, bigArrays, pageCacheRecycler, circuitBreakerService, namedWriteableRegistry, xContentRegistry, networkService, null); @@ -376,7 +375,6 @@ public void close() { closeables.add(plugin); } closeables.add(() -> ThreadPool.terminate(injector.getInstance(ThreadPool.class), 10, TimeUnit.SECONDS)); - closeables.add(injector.getInstance(PageCacheRecycler.class)); IOUtils.closeWhileHandlingException(closeables); } diff --git a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java index 53e66b1790b3b..6b35a5bd307a0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java @@ -440,7 +440,7 @@ public RestoreInProgress(StreamInput in) throws IOException { ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); int shards = in.readVInt(); for (int j = 0; j < shards; j++) { - ShardId shardId = ShardId.readShardId(in); + ShardId shardId = new ShardId(in); ShardRestoreStatus shardState = ShardRestoreStatus.readShardRestoreStatus(in); builder.put(shardId, shardState); } diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index b5827dd01a1d1..5190adf7ba2d9 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -432,7 +432,7 @@ public SnapshotsInProgress(StreamInput in) throws IOException { ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); int shards = in.readVInt(); for (int j = 0; j < shards; j++) { - ShardId shardId = ShardId.readShardId(in); + ShardId shardId = new ShardId(in); builder.put(shardId, new ShardSnapshotStatus(in)); } long repositoryStateId = in.readLong(); diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index c86da4b789bf0..1f9a45c4713f6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -394,7 +394,7 @@ public static class FailedShardEntry extends TransportRequest { FailedShardEntry(StreamInput in) throws IOException { super(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); allocationId = in.readString(); primaryTerm = in.readVLong(); message = in.readString(); @@ -592,7 +592,7 @@ public static class StartedShardEntry extends TransportRequest { StartedShardEntry(StreamInput in) throws IOException { super(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); allocationId = in.readString(); primaryTerm = in.readVLong(); this.message = in.readString(); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java index a38a383b269d5..ac75c83c19a26 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/CoordinationState.java @@ -480,6 +480,7 @@ default void markLastAcceptedStateAsCommitted() { metaDataBuilder = MetaData.builder(lastAcceptedState.metaData()); } metaDataBuilder.clusterUUIDCommitted(true); + logger.info("cluster UUID set to [{}]", lastAcceptedState.metaData().clusterUUID()); } if (metaDataBuilder != null) { setLastAcceptedState(ClusterState.builder(lastAcceptedState).metaData(metaDataBuilder).build()); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 154f4ab162d71..6304588e3121a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -478,7 +478,6 @@ public void onFailure(Exception e) { }); } - private void processJoinRequest(JoinRequest joinRequest, JoinHelper.JoinCallback joinCallback) { final Optional optionalJoin = joinRequest.getOptionalJoin(); synchronized (mutex) { @@ -648,7 +647,11 @@ protected void doStart() { coordinationState.set(new CoordinationState(settings, getLocalNode(), persistedState)); peerFinder.setCurrentTerm(getCurrentTerm()); configuredHostsResolver.start(); - VotingConfiguration votingConfiguration = coordinationState.get().getLastAcceptedState().getLastCommittedConfiguration(); + final ClusterState lastAcceptedState = coordinationState.get().getLastAcceptedState(); + if (lastAcceptedState.metaData().clusterUUIDCommitted()) { + logger.info("cluster UUID [{}]", lastAcceptedState.metaData().clusterUUID()); + } + final 
VotingConfiguration votingConfiguration = lastAcceptedState.getLastCommittedConfiguration(); if (singleNodeDiscovery && votingConfiguration.isEmpty() == false && votingConfiguration.hasQuorum(Collections.singleton(getLocalNode().getId())) == false) { @@ -997,9 +1000,10 @@ public void publish(ClusterChangedEvent clusterChangedEvent, ActionListener(), ackListener, publishListener); currentPublication = Optional.of(publication); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ElasticsearchNodeCommand.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ElasticsearchNodeCommand.java index 3d59e2bceacdb..ec664c97067d1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ElasticsearchNodeCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ElasticsearchNodeCommand.java @@ -44,7 +44,7 @@ public abstract class ElasticsearchNodeCommand extends EnvironmentAwareCommand { private static final Logger logger = LogManager.getLogger(ElasticsearchNodeCommand.class); protected final NamedXContentRegistry namedXContentRegistry; - static final String DELIMITER = "------------------------------------------------------------------------\n"; + protected static final String DELIMITER = "------------------------------------------------------------------------\n"; static final String STOP_WARNING_MSG = DELIMITER + @@ -81,9 +81,8 @@ protected void processNodePathsWithLock(Terminal terminal, OptionSet options, En throw new ElasticsearchException(NO_NODE_FOLDER_FOUND_MSG); } processNodePaths(terminal, dataPaths, env); - } catch (LockObtainFailedException ex) { - throw new ElasticsearchException( - FAILED_TO_OBTAIN_NODE_LOCK_MSG + " [" + ex.getMessage() + "]"); + } catch (LockObtainFailedException e) { + throw new ElasticsearchException(FAILED_TO_OBTAIN_NODE_LOCK_MSG, e); } } @@ -177,6 +176,17 @@ protected void cleanUpOldMetaData(Terminal terminal, Path[] dataPaths, long newG MetaData.FORMAT.cleanupOldFiles(newGeneration, dataPaths); } + protected NodeEnvironment.NodePath[] toNodePaths(Path[] dataPaths) { + return Arrays.stream(dataPaths).map(ElasticsearchNodeCommand::createNodePath).toArray(NodeEnvironment.NodePath[]::new); + } + + private static NodeEnvironment.NodePath createNodePath(Path path) { + try { + return new NodeEnvironment.NodePath(path); + } catch (IOException e) { + throw new ElasticsearchException("Unable to investigate path [" + path + "]", e); + } + } //package-private for testing OptionParser getParser() { diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java index 0d4dbb2e688fc..3a52324661fc0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java @@ -208,7 +208,7 @@ static Level getLogLevel(TransportException e) { } void logWarnWithTimestamp() { - logger.info(() -> new ParameterizedMessage("last failed join attempt was {} ms ago, failed to join {} with {}", + logger.info(() -> new ParameterizedMessage("last failed join attempt was {} ago, failed to join {} with {}", TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - timestamp)), destination, joinRequest), diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeToolCli.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeToolCli.java index d6bd22bcd76fd..ff054e71eee3a 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeToolCli.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeToolCli.java @@ -22,6 +22,7 @@ import org.elasticsearch.cli.MultiCommand; import org.elasticsearch.cli.Terminal; import org.elasticsearch.env.NodeRepurposeCommand; +import org.elasticsearch.env.OverrideNodeVersionCommand; // NodeToolCli does not extend LoggingAwareCommand, because LoggingAwareCommand performs logging initialization // after LoggingAwareCommand instance is constructed. @@ -39,6 +40,7 @@ public NodeToolCli() { subcommands.put("repurpose", new NodeRepurposeCommand()); subcommands.put("unsafe-bootstrap", new UnsafeBootstrapMasterCommand()); subcommands.put("detach-cluster", new DetachClusterCommand()); + subcommands.put("override-version", new OverrideNodeVersionCommand()); } public static void main(String[] args) throws Exception { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 2e18e5aeae50a..1c9794191bf6e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -20,8 +20,9 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; -import org.apache.logging.log4j.Logger; + import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceAlreadyExistsException; @@ -714,13 +715,6 @@ static void validateSplitIndex(ClusterState state, String sourceIndex, Settings targetIndexSettings) { IndexMetaData sourceMetaData = validateResize(state, sourceIndex, targetIndexMappingsTypes, targetIndexName, targetIndexSettings); IndexMetaData.selectSplitShard(0, sourceMetaData, IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.get(targetIndexSettings)); - if (sourceMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1)) { - // ensure we have a single type since this would make the splitting code considerably more complex - // and a 5.x index would not be splittable unless it has been shrunk before so rather opt out of the complexity - // since in 5.x we don't have a setting to artificially set the number of routing shards - throw new IllegalStateException("source index created version is too old to apply a split operation"); - } - } static IndexMetaData validateResize(ClusterState state, String sourceIndex, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index d3520da670289..483835f633e7e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -195,7 +195,7 @@ public Set> entrySet() { } }; try (IndexAnalyzers fakeIndexAnalzyers = - new IndexAnalyzers(indexSettings, fakeDefault, fakeDefault, fakeDefault, analyzerMap, analyzerMap, analyzerMap)) { + new IndexAnalyzers(indexSettings, analyzerMap, analyzerMap, analyzerMap)) { MapperService mapperService = new MapperService(indexSettings, fakeIndexAnalzyers, xContentRegistry, similarityService, mapperRegistry, () -> null); 
mapperService.merge(indexMetaData, MapperService.MergeReason.MAPPING_RECOVERY); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index 08dc3d1d70971..9d0a081af4cbf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.routing; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -194,16 +193,7 @@ private ShardIterator preferenceActiveShardIterator(IndexShardRoutingTable index } } // if not, then use it as the index - int routingHash = Murmur3HashFunction.hash(preference); - if (nodes.getMinNodeVersion().onOrAfter(Version.V_6_0_0_alpha1)) { - // The AllocationService lists shards in a fixed order based on nodes - // so earlier versions of this class would have a tendency to - // select the same node across different shardIds. - // Better overall balancing can be achieved if each shardId opts - // for a different element in the list by also incorporating the - // shard ID into the hash of the user-supplied preference key. - routingHash = 31 * routingHash + indexShard.shardId.hashCode(); - } + int routingHash = 31 * Murmur3HashFunction.hash(preference) + indexShard.shardId.hashCode(); if (awarenessAttributes.isEmpty()) { return indexShard.activeInitializingShardsIt(routingHash); } else { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java index bfc4ce0618833..bbe82c42cf71c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java @@ -279,7 +279,7 @@ public ShardRouting(ShardId shardId, StreamInput in) throws IOException { } public ShardRouting(StreamInput in) throws IOException { - this(ShardId.readShardId(in), in); + this(new ShardId(in), in); } /** diff --git a/server/src/main/java/org/elasticsearch/common/LegacyTimeBasedUUIDGenerator.java b/server/src/main/java/org/elasticsearch/common/LegacyTimeBasedUUIDGenerator.java deleted file mode 100644 index 74a08711042f7..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/LegacyTimeBasedUUIDGenerator.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common; - -import java.util.Base64; -import java.util.concurrent.atomic.AtomicInteger; - -/** - * These are essentially flake ids, but we use 6 (not 8) bytes for timestamp, and use 3 (not 2) bytes for sequence number. - * For more information about flake ids, check out - * https://archive.fo/2015.07.08-082503/http://www.boundary.com/blog/2012/01/flake-a-decentralized-k-ordered-unique-id-generator-in-erlang/ - */ - -class LegacyTimeBasedUUIDGenerator implements UUIDGenerator { - - // We only use bottom 3 bytes for the sequence number. Paranoia: init with random int so that if JVM/OS/machine goes down, clock slips - // backwards, and JVM comes back up, we are less likely to be on the same sequenceNumber at the same time: - private final AtomicInteger sequenceNumber = new AtomicInteger(SecureRandomHolder.INSTANCE.nextInt()); - - // Used to ensure clock moves forward: - private long lastTimestamp; - - private static final byte[] SECURE_MUNGED_ADDRESS = MacAddressProvider.getSecureMungedAddress(); - - static { - assert SECURE_MUNGED_ADDRESS.length == 6; - } - - /** Puts the lower numberOfLongBytes from l into the array, starting index pos. */ - private static void putLong(byte[] array, long l, int pos, int numberOfLongBytes) { - for (int i=0; i>> (i*8)); - } - } - - @Override - public String getBase64UUID() { - final int sequenceId = sequenceNumber.incrementAndGet() & 0xffffff; - long timestamp = System.currentTimeMillis(); - - synchronized (this) { - // Don't let timestamp go backwards, at least "on our watch" (while this JVM is running). We are still vulnerable if we are - // shut down, clock goes backwards, and we restart... for this we randomize the sequenceNumber on init to decrease chance of - // collision: - timestamp = Math.max(lastTimestamp, timestamp); - - if (sequenceId == 0) { - // Always force the clock to increment whenever sequence number is 0, in case we have a long time-slip backwards: - timestamp++; - } - - lastTimestamp = timestamp; - } - - final byte[] uuidBytes = new byte[15]; - - // Only use lower 6 bytes of the timestamp (this will suffice beyond the year 10000): - putLong(uuidBytes, timestamp, 0, 6); - - // MAC address adds 6 bytes: - System.arraycopy(SECURE_MUNGED_ADDRESS, 0, uuidBytes, 6, SECURE_MUNGED_ADDRESS.length); - - // Sequence number adds 3 bytes: - putLong(uuidBytes, sequenceId, 12, 3); - - assert 9 + SECURE_MUNGED_ADDRESS.length == uuidBytes.length; - - return Base64.getUrlEncoder().withoutPadding().encodeToString(uuidBytes); - } -} diff --git a/server/src/main/java/org/elasticsearch/common/UUIDs.java b/server/src/main/java/org/elasticsearch/common/UUIDs.java index a6a314c2cccb0..46643a79da2e2 100644 --- a/server/src/main/java/org/elasticsearch/common/UUIDs.java +++ b/server/src/main/java/org/elasticsearch/common/UUIDs.java @@ -26,7 +26,6 @@ public class UUIDs { private static final RandomBasedUUIDGenerator RANDOM_UUID_GENERATOR = new RandomBasedUUIDGenerator(); - private static final UUIDGenerator LEGACY_TIME_UUID_GENERATOR = new LegacyTimeBasedUUIDGenerator(); private static final UUIDGenerator TIME_UUID_GENERATOR = new TimeBasedUUIDGenerator(); /** Generates a time-based UUID (similar to Flake IDs), which is preferred when generating an ID to be indexed into a Lucene index as @@ -35,11 +34,6 @@ public static String base64UUID() { return TIME_UUID_GENERATOR.getBase64UUID(); } - /** Legacy implementation of {@link #base64UUID()}, for pre 6.0 indices. 
*/ - public static String legacyBase64UUID() { - return LEGACY_TIME_UUID_GENERATOR.getBase64UUID(); - } - /** Returns a Base64 encoded version of a Version 4.0 compatible UUID as defined here: http://www.ietf.org/rfc/rfc4122.txt, using the * provided {@code Random} instance */ public static String randomBase64UUID(Random random) { diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java index e4cdb148a158e..6ed6722995cca 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.blobstore; import java.io.Closeable; -import java.io.IOException; /** * An interface for storing blobs. @@ -30,10 +29,4 @@ public interface BlobStore extends Closeable { * Get a blob container instance for storing blobs at the given {@link BlobPath}. */ BlobContainer blobContainer(BlobPath path); - - /** - * Delete the blob store at the given {@link BlobPath}. - */ - void delete(BlobPath path) throws IOException; - } diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java index 8a4d51e4dc93c..60c39a48e09c1 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.blobstore.fs; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; @@ -72,16 +71,6 @@ public BlobContainer blobContainer(BlobPath path) { } } - @Override - public void delete(BlobPath path) throws IOException { - assert readOnly == false : "should not delete anything from a readonly repository: " + path; - //noinspection ConstantConditions in case assertions are disabled - if (readOnly) { - throw new ElasticsearchException("unexpectedly deleting [" + path + "] from a readonly repository"); - } - IOUtils.rm(buildPath(path)); - } - @Override public void close() { // nothing to do here... 
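[Editorial note] The legacy generator deleted above packs a 15-byte flake-style ID: the lower 6 bytes of the timestamp, 6 bytes of the munged MAC address, and the lower 3 bytes of the sequence number, base64url-encoded without padding. Its byte-packing loop is garbled in this extract, so here is a minimal sketch of that layout, reconstructed from the surrounding comments (illustrative only, not part of the patch):

class FlakeIdLayoutSketch {
    /** Writes the lowest numberOfLongBytes of l into array, big-endian, starting at pos. */
    static void putLong(byte[] array, long l, int pos, int numberOfLongBytes) {
        for (int i = 0; i < numberOfLongBytes; ++i) {
            array[pos + numberOfLongBytes - i - 1] = (byte) (l >>> (i * 8));
        }
    }

    /** 6 timestamp bytes + 6 MAC bytes + 3 sequence bytes = 15 bytes, as described above. */
    static byte[] layout(long timestampMillis, byte[] securedMungedAddress, int sequenceId) {
        final byte[] uuidBytes = new byte[15];
        putLong(uuidBytes, timestampMillis, 0, 6);                // lower 6 bytes of the timestamp
        System.arraycopy(securedMungedAddress, 0, uuidBytes, 6, 6); // 6 bytes of (munged) MAC address
        putLong(uuidBytes, sequenceId & 0xffffff, 12, 3);         // lower 3 bytes of the sequence number
        return uuidBytes;
    }
}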
diff --git a/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java b/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java index 21d1bd9f25564..9299edc459cb7 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java +++ b/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java @@ -20,12 +20,18 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper; import java.io.IOException; +import java.io.InputStream; /** * first point of entry for a shape parser @@ -67,4 +73,20 @@ static ShapeBuilder parse(XContentParser parser, BaseGeoShapeFieldMapper shapeMa static ShapeBuilder parse(XContentParser parser) throws IOException { return parse(parser, null); } + + static ShapeBuilder parse(Object value) throws IOException { + XContentBuilder content = JsonXContent.contentBuilder(); + content.startObject(); + content.field("value", value); + content.endObject(); + + try (InputStream stream = BytesReference.bytes(content).streamInput(); + XContentParser parser = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + parser.nextToken(); // start object + parser.nextToken(); // field name + parser.nextToken(); // field value + return parse(parser); + } + } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index 56d1b5cedc33c..96a0cafc35b11 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -31,7 +31,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; @@ -72,17 +71,9 @@ public static Query newNestedFilter() { /** * Creates a new non-nested docs query - * @param indexVersionCreated the index version created since newer indices can identify a parent field more efficiently */ - public static Query newNonNestedFilter(Version indexVersionCreated) { - if (indexVersionCreated.onOrAfter(Version.V_6_1_0)) { - return new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME); - } else { - return new BooleanQuery.Builder() - .add(new MatchAllDocsQuery(), Occur.FILTER) - .add(newNestedFilter(), Occur.MUST_NOT) - .build(); - } + public static Query newNonNestedFilter() { + return new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME); } public static BooleanQuery filtered(@Nullable Query query, @Nullable Query filter) { diff --git a/server/src/main/java/org/elasticsearch/common/recycler/AbstractRecycler.java b/server/src/main/java/org/elasticsearch/common/recycler/AbstractRecycler.java 
index 05fa525972689..546d801d70b47 100644 --- a/server/src/main/java/org/elasticsearch/common/recycler/AbstractRecycler.java +++ b/server/src/main/java/org/elasticsearch/common/recycler/AbstractRecycler.java @@ -28,9 +28,4 @@ protected AbstractRecycler(Recycler.C c) { this.c = c; } - @Override - public void close() { - // no-op by default - } - } diff --git a/server/src/main/java/org/elasticsearch/common/recycler/ConcurrentDequeRecycler.java b/server/src/main/java/org/elasticsearch/common/recycler/ConcurrentDequeRecycler.java index 04103c5e274d9..54374cc3bdebe 100644 --- a/server/src/main/java/org/elasticsearch/common/recycler/ConcurrentDequeRecycler.java +++ b/server/src/main/java/org/elasticsearch/common/recycler/ConcurrentDequeRecycler.java @@ -37,13 +37,6 @@ public ConcurrentDequeRecycler(C c, int maxSize) { this.size = new AtomicInteger(); } - @Override - public void close() { - assert deque.size() == size.get(); - super.close(); - size.set(0); - } - @Override public V obtain() { final V v = super.obtain(); diff --git a/server/src/main/java/org/elasticsearch/common/recycler/DequeRecycler.java b/server/src/main/java/org/elasticsearch/common/recycler/DequeRecycler.java index a40befe9d8191..0f201133eceea 100644 --- a/server/src/main/java/org/elasticsearch/common/recycler/DequeRecycler.java +++ b/server/src/main/java/org/elasticsearch/common/recycler/DequeRecycler.java @@ -36,16 +36,6 @@ public DequeRecycler(C c, Deque queue, int maxSize) { this.maxSize = maxSize; } - @Override - public void close() { - // call destroy() for every cached object - for (T t : deque) { - c.destroy(t); - } - // finally get rid of all references - deque.clear(); - } - @Override public V obtain() { final T v = deque.pollFirst(); diff --git a/server/src/main/java/org/elasticsearch/common/recycler/FilterRecycler.java b/server/src/main/java/org/elasticsearch/common/recycler/FilterRecycler.java index 426185173e581..5011402f6d97a 100644 --- a/server/src/main/java/org/elasticsearch/common/recycler/FilterRecycler.java +++ b/server/src/main/java/org/elasticsearch/common/recycler/FilterRecycler.java @@ -34,9 +34,4 @@ public Recycler.V obtain() { return wrap(getDelegate().obtain()); } - @Override - public void close() { - getDelegate().close(); - } - } diff --git a/server/src/main/java/org/elasticsearch/common/recycler/NoneRecycler.java b/server/src/main/java/org/elasticsearch/common/recycler/NoneRecycler.java index 865182b88e104..102f1d424305a 100644 --- a/server/src/main/java/org/elasticsearch/common/recycler/NoneRecycler.java +++ b/server/src/main/java/org/elasticsearch/common/recycler/NoneRecycler.java @@ -31,11 +31,6 @@ public V obtain() { return new NV<>(c.newInstance()); } - @Override - public void close() { - // no-op - } - public static class NV implements Recycler.V { T value; diff --git a/server/src/main/java/org/elasticsearch/common/recycler/Recycler.java b/server/src/main/java/org/elasticsearch/common/recycler/Recycler.java index 161e6463423f3..95a67fdf8e015 100644 --- a/server/src/main/java/org/elasticsearch/common/recycler/Recycler.java +++ b/server/src/main/java/org/elasticsearch/common/recycler/Recycler.java @@ -25,7 +25,7 @@ * A recycled object, note, implementations should support calling obtain and then recycle * on different threads. 
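[Editorial note] With the close() overrides removed above and Recycler itself about to drop Releasable below, the only thing callers release is the obtained value handle. A minimal usage sketch, assuming the existing Recycler.V#v() accessor (illustrative only):

import org.elasticsearch.common.recycler.Recycler;

class RecyclerUsageSketch {
    static void useOnePage(Recycler<byte[]> recycler) {
        // obtain() hands out a Releasable handle around the pooled value
        try (Recycler.V<byte[]> page = recycler.obtain()) {
            byte[] bytes = page.v();
            bytes[0] = 1; // work with the pooled page
        } // closing the V returns the page; the Recycler itself has nothing left to close
    }
}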
*/ -public interface Recycler extends Releasable { +public interface Recycler { interface Factory { Recycler build(); @@ -53,8 +53,6 @@ interface V extends Releasable { } - void close(); - V obtain(); } diff --git a/server/src/main/java/org/elasticsearch/common/recycler/Recyclers.java b/server/src/main/java/org/elasticsearch/common/recycler/Recyclers.java index 3ea9d17c25f19..5bfd3448e2336 100644 --- a/server/src/main/java/org/elasticsearch/common/recycler/Recyclers.java +++ b/server/src/main/java/org/elasticsearch/common/recycler/Recyclers.java @@ -145,13 +145,6 @@ protected Recycler getDelegate() { return recyclers[slot()]; } - @Override - public void close() { - for (Recycler recycler : recyclers) { - recycler.close(); - } - } - }; } diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 514cfd3ce4ca8..1e5079124c345 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -466,6 +466,11 @@ public final String getRaw(final Settings settings) { * @return the raw string representation of the setting value */ String innerGetRaw(final Settings settings) { + SecureSettings secureSettings = settings.getSecureSettings(); + if (secureSettings != null && secureSettings.getSettingNames().contains(getKey())) { + throw new IllegalArgumentException("Setting [" + getKey() + "] is a non-secure setting" + + " and must be stored inside elasticsearch.yml, but was found inside the Elasticsearch keystore"); + } return settings.get(getKey(), defaultValue.apply(settings)); } diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 027d360153f2b..330681e2624a2 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -110,7 +110,7 @@ public class DateFormatters { .appendLiteral(':') .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) .optionalStart() - .appendFraction(NANO_OF_SECOND, 3, 9, true) + .appendFraction(NANO_OF_SECOND, 1, 9, true) .optionalEnd() .optionalEnd() .optionalStart() @@ -178,7 +178,7 @@ public class DateFormatters { /** * Returns a ISO 8601 compatible date time formatter and parser. 
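[Editorial note] Lowering the minimum fraction width from 3 to 1 in the DateFormatters builder above means a timestamp with a single fractional-second digit now parses. A small self-contained java.time sketch of the same appendFraction pattern (illustrative only, not the actual Elasticsearch formatter):

import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.temporal.ChronoField;
import java.util.Locale;

class FractionWidthSketch {
    public static void main(String[] args) {
        DateTimeFormatter formatter = new DateTimeFormatterBuilder()
            .appendValue(ChronoField.HOUR_OF_DAY, 2)
            .appendLiteral(':')
            .appendValue(ChronoField.MINUTE_OF_HOUR, 2)
            .appendLiteral(':')
            .appendValue(ChronoField.SECOND_OF_MINUTE, 2)
            .optionalStart()
            .appendFraction(ChronoField.NANO_OF_SECOND, 1, 9, true) // 1..9 fraction digits accepted
            .optionalEnd()
            .toFormatter(Locale.ROOT);
        System.out.println(LocalTime.parse("12:00:00.5", formatter));         // one digit now parses
        System.out.println(LocalTime.parse("12:00:00.123456789", formatter)); // nine digits still parse
    }
}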
* This is not fully compatible to the existing spec, which would require far more edge cases, but merely compatible with the - * existing joda time ISO data formater + * existing joda time ISO date formatter */ private static final DateFormatter ISO_8601 = new JavaDateFormatter("iso8601", STRICT_DATE_OPTIONAL_TIME_PRINTER, new DateTimeFormatterBuilder() @@ -201,6 +201,8 @@ public class DateFormatters { .appendFraction(NANO_OF_SECOND, 1, 9, false) .optionalEnd() .optionalEnd() + .optionalEnd() + .optionalEnd() .optionalStart() .appendZoneOrOffsetId() .optionalEnd() @@ -208,8 +210,6 @@ public class DateFormatters { .append(TIME_ZONE_FORMATTER_NO_COLON) .optionalEnd() .optionalEnd() - .optionalEnd() - .optionalEnd() .toFormatter(Locale.ROOT)); ///////////////////////////////////////// diff --git a/server/src/main/java/org/elasticsearch/common/util/PageCacheRecycler.java b/server/src/main/java/org/elasticsearch/common/util/PageCacheRecycler.java index 4ca408e044170..40b9a8c7e9468 100644 --- a/server/src/main/java/org/elasticsearch/common/util/PageCacheRecycler.java +++ b/server/src/main/java/org/elasticsearch/common/util/PageCacheRecycler.java @@ -20,8 +20,6 @@ package org.elasticsearch.common.util; import org.apache.lucene.util.RamUsageEstimator; -import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.recycler.AbstractRecyclerC; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Setting; @@ -39,7 +37,7 @@ import static org.elasticsearch.common.recycler.Recyclers.none; /** A recycler of fixed-size pages. */ -public class PageCacheRecycler implements Releasable { +public class PageCacheRecycler { public static final Setting TYPE_SETTING = new Setting<>("cache.recycler.page.type", Type.CONCURRENT.name(), Type::parse, Property.NodeScope); @@ -73,11 +71,6 @@ public class PageCacheRecycler implements Releasable { NON_RECYCLING_INSTANCE = new PageCacheRecycler(Settings.builder().put(LIMIT_HEAP_SETTING.getKey(), "0%").build()); } - @Override - public void close() { - Releasables.close(true, bytePage, intPage, longPage, objectPage); - } - public PageCacheRecycler(Settings settings) { final Type type = TYPE_SETTING.get(settings); final long limit = LIMIT_HEAP_SETTING.get(settings).getBytes(); diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java index 14d1ad5b64638..ac917e72b2d2f 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java @@ -148,11 +148,17 @@ protected void afterExecute(Runnable r, Throwable t) { assert super.unwrap(r) instanceof TimedRunnable : "expected only TimedRunnables in queue"; final TimedRunnable timedRunnable = (TimedRunnable) super.unwrap(r); final long taskNanos = timedRunnable.getTotalNanos(); + final boolean failedOrRejected = timedRunnable.getFailedOrRejected(); final long totalNanos = totalTaskNanos.addAndGet(taskNanos); final long taskExecutionNanos = timedRunnable.getTotalExecutionNanos(); - assert taskExecutionNanos >= 0 : "expected task to always take longer than 0 nanoseconds, got: " + taskExecutionNanos; - executionEWMA.addValue(taskExecutionNanos); + assert taskExecutionNanos >= 0 || (failedOrRejected && taskExecutionNanos == 
-1) : + "expected task to always take longer than 0 nanoseconds or have '-1' failure code, got: " + taskExecutionNanos + + ", failedOrRejected: " + failedOrRejected; + if (taskExecutionNanos != -1) { + // taskExecutionNanos may be -1 if the task threw an exception + executionEWMA.addValue(taskExecutionNanos); + } if (taskCount.incrementAndGet() == this.tasksPerFrame) { final long endTimeNs = System.nanoTime(); diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java index b6b9ef1ad05bf..f2de68453a6c2 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java @@ -30,6 +30,7 @@ class TimedRunnable extends AbstractRunnable implements WrappedRunnable { private final long creationTimeNanos; private long startTimeNanos; private long finishTimeNanos = -1; + private boolean failedOrRejected = false; TimedRunnable(final Runnable original) { this.original = original; @@ -48,6 +49,7 @@ public void doRun() { @Override public void onRejection(final Exception e) { + this.failedOrRejected = true; if (original instanceof AbstractRunnable) { ((AbstractRunnable) original).onRejection(e); } else { @@ -64,6 +66,7 @@ public void onAfter() { @Override public void onFailure(final Exception e) { + this.failedOrRejected = true; if (original instanceof AbstractRunnable) { ((AbstractRunnable) original).onFailure(e); } else { @@ -100,6 +103,14 @@ long getTotalExecutionNanos() { return Math.max(finishTimeNanos - startTimeNanos, 1); } + /** + * If the task was failed or rejected, return true. + * Otherwise, false. + */ + boolean getFailedOrRejected() { + return this.failedOrRejected; + } + @Override public Runnable unwrap() { return original; diff --git a/server/src/main/java/org/elasticsearch/discovery/FileBasedSeedHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/FileBasedSeedHostsProvider.java index 3af83e36311eb..8e0192f58e720 100644 --- a/server/src/main/java/org/elasticsearch/discovery/FileBasedSeedHostsProvider.java +++ b/server/src/main/java/org/elasticsearch/discovery/FileBasedSeedHostsProvider.java @@ -75,7 +75,7 @@ private List getHostsList() { @Override public List getSeedAddresses(HostsResolver hostsResolver) { - final List transportAddresses = hostsResolver.resolveHosts(getHostsList(), 1); + final List transportAddresses = hostsResolver.resolveHosts(getHostsList()); logger.debug("seed addresses: {}", transportAddresses); return transportAddresses; } diff --git a/server/src/main/java/org/elasticsearch/discovery/SeedHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/SeedHostsProvider.java index 12eb11e368618..4811d13d2d970 100644 --- a/server/src/main/java/org/elasticsearch/discovery/SeedHostsProvider.java +++ b/server/src/main/java/org/elasticsearch/discovery/SeedHostsProvider.java @@ -36,10 +36,9 @@ public interface SeedHostsProvider { /** * Helper object that allows to resolve a list of hosts to a list of transport addresses. 
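[Editorial note] The HostsResolver change here drops the per-address port-count parameter, so a resolver is now just a function from host strings to transport addresses. A minimal sketch of implementing it as a lambda, assuming HostsResolver stays a single-method interface; the fixed port 9300 and the helper are illustrative assumptions, not from the patch:

import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.discovery.SeedHostsProvider;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Objects;
import java.util.stream.Collectors;

class HostsResolverSketch {
    // A resolver is now simply: List<String> hosts -> List<TransportAddress>
    static SeedHostsProvider.HostsResolver fixedPortResolver() {
        return hosts -> hosts.stream()
            .map(HostsResolverSketch::resolveOrNull)
            .filter(Objects::nonNull)
            .collect(Collectors.toList());
    }

    private static TransportAddress resolveOrNull(String host) {
        try {
            return new TransportAddress(InetAddress.getByName(host), 9300); // assumed default port
        } catch (UnknownHostException e) {
            return null; // skip hosts that do not resolve
        }
    }
}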
- * Each host is resolved into a transport address (or a collection of addresses if the - * number of ports is greater than one) + * Each host is resolved into a transport address */ interface HostsResolver { - List resolveHosts(List hosts, int limitPortCounts); + List resolveHosts(List hosts); } } diff --git a/server/src/main/java/org/elasticsearch/discovery/SeedHostsResolver.java b/server/src/main/java/org/elasticsearch/discovery/SeedHostsResolver.java index 5ba0402389aa6..aa4c39ad8276d 100644 --- a/server/src/main/java/org/elasticsearch/discovery/SeedHostsResolver.java +++ b/server/src/main/java/org/elasticsearch/discovery/SeedHostsResolver.java @@ -87,9 +87,7 @@ public static TimeValue getResolveTimeout(Settings settings) { } @Override - public List resolveHosts( - final List hosts, - final int limitPortCounts) { + public List resolveHosts(final List hosts) { Objects.requireNonNull(hosts); if (resolveTimeout.nanos() < 0) { throw new IllegalArgumentException("resolve timeout must be non-negative but was [" + resolveTimeout + "]"); @@ -98,7 +96,7 @@ public List resolveHosts( final List> callables = hosts .stream() - .map(hn -> (Callable) () -> transportService.addressesFromString(hn, limitPortCounts)) + .map(hn -> (Callable) () -> transportService.addressesFromString(hn)) .collect(Collectors.toList()); final List> futures; try { diff --git a/server/src/main/java/org/elasticsearch/discovery/SettingsBasedSeedHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/SettingsBasedSeedHostsProvider.java index b3b3ca27894a5..6a8cf3494ad37 100644 --- a/server/src/main/java/org/elasticsearch/discovery/SettingsBasedSeedHostsProvider.java +++ b/server/src/main/java/org/elasticsearch/discovery/SettingsBasedSeedHostsProvider.java @@ -47,22 +47,14 @@ public class SettingsBasedSeedHostsProvider implements SeedHostsProvider { public static final Setting> DISCOVERY_SEED_HOSTS_SETTING = Setting.listSetting("discovery.seed_hosts", emptyList(), Function.identity(), Property.NodeScope); - // these limits are per-address - private static final int LIMIT_FOREIGN_PORTS_COUNT = 1; - private static final int LIMIT_LOCAL_PORTS_COUNT = 5; - private final List configuredHosts; - private final int limitPortCounts; public SettingsBasedSeedHostsProvider(Settings settings, TransportService transportService) { if (DISCOVERY_SEED_HOSTS_SETTING.exists(settings)) { configuredHosts = DISCOVERY_SEED_HOSTS_SETTING.get(settings); - // we only limit to 1 address, makes no sense to ping 100 ports - limitPortCounts = LIMIT_FOREIGN_PORTS_COUNT; } else { // if unicast hosts are not specified, fill with simple defaults on the local machine - configuredHosts = transportService.getLocalAddresses(); - limitPortCounts = LIMIT_LOCAL_PORTS_COUNT; + configuredHosts = transportService.getDefaultSeedAddresses(); } logger.debug("using initial hosts {}", configuredHosts); @@ -70,6 +62,6 @@ public SettingsBasedSeedHostsProvider(Settings settings, TransportService transp @Override public List getSeedAddresses(HostsResolver hostsResolver) { - return hostsResolver.resolveHosts(configuredHosts, limitPortCounts); + return hostsResolver.resolveHosts(configuredHosts); } } diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index fc2f76d3436c0..4cfd22ecb1a65 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -31,6 +31,7 @@ import 
org.apache.lucene.store.NativeFSLockFactory; import org.apache.lucene.store.SimpleFSDirectory; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.CheckedFunction; @@ -248,7 +249,7 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce sharedDataPath = null; locks = null; nodeLockId = -1; - nodeMetaData = new NodeMetaData(generateNodeId(settings)); + nodeMetaData = new NodeMetaData(generateNodeId(settings), Version.CURRENT); return; } boolean success = false; @@ -393,7 +394,6 @@ private void maybeLogHeapDetails() { logger.info("heap size [{}], compressed ordinary object pointers [{}]", maxHeapSize, useCompressedOops); } - /** * scans the node paths and loads existing metaData file. If not found a new meta data will be generated * and persisted into the nodePaths @@ -403,10 +403,15 @@ private static NodeMetaData loadOrCreateNodeMetaData(Settings settings, Logger l final Path[] paths = Arrays.stream(nodePaths).map(np -> np.path).toArray(Path[]::new); NodeMetaData metaData = NodeMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, paths); if (metaData == null) { - metaData = new NodeMetaData(generateNodeId(settings)); + metaData = new NodeMetaData(generateNodeId(settings), Version.CURRENT); + } else { + metaData = metaData.upgradeToCurrentVersion(); } + // we write again to make sure all paths have the latest state file + assert metaData.nodeVersion().equals(Version.CURRENT) : metaData.nodeVersion() + " != " + Version.CURRENT; NodeMetaData.FORMAT.writeAndCleanup(metaData, paths); + return metaData; } diff --git a/server/src/main/java/org/elasticsearch/env/NodeMetaData.java b/server/src/main/java/org/elasticsearch/env/NodeMetaData.java index dbea3164c8a44..f9deba8f6c382 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeMetaData.java +++ b/server/src/main/java/org/elasticsearch/env/NodeMetaData.java @@ -19,6 +19,7 @@ package org.elasticsearch.env; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -31,66 +32,104 @@ import java.util.Objects; /** - * Metadata associated with this node. Currently only contains the unique uuid describing this node. + * Metadata associated with this node: its persistent node ID and its version. * The metadata is persisted in the data folder of this node and is reused across restarts. 
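[Editorial note] As the parser changes below show, the persisted node metadata now carries the numeric Version id alongside the node id, and it is rehydrated through Version.fromId. A tiny round-trip sketch (illustrative only):

import org.elasticsearch.Version;

class NodeVersionIdSketch {
    public static void main(String[] args) {
        // the new "node_version" field stores Version#id (an int) ...
        int persistedId = Version.CURRENT.id;
        // ... and the ObjectParser turns it back into a Version via Version.fromId
        Version parsed = Version.fromId(persistedId);
        System.out.println(persistedId + " -> " + parsed);
    }
}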
*/ public final class NodeMetaData { private static final String NODE_ID_KEY = "node_id"; + private static final String NODE_VERSION_KEY = "node_version"; private final String nodeId; - public NodeMetaData(final String nodeId) { + private final Version nodeVersion; + + public NodeMetaData(final String nodeId, final Version nodeVersion) { this.nodeId = Objects.requireNonNull(nodeId); + this.nodeVersion = Objects.requireNonNull(nodeVersion); } @Override public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; NodeMetaData that = (NodeMetaData) o; - - return Objects.equals(this.nodeId, that.nodeId); + return nodeId.equals(that.nodeId) && + nodeVersion.equals(that.nodeVersion); } @Override public int hashCode() { - return this.nodeId.hashCode(); + return Objects.hash(nodeId, nodeVersion); } @Override public String toString() { - return "node_id [" + nodeId + "]"; + return "NodeMetaData{" + + "nodeId='" + nodeId + '\'' + + ", nodeVersion=" + nodeVersion + + '}'; } private static ObjectParser PARSER = new ObjectParser<>("node_meta_data", Builder::new); static { PARSER.declareString(Builder::setNodeId, new ParseField(NODE_ID_KEY)); + PARSER.declareInt(Builder::setNodeVersionId, new ParseField(NODE_VERSION_KEY)); } public String nodeId() { return nodeId; } + public Version nodeVersion() { + return nodeVersion; + } + + public NodeMetaData upgradeToCurrentVersion() { + if (nodeVersion.equals(Version.V_EMPTY)) { + assert Version.CURRENT.major <= Version.V_7_0_0.major + 1 : "version is required in the node metadata from v9 onwards"; + return new NodeMetaData(nodeId, Version.CURRENT); + } + + if (nodeVersion.before(Version.CURRENT.minimumIndexCompatibilityVersion())) { + throw new IllegalStateException( + "cannot upgrade a node from version [" + nodeVersion + "] directly to version [" + Version.CURRENT + "]"); + } + + if (nodeVersion.after(Version.CURRENT)) { + throw new IllegalStateException( + "cannot downgrade a node from version [" + nodeVersion + "] to version [" + Version.CURRENT + "]"); + } + + return nodeVersion.equals(Version.CURRENT) ? 
this : new NodeMetaData(nodeId, Version.CURRENT); + } + private static class Builder { String nodeId; + Version nodeVersion; public void setNodeId(String nodeId) { this.nodeId = nodeId; } + public void setNodeVersionId(int nodeVersionId) { + this.nodeVersion = Version.fromId(nodeVersionId); + } + public NodeMetaData build() { - return new NodeMetaData(nodeId); + final Version nodeVersion; + if (this.nodeVersion == null) { + assert Version.CURRENT.major <= Version.V_7_0_0.major + 1 : "version is required in the node metadata from v9 onwards"; + nodeVersion = Version.V_EMPTY; + } else { + nodeVersion = this.nodeVersion; + } + + return new NodeMetaData(nodeId, nodeVersion); } } - public static final MetaDataStateFormat FORMAT = new MetaDataStateFormat("node-") { @Override @@ -103,10 +142,11 @@ protected XContentBuilder newXContentBuilder(XContentType type, OutputStream str @Override public void toXContent(XContentBuilder builder, NodeMetaData nodeMetaData) throws IOException { builder.field(NODE_ID_KEY, nodeMetaData.nodeId); + builder.field(NODE_VERSION_KEY, nodeMetaData.nodeVersion.id); } @Override - public NodeMetaData fromXContent(XContentParser parser) throws IOException { + public NodeMetaData fromXContent(XContentParser parser) { return PARSER.apply(parser, null).build(); } }; diff --git a/server/src/main/java/org/elasticsearch/env/NodeRepurposeCommand.java b/server/src/main/java/org/elasticsearch/env/NodeRepurposeCommand.java index 7331d8528fc64..20b5552dfa8f8 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeRepurposeCommand.java +++ b/server/src/main/java/org/elasticsearch/env/NodeRepurposeCommand.java @@ -172,10 +172,6 @@ private String toIndexName(NodeEnvironment.NodePath[] nodePaths, String uuid) { } } - private NodeEnvironment.NodePath[] toNodePaths(Path[] dataPaths) { - return Arrays.stream(dataPaths).map(NodeRepurposeCommand::createNodePath).toArray(NodeEnvironment.NodePath[]::new); - } - private Set indexUUIDsFor(Set indexPaths) { return indexPaths.stream().map(Path::getFileName).map(Path::toString).collect(Collectors.toSet()); } @@ -221,19 +217,11 @@ private void removePath(Path path) { @SafeVarargs @SuppressWarnings("varargs") - private final Set uniqueParentPaths(Collection... paths) { + private Set uniqueParentPaths(Collection... paths) { // equals on Path is good enough here due to the way these are collected. return Arrays.stream(paths).flatMap(Collection::stream).map(Path::getParent).collect(Collectors.toSet()); } - private static NodeEnvironment.NodePath createNodePath(Path path) { - try { - return new NodeEnvironment.NodePath(path); - } catch (IOException e) { - throw new ElasticsearchException("Unable to investigate path: " + path + ": " + e.getMessage()); - } - } - //package-private for testing OptionParser getParser() { return parser; diff --git a/server/src/main/java/org/elasticsearch/env/OverrideNodeVersionCommand.java b/server/src/main/java/org/elasticsearch/env/OverrideNodeVersionCommand.java new file mode 100644 index 0000000000000..a46e185a25351 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/env/OverrideNodeVersionCommand.java @@ -0,0 +1,103 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.env; + +import joptsimple.OptionParser; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.cluster.coordination.ElasticsearchNodeCommand; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.Arrays; + +public class OverrideNodeVersionCommand extends ElasticsearchNodeCommand { + private static final Logger logger = LogManager.getLogger(OverrideNodeVersionCommand.class); + + private static final String TOO_NEW_MESSAGE = + DELIMITER + + "\n" + + "This data path was last written by Elasticsearch version [V_NEW] and may no\n" + + "longer be compatible with Elasticsearch version [V_CUR]. This tool will bypass\n" + + "this compatibility check, allowing a version [V_CUR] node to start on this data\n" + + "path, but a version [V_CUR] node may not be able to read this data or may read\n" + + "it incorrectly leading to data loss.\n" + + "\n" + + "You should not use this tool. Instead, continue to use a version [V_NEW] node\n" + + "on this data path. If necessary, you can use reindex-from-remote to copy the\n" + + "data from here into an older cluster.\n" + + "\n" + + "Do you want to proceed?\n"; + + private static final String TOO_OLD_MESSAGE = + DELIMITER + + "\n" + + "This data path was last written by Elasticsearch version [V_OLD] which may be\n" + + "too old to be readable by Elasticsearch version [V_CUR]. This tool will bypass\n" + + "this compatibility check, allowing a version [V_CUR] node to start on this data\n" + + "path, but this version [V_CUR] node may not be able to read this data or may\n" + + "read it incorrectly leading to data loss.\n" + + "\n" + + "You should not use this tool. 
Instead, upgrade this data path from [V_OLD] to\n" + + "[V_CUR] using one or more intermediate versions of Elasticsearch.\n" + + "\n" + + "Do you want to proceed?\n"; + + static final String NO_METADATA_MESSAGE = "no node metadata found, so there is no version to override"; + static final String SUCCESS_MESSAGE = "Successfully overwrote this node's metadata to bypass its version compatibility checks."; + + public OverrideNodeVersionCommand() { + super("Overwrite the version stored in this node's data path with [" + Version.CURRENT + + "] to bypass the version compatibility checks"); + } + + @Override + protected void processNodePaths(Terminal terminal, Path[] dataPaths, Environment env) throws IOException { + final Path[] nodePaths = Arrays.stream(toNodePaths(dataPaths)).map(p -> p.path).toArray(Path[]::new); + final NodeMetaData nodeMetaData = NodeMetaData.FORMAT.loadLatestState(logger, namedXContentRegistry, nodePaths); + if (nodeMetaData == null) { + throw new ElasticsearchException(NO_METADATA_MESSAGE); + } + + try { + nodeMetaData.upgradeToCurrentVersion(); + throw new ElasticsearchException("found [" + nodeMetaData + "] which is compatible with current version [" + Version.CURRENT + + "], so there is no need to override the version checks"); + } catch (IllegalStateException e) { + // ok, means the version change is not supported + } + + confirm(terminal, (nodeMetaData.nodeVersion().before(Version.CURRENT) ? TOO_OLD_MESSAGE : TOO_NEW_MESSAGE) + .replace("V_OLD", nodeMetaData.nodeVersion().toString()) + .replace("V_NEW", nodeMetaData.nodeVersion().toString()) + .replace("V_CUR", Version.CURRENT.toString())); + + NodeMetaData.FORMAT.writeAndCleanup(new NodeMetaData(nodeMetaData.nodeId(), Version.CURRENT), nodePaths); + + terminal.println(SUCCESS_MESSAGE); + } + + //package-private for testing + OptionParser getParser() { + return parser; + } +} diff --git a/server/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java b/server/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java index 7e4172961ea1e..d8b96550ad01a 100644 --- a/server/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java +++ b/server/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java @@ -93,7 +93,7 @@ public abstract AllocateUnassignedDecision makeAllocationDecision(ShardRouting u * Builds decisions for all nodes in the cluster, so that the explain API can provide information on * allocation decisions for each node, while still waiting to allocate the shard (e.g. due to fetching shard data). 
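[Editorial note] The OverrideNodeVersionCommand added above is registered in NodeToolCli alongside repurpose, unsafe-bootstrap and detach-cluster, so on an affected data path it would be run as the override-version subcommand of the elasticsearch-node CLI (which NodeToolCli backs), roughly: bin/elasticsearch-node override-version. After the operator confirms the TOO_OLD/TOO_NEW warning, the stored node metadata is rewritten with Version.CURRENT.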
*/ - protected List buildDecisionsForAllNodes(ShardRouting shard, RoutingAllocation allocation) { + protected static List buildDecisionsForAllNodes(ShardRouting shard, RoutingAllocation allocation) { List results = new ArrayList<>(); for (RoutingNode node : allocation.routingNodes()) { Decision decision = allocation.deciders().canAllocate(shard, node, allocation); diff --git a/server/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java b/server/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java index 9a1c79b476e1b..fefd807d8d8a5 100644 --- a/server/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java +++ b/server/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java @@ -22,6 +22,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.IndexGraveyard; @@ -162,14 +163,14 @@ private void allocateDanglingIndices() { } try { allocateDangledIndices.allocateDangled(Collections.unmodifiableCollection(new ArrayList<>(danglingIndices.values())), - new LocalAllocateDangledIndices.Listener() { + new ActionListener<>() { @Override public void onResponse(LocalAllocateDangledIndices.AllocateDangledResponse response) { logger.trace("allocated dangled"); } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { logger.info("failed to send allocated dangled", e); } } diff --git a/server/src/main/java/org/elasticsearch/gateway/Gateway.java b/server/src/main/java/org/elasticsearch/gateway/Gateway.java index dea544d40d55b..c59d52c60be7a 100644 --- a/server/src/main/java/org/elasticsearch/gateway/Gateway.java +++ b/server/src/main/java/org/elasticsearch/gateway/Gateway.java @@ -28,9 +28,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; -import org.elasticsearch.indices.IndicesService; import java.util.Arrays; import java.util.function.Function; @@ -43,12 +41,7 @@ public class Gateway { private final TransportNodesListGatewayMetaState listGatewayMetaState; - private final IndicesService indicesService; - - public Gateway(final Settings settings, final ClusterService clusterService, - final TransportNodesListGatewayMetaState listGatewayMetaState, - final IndicesService indicesService) { - this.indicesService = indicesService; + public Gateway(final ClusterService clusterService, final TransportNodesListGatewayMetaState listGatewayMetaState) { this.clusterService = clusterService; this.listGatewayMetaState = listGatewayMetaState; } diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayException.java b/server/src/main/java/org/elasticsearch/gateway/GatewayException.java index 32050f1c10e7d..380610a593675 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayException.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayException.java @@ -26,14 +26,6 @@ public class GatewayException extends ElasticsearchException { - public GatewayException(String msg) { - super(msg); - } - - public GatewayException(String msg, Throwable cause) { - super(msg, cause); - } - public GatewayException(StreamInput in) throws 
IOException { super(in); } diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index 30361fa70ee6b..91bcb68370ea1 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -44,9 +44,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.MetaDataUpgrader; import org.elasticsearch.transport.TransportService; @@ -76,11 +74,9 @@ public class GatewayMetaState implements ClusterStateApplier, CoordinationState.PersistedState { protected static final Logger logger = LogManager.getLogger(GatewayMetaState.class); - private final NodeEnvironment nodeEnv; private final MetaStateService metaStateService; private final Settings settings; private final ClusterService clusterService; - private final IndicesService indicesService; private final TransportService transportService; //there is a single thread executing updateClusterState calls, hence no volatile modifier @@ -88,16 +84,13 @@ public class GatewayMetaState implements ClusterStateApplier, CoordinationState. protected ClusterState previousClusterState; protected boolean incrementalWrite; - public GatewayMetaState(Settings settings, NodeEnvironment nodeEnv, MetaStateService metaStateService, + public GatewayMetaState(Settings settings, MetaStateService metaStateService, MetaDataIndexUpgradeService metaDataIndexUpgradeService, MetaDataUpgrader metaDataUpgrader, - TransportService transportService, ClusterService clusterService, - IndicesService indicesService) throws IOException { + TransportService transportService, ClusterService clusterService) throws IOException { this.settings = settings; - this.nodeEnv = nodeEnv; this.metaStateService = metaStateService; this.transportService = transportService; this.clusterService = clusterService; - this.indicesService = indicesService; upgradeMetaData(metaDataIndexUpgradeService, metaDataUpgrader); initializeClusterState(ClusterName.CLUSTER_NAME_SETTING.get(settings)); @@ -184,7 +177,7 @@ protected void upgradeMetaData(MetaDataIndexUpgradeService metaDataIndexUpgradeS } } - protected boolean isMasterOrDataNode() { + private boolean isMasterOrDataNode() { return DiscoveryNode.isMasterNode(settings) || DiscoveryNode.isDataNode(settings); } @@ -312,13 +305,12 @@ long writeIndex(String reason, IndexMetaData metaData) throws WriteStateExceptio } } - long writeManifestAndCleanup(String reason, Manifest manifest) throws WriteStateException { + void writeManifestAndCleanup(String reason, Manifest manifest) throws WriteStateException { assert finished == false : FINISHED_MSG; try { - long generation = metaStateService.writeManifestAndCleanup(reason, manifest); + metaStateService.writeManifestAndCleanup(reason, manifest); commitCleanupActions.forEach(Runnable::run); finished = true; - return generation; } catch (WriteStateException e) { // if Manifest write results in dirty WriteStateException it's not safe to remove // new metadata files, because if Manifest was actually written to disk and its deletion @@ -346,7 +338,7 @@ void rollback() { * * @throws WriteStateException if exception occurs. See also {@link WriteStateException#isDirty()}. 
*/ - protected void updateClusterState(ClusterState newState, ClusterState previousState) + private void updateClusterState(ClusterState newState, ClusterState previousState) throws WriteStateException { MetaData newMetaData = newState.metaData(); @@ -406,7 +398,7 @@ public static Set getRelevantIndices(ClusterState state, ClusterState pre } private static boolean isDataOnlyNode(ClusterState state) { - return ((state.nodes().getLocalNode().isMasterNode() == false) && state.nodes().getLocalNode().isDataNode()); + return state.nodes().getLocalNode().isMasterNode() == false && state.nodes().getLocalNode().isDataNode(); } /** @@ -535,8 +527,7 @@ private static Set getRelevantIndicesOnDataOnlyNode(ClusterState state, C } private static Set getRelevantIndicesForMasterEligibleNode(ClusterState state) { - Set relevantIndices; - relevantIndices = new HashSet<>(); + Set relevantIndices = new HashSet<>(); // we have to iterate over the metadata to make sure we also capture closed indices for (IndexMetaData indexMetaData : state.metaData()) { relevantIndices.add(indexMetaData.getIndex()); diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayService.java b/server/src/main/java/org/elasticsearch/gateway/GatewayService.java index 3cc8ec167552c..b7b7d0759980e 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -40,7 +40,6 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.discovery.Discovery; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; @@ -93,7 +92,7 @@ public class GatewayService extends AbstractLifecycleComponent implements Cluste public GatewayService(final Settings settings, final AllocationService allocationService, final ClusterService clusterService, final ThreadPool threadPool, final TransportNodesListGatewayMetaState listGatewayMetaState, - final IndicesService indicesService, final Discovery discovery) { + final Discovery discovery) { this.allocationService = allocationService; this.clusterService = clusterService; this.threadPool = threadPool; @@ -122,7 +121,7 @@ public GatewayService(final Settings settings, final AllocationService allocatio recoveryRunnable = () -> clusterService.submitStateUpdateTask("local-gateway-elected-state", new RecoverStateUpdateTask()); } else { - final Gateway gateway = new Gateway(settings, clusterService, listGatewayMetaState, indicesService); + final Gateway gateway = new Gateway(clusterService, listGatewayMetaState); recoveryRunnable = () -> gateway.performStateRecovery(new GatewayRecoveryListener()); } diff --git a/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java b/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java index a5f4f77da438b..48117eed2d56d 100644 --- a/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java +++ b/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java @@ -23,6 +23,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -76,7 +77,7 @@ public 
LocalAllocateDangledIndices(TransportService transportService, ClusterSer new AllocateDangledRequestHandler()); } - public void allocateDangled(Collection indices, final Listener listener) { + public void allocateDangled(Collection indices, ActionListener listener) { ClusterState clusterState = clusterService.state(); DiscoveryNode masterNode = clusterState.nodes().getMasterNode(); if (masterNode == null) { @@ -88,9 +89,7 @@ public void allocateDangled(Collection indices, final Listener li transportService.sendRequest(masterNode, ACTION_NAME, request, new TransportResponseHandler() { @Override public AllocateDangledResponse read(StreamInput in) throws IOException { - final AllocateDangledResponse response = new AllocateDangledResponse(); - response.readFrom(in); - return response; + return new AllocateDangledResponse(in); } @Override @@ -110,12 +109,6 @@ public String executor() { }); } - public interface Listener { - void onResponse(AllocateDangledResponse response); - - void onFailure(Throwable e); - } - class AllocateDangledRequestHandler implements TransportRequestHandler { @Override public void messageReceived(final AllocateDangledRequest request, final TransportChannel channel, Task task) throws Exception { @@ -203,7 +196,7 @@ public void onFailure(String source, Exception e) { @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { try { - channel.sendResponse(new AllocateDangledResponse(true)); + channel.sendResponse(new AllocateDangledResponse()); } catch (IOException e) { logger.warn("failed send response for allocating dangled", e); } @@ -248,29 +241,21 @@ public void writeTo(StreamOutput out) throws IOException { public static class AllocateDangledResponse extends TransportResponse { - private boolean ack; - - AllocateDangledResponse() { - } - - AllocateDangledResponse(boolean ack) { - this.ack = ack; - } - - public boolean ack() { - return ack; + private AllocateDangledResponse(StreamInput in) throws IOException { + if (in.getVersion().before(Version.V_8_0_0)) { + in.readBoolean(); + } } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - ack = in.readBoolean(); + private AllocateDangledResponse() { } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeBoolean(ack); + if (out.getVersion().before(Version.V_8_0_0)) { + out.writeBoolean(true); + } } } } diff --git a/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java index 3f28fead29439..d5dbfe828665f 100644 --- a/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java +++ b/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java @@ -382,7 +382,7 @@ private List findStateFilesByGeneration(final long generation, Path... loc return files; } - private String getStateFileName(long generation) { + public String getStateFileName(long generation) { return prefix + generation + STATE_FILE_EXTENSION; } @@ -466,7 +466,7 @@ public static void deleteMetaState(Path... 
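The AllocateDangledResponse change above drops the unused ack flag but keeps the wire format compatible by reading and writing a placeholder boolean whenever the peer is older than 8.0.0. A hedged sketch of that version-gated pattern using plain java.io streams rather than the Elasticsearch StreamInput/StreamOutput API; the version constant is a stand-in:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    final class VersionGatedAck {
        // Stand-in for the internal id of the release that dropped the field.
        static final int V_8_0_0 = 8_000_099;

        // Writer side: only emit the obsolete "ack" flag for peers older than 8.0.0.
        static void writeTo(DataOutput out, int remotePeerVersion) throws IOException {
            if (remotePeerVersion < V_8_0_0) {
                out.writeBoolean(true);
            }
        }

        // Reader side: consume and discard the flag when the sender is older than 8.0.0.
        static void readFrom(DataInput in, int remotePeerVersion) throws IOException {
            if (remotePeerVersion < V_8_0_0) {
                in.readBoolean();
            }
        }
    }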
dataLocations) throws IOException { IOUtils.rm(stateDirectories); } - String getPrefix() { + public String getPrefix() { return prefix; } } diff --git a/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java b/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java index d67cdccb9a09b..3bd8ba11a57ec 100644 --- a/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java @@ -199,12 +199,11 @@ MetaData loadGlobalState() throws IOException { * * @throws WriteStateException if exception when writing state occurs. See also {@link WriteStateException#isDirty()} */ - public long writeManifestAndCleanup(String reason, Manifest manifest) throws WriteStateException { + public void writeManifestAndCleanup(String reason, Manifest manifest) throws WriteStateException { logger.trace("[_meta] writing state, reason [{}]", reason); try { long generation = MANIFEST_FORMAT.writeAndCleanup(manifest, nodeEnv.nodeDataPaths()); logger.trace("[_meta] state written (generation: {})", generation); - return generation; } catch (WriteStateException ex) { throw new WriteStateException(ex.isDirty(), "[_meta]: failed to write meta state", ex); } diff --git a/server/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/server/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index 79030336acc02..d2e82d092e603 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/server/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -297,10 +297,10 @@ protected static NodeShardsResult buildNodeShardsResult(ShardRouting shard, bool /** * Split the list of node shard states into groups yes/no/throttle based on allocation deciders */ - private NodesToAllocate buildNodesToAllocate(RoutingAllocation allocation, - List nodeShardStates, - ShardRouting shardRouting, - boolean forceAllocate) { + private static NodesToAllocate buildNodesToAllocate(RoutingAllocation allocation, + List nodeShardStates, + ShardRouting shardRouting, + boolean forceAllocate) { List yesNodeShards = new ArrayList<>(); List throttledNodeShards = new ArrayList<>(); List noNodeShards = new ArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/gateway/PriorityComparator.java b/server/src/main/java/org/elasticsearch/gateway/PriorityComparator.java index 1d24baf561ab3..60bdc2434e972 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PriorityComparator.java +++ b/server/src/main/java/org/elasticsearch/gateway/PriorityComparator.java @@ -56,11 +56,11 @@ public final int compare(ShardRouting o1, ShardRouting o2) { return cmp; } - private int priority(Settings settings) { + private static int priority(Settings settings) { return IndexMetaData.INDEX_PRIORITY_SETTING.get(settings); } - private long timeCreated(Settings settings) { + private static long timeCreated(Settings settings) { return settings.getAsLong(IndexMetaData.SETTING_CREATION_DATE, -1L); } diff --git a/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index 10bd6115b4c74..ce3cde3e6db71 100644 --- a/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -243,8 +243,8 @@ public AllocateUnassignedDecision makeAllocationDecision(final ShardRouting unas * YES or THROTTLE). 
If in explain mode, also returns the node-level explanations as the second element * in the returned tuple. */ - private Tuple> canBeAllocatedToAtLeastOneNode(ShardRouting shard, - RoutingAllocation allocation) { + private static Tuple> canBeAllocatedToAtLeastOneNode(ShardRouting shard, + RoutingAllocation allocation) { Decision madeDecision = Decision.NO; final boolean explain = allocation.debugDecision(); Map nodeDecisions = explain ? new HashMap<>() : null; @@ -260,7 +260,7 @@ private Tuple> canBeAllocatedToAtLea if (explain) { madeDecision = decision; } else { - return Tuple.tuple(decision, nodeDecisions); + return Tuple.tuple(decision, null); } } else if (madeDecision.type() == Decision.Type.NO && decision.type() == Decision.Type.THROTTLE) { madeDecision = decision; @@ -276,8 +276,8 @@ private Tuple> canBeAllocatedToAtLea * Takes the store info for nodes that have a shard store and adds them to the node decisions, * leaving the node explanations untouched for those nodes that do not have any store information. */ - private List augmentExplanationsWithStoreInfo(Map nodeDecisions, - Map withShardStores) { + private static List augmentExplanationsWithStoreInfo(Map nodeDecisions, + Map withShardStores) { if (nodeDecisions == null || withShardStores == null) { return null; } @@ -295,8 +295,8 @@ private List augmentExplanationsWithStoreInfo(Map data) { + private static TransportNodesListShardStoreMetaData.StoreFilesMetaData findStore(ShardRouting shard, RoutingAllocation allocation, + AsyncShardFetch.FetchResult data) { assert shard.currentNodeId() != null; DiscoveryNode primaryNode = allocation.nodes().get(shard.currentNodeId()); if (primaryNode == null) { diff --git a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java index 477961c8a6d0c..ab0fad88ecdfa 100644 --- a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java @@ -94,23 +94,10 @@ public Request() { public Request(String... 
nodesIds) { super(nodesIds); } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - } } public static class NodesGatewayMetaState extends BaseNodesResponse { - NodesGatewayMetaState() { - } - public NodesGatewayMetaState(ClusterName clusterName, List nodes, List failures) { super(clusterName, nodes, failures); } @@ -135,15 +122,6 @@ public NodeRequest() { super(nodeId); } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - } } public static class NodeGatewayMetaState extends BaseNodeResponse { diff --git a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index e2eb75458c0cc..1893be3acd518 100644 --- a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -50,6 +50,7 @@ import java.io.IOException; import java.util.List; +import java.util.Objects; /** * This transport action is used to fetch the shard version from each node during primary allocation in {@link GatewayAllocator}. @@ -185,7 +186,7 @@ public ShardId shardId() { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); } @Override @@ -229,7 +230,7 @@ public NodeRequest(String nodeId, Request request) { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); } @Override @@ -309,14 +310,8 @@ public boolean equals(Object o) { NodeGatewayStartedShards that = (NodeGatewayStartedShards) o; - if (primary != that.primary) { - return false; - } - if (allocationId != null ? !allocationId.equals(that.allocationId) : that.allocationId != null) { - return false; - } - return storeException != null ? storeException.equals(that.storeException) : that.storeException == null; - + return primary == that.primary && Objects.equals(allocationId, that.allocationId) + && Objects.equals(storeException, that.storeException); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java index a7fe19928762f..65e39e34a3e73 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; -import org.elasticsearch.Version; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -120,15 +119,6 @@ public IndexSortConfig(IndexSettings indexSettings) { .map((name) -> new FieldSortSpec(name)) .toArray(FieldSortSpec[]::new); - if (sortSpecs.length > 0 && indexSettings.getIndexVersionCreated().before(Version.V_6_0_0_alpha1)) { - /** - * This index might be assigned to a node where the index sorting feature is not available - * (ie. 
versions prior to {@link Version.V_6_0_0_alpha1_UNRELEASED}) so we must fail here rather than later. - */ - throw new IllegalArgumentException("unsupported index.version.created:" + indexSettings.getIndexVersionCreated() + - ", can't set index.sort on versions prior to " + Version.V_6_0_0_alpha1); - } - if (INDEX_SORT_ORDER_SETTING.exists(settings)) { List orders = INDEX_SORT_ORDER_SETTING.get(settings); if (orders.size() != sortSpecs.length) { diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 483a1b4a7e563..c4be6edd49069 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -51,6 +51,11 @@ public final class AnalysisRegistry implements Closeable { public static final String INDEX_ANALYSIS_CHAR_FILTER = "index.analysis.char_filter"; public static final String INDEX_ANALYSIS_FILTER = "index.analysis.filter"; public static final String INDEX_ANALYSIS_TOKENIZER = "index.analysis.tokenizer"; + + public static final String DEFAULT_ANALYZER_NAME = "default"; + public static final String DEFAULT_SEARCH_ANALYZER_NAME = "default_search"; + public static final String DEFAULT_SEARCH_QUOTED_ANALYZER_NAME = "default_search_quoted"; + private final PrebuiltAnalysis prebuiltAnalysis; private final Map cachedAnalyzer = new ConcurrentHashMap<>(); @@ -439,37 +444,29 @@ public IndexAnalyzers build(IndexSettings indexSettings, "whitespace", () -> new WhitespaceTokenizer(), tokenFilterFactoryFactories, charFilterFactoryFactories); } - if (!analyzers.containsKey("default")) { - NamedAnalyzer defaultAnalyzer = produceAnalyzer("default", new StandardAnalyzerProvider(indexSettings, null, "default", - Settings.Builder.EMPTY_SETTINGS), tokenFilterFactoryFactories, charFilterFactoryFactories, tokenizerFactoryFactories); - analyzers.put("default", defaultAnalyzer); - } - if (!analyzers.containsKey("default_search")) { - analyzers.put("default_search", analyzers.get("default")); + if (!analyzers.containsKey(DEFAULT_ANALYZER_NAME)) { + analyzers.put(DEFAULT_ANALYZER_NAME, + produceAnalyzer(DEFAULT_ANALYZER_NAME, + new StandardAnalyzerProvider(indexSettings, null, DEFAULT_ANALYZER_NAME, Settings.Builder.EMPTY_SETTINGS), + tokenFilterFactoryFactories, charFilterFactoryFactories, tokenizerFactoryFactories)); } - if (!analyzers.containsKey("default_search_quoted")) { - analyzers.put("default_search_quoted", analyzers.get("default_search")); - } - - NamedAnalyzer defaultAnalyzer = analyzers.get("default"); + NamedAnalyzer defaultAnalyzer = analyzers.get(DEFAULT_ANALYZER_NAME); if (defaultAnalyzer == null) { throw new IllegalArgumentException("no default analyzer configured"); } defaultAnalyzer.checkAllowedInMode(AnalysisMode.ALL); + if (analyzers.containsKey("default_index")) { throw new IllegalArgumentException("setting [index.analysis.analyzer.default_index] is not supported anymore, use " + "[index.analysis.analyzer.default] instead for index [" + index.getName() + "]"); } - NamedAnalyzer defaultSearchAnalyzer = analyzers.getOrDefault("default_search", defaultAnalyzer); - NamedAnalyzer defaultSearchQuoteAnalyzer = analyzers.getOrDefault("default_search_quote", defaultSearchAnalyzer); for (Map.Entry analyzer : analyzers.entrySet()) { if (analyzer.getKey().startsWith("_")) { throw new IllegalArgumentException("analyzer name must not start with '_'. 
got \"" + analyzer.getKey() + "\""); } } - return new IndexAnalyzers(indexSettings, defaultAnalyzer, defaultSearchAnalyzer, defaultSearchQuoteAnalyzer, analyzers, normalizers, - whitespaceNormalizers); + return new IndexAnalyzers(indexSettings, analyzers, normalizers, whitespaceNormalizers); } private static NamedAnalyzer produceAnalyzer(String name, AnalyzerProvider analyzerFactory, diff --git a/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java b/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java index 4cb0b9aa324c9..4f1cbeb4022ac 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java @@ -25,9 +25,13 @@ import java.io.Closeable; import java.io.IOException; import java.util.Map; +import java.util.Objects; import java.util.stream.Stream; import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.index.analysis.AnalysisRegistry.DEFAULT_ANALYZER_NAME; +import static org.elasticsearch.index.analysis.AnalysisRegistry.DEFAULT_SEARCH_ANALYZER_NAME; +import static org.elasticsearch.index.analysis.AnalysisRegistry.DEFAULT_SEARCH_QUOTED_ANALYZER_NAME; /** * IndexAnalyzers contains a name to analyzer mapping for a specific index. @@ -37,23 +41,18 @@ * @see AnalysisRegistry */ public final class IndexAnalyzers extends AbstractIndexComponent implements Closeable { - private final NamedAnalyzer defaultIndexAnalyzer; - private final NamedAnalyzer defaultSearchAnalyzer; - private final NamedAnalyzer defaultSearchQuoteAnalyzer; private final Map analyzers; private final Map normalizers; private final Map whitespaceNormalizers; - public IndexAnalyzers(IndexSettings indexSettings, NamedAnalyzer defaultIndexAnalyzer, NamedAnalyzer defaultSearchAnalyzer, - NamedAnalyzer defaultSearchQuoteAnalyzer, Map analyzers, - Map normalizers, Map whitespaceNormalizers) { + public IndexAnalyzers(IndexSettings indexSettings, Map analyzers, Map normalizers, + Map whitespaceNormalizers) { super(indexSettings); - if (defaultIndexAnalyzer.name().equals("default") == false) { - throw new IllegalStateException("default analyzer must have the name [default] but was: [" + defaultIndexAnalyzer.name() + "]"); + Objects.requireNonNull(analyzers.get(DEFAULT_ANALYZER_NAME), "the default analyzer must be set"); + if (analyzers.get(DEFAULT_ANALYZER_NAME).name().equals(DEFAULT_ANALYZER_NAME) == false) { + throw new IllegalStateException( + "default analyzer must have the name [default] but was: [" + analyzers.get(DEFAULT_ANALYZER_NAME).name() + "]"); } - this.defaultIndexAnalyzer = defaultIndexAnalyzer; - this.defaultSearchAnalyzer = defaultSearchAnalyzer; - this.defaultSearchQuoteAnalyzer = defaultSearchQuoteAnalyzer; this.analyzers = unmodifiableMap(analyzers); this.normalizers = unmodifiableMap(normalizers); this.whitespaceNormalizers = unmodifiableMap(whitespaceNormalizers); @@ -84,21 +83,21 @@ public NamedAnalyzer getWhitespaceNormalizer(String name) { * Returns the default index analyzer for this index */ public NamedAnalyzer getDefaultIndexAnalyzer() { - return defaultIndexAnalyzer; + return analyzers.get(DEFAULT_ANALYZER_NAME); } /** - * Returns the default search analyzer for this index + * Returns the default search analyzer for this index. 
If not set, this will return the default analyzer */ public NamedAnalyzer getDefaultSearchAnalyzer() { - return defaultSearchAnalyzer; + return analyzers.getOrDefault(DEFAULT_SEARCH_ANALYZER_NAME, getDefaultIndexAnalyzer()); } /** * Returns the default search quote analyzer for this index */ public NamedAnalyzer getDefaultSearchQuoteAnalyzer() { - return defaultSearchQuoteAnalyzer; + return analyzers.getOrDefault(DEFAULT_SEARCH_QUOTED_ANALYZER_NAME, getDefaultSearchAnalyzer()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java index 5776edd69fc83..5325821d050cb 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java @@ -137,14 +137,7 @@ public TokenFilterFactory getSynonymFilter() { if (allowForSynonymParsing) { return this; } - if (version.onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); - } - else { - DEPRECATION_LOGGER.deprecatedAndMaybeLog(name(), "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - return this; - } + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } }; } @@ -164,14 +157,7 @@ public TokenFilterFactory getSynonymFilter() { if (allowForSynonymParsing) { return this; } - if (version.onOrAfter(Version.V_7_0_0)) { - throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); - } - else { - DEPRECATION_LOGGER.deprecatedAndMaybeLog(name(), "Token filter [" + name() - + "] will not be usable to parse synonyms after v7.0"); - return this; - } + throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } }; } diff --git a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 091de68c514d0..b1812c40e03eb 100644 --- a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -251,7 +251,7 @@ public IndexWarmer.TerminationHandle warmReader(final IndexShard indexShard, fin } if (hasNested) { - warmUp.add(Queries.newNonNestedFilter(indexSettings.getIndexVersionCreated())); + warmUp.add(Queries.newNonNestedFilter()); } final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size() * warmUp.size()); diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java index 5acac256dbd50..b981bdb8a8421 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java @@ -157,11 +157,11 @@ public void verifyEngineBeforeIndexClosing() throws IllegalStateException { protected final DirectoryReader wrapReader(DirectoryReader reader, Function readerWrapperFunction) throws IOException { - reader = ElasticsearchDirectoryReader.wrap(reader, engineConfig.getShardId()); if (engineConfig.getIndexSettings().isSoftDeleteEnabled()) { reader = new SoftDeletesDirectoryReaderWrapper(reader, Lucene.SOFT_DELETES_FIELD); } - return 
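With the dedicated fields removed, IndexAnalyzers now resolves the search analyzers by falling back through the analyzers map: default_search falls back to default, and default_search_quoted falls back to whatever default_search resolves to. A small sketch of that fallback chain, with plain strings standing in for NamedAnalyzer instances:

    import java.util.Map;

    public class AnalyzerFallbackDemo {
        public static void main(String[] args) {
            Map<String, String> analyzers = Map.of("default", "standard");

            String indexAnalyzer = analyzers.get("default");
            String searchAnalyzer = analyzers.getOrDefault("default_search", indexAnalyzer);
            String searchQuoteAnalyzer = analyzers.getOrDefault("default_search_quoted", searchAnalyzer);

            System.out.println(searchAnalyzer);       // standard
            System.out.println(searchQuoteAnalyzer);  // standard
        }
    }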
readerWrapperFunction.apply(reader); + reader = readerWrapperFunction.apply(reader); + return ElasticsearchDirectoryReader.wrap(reader, engineConfig.getShardId()); } protected DirectoryReader open(IndexCommit commit) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java b/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java index aafe9f6ba03de..30c9606acd928 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java @@ -202,19 +202,7 @@ public static DynamicTemplate parse(String name, Map conf, XContentFieldType xcontentFieldType = null; if (matchMappingType != null && matchMappingType.equals("*") == false) { - try { - xcontentFieldType = XContentFieldType.fromString(matchMappingType); - } catch (IllegalArgumentException e) { - if (indexVersionCreated.onOrAfter(Version.V_6_0_0_alpha1)) { - throw e; - } else { - deprecationLogger.deprecated("match_mapping_type [" + matchMappingType + "] is invalid and will be ignored: " - + e.getMessage()); - // this template is on an unknown type so it will never match anything - // null indicates that the template should be ignored - return null; - } - } + xcontentFieldType = XContentFieldType.fromString(matchMappingType); } final MatchType matchType = MatchType.fromString(matchPattern); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java index 32c44fd5f55a0..12e53a5f9d4b9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java @@ -24,7 +24,6 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; -import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -202,10 +201,7 @@ public void preParse(ParseContext context) { } @Override - public void postParse(ParseContext context) throws IOException { - if (context.indexSettings().getIndexVersionCreated().before(Version.V_6_1_0)) { - super.parse(context); - } + public void postParse(ParseContext context) { } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 20b4bb37cc7aa..099ae9b2aa7a2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -235,13 +235,14 @@ public String typeName() { return CONTENT_TYPE; } - public NamedAnalyzer normalizer() { + private NamedAnalyzer normalizer() { return normalizer; } public void setNormalizer(NamedAnalyzer normalizer) { checkIfFrozen(); this.normalizer = normalizer; + setIndexAnalyzer(normalizer); } public boolean splitQueriesOnWhitespace() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java index 8c032402b5090..7eb8d180547c1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SeqNoFieldMapper.java @@ 
-28,7 +28,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -255,15 +254,9 @@ public void postParse(ParseContext context) throws IOException { // we share the parent docs fields to ensure good compression SequenceIDFields seqID = context.seqID(); assert seqID != null; - final Version versionCreated = context.mapperService().getIndexSettings().getIndexVersionCreated(); - final boolean includePrimaryTerm = versionCreated.before(Version.V_6_1_0); for (Document doc : context.nonRootDocuments()) { doc.add(seqID.seqNo); doc.add(seqID.seqNoDocValue); - if (includePrimaryTerm) { - // primary terms are used to distinguish between parent and nested docs since 6.1.0 - doc.add(seqID.primaryTerm); - } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java index c4d9ef966ca3d..cb17f182ef77a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java @@ -140,7 +140,7 @@ public Query termsQuery(List values, QueryShardContext context) { .anyMatch(indexType::equals)) { if (context.getMapperService().hasNested()) { // type filters are expected not to match nested docs - return Queries.newNonNestedFilter(context.indexVersionCreated()); + return Queries.newNonNestedFilter(); } else { return new MatchAllDocsQuery(); } diff --git a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java index 7a2373e5ad8b5..c5894a3e1c018 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ExistsQueryBuilder.java @@ -19,14 +19,11 @@ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -148,10 +145,6 @@ public static Query newFilter(QueryShardContext context, String fieldPattern) { fields = context.simpleMatchToIndexNames(fieldPattern); } - if (context.indexVersionCreated().before(Version.V_6_1_0)) { - return newLegacyExistsQuery(context, fields); - } - if (fields.size() == 1) { String field = fields.iterator().next(); return newFieldExistsQuery(context, field); @@ -164,28 +157,6 @@ public static Query newFilter(QueryShardContext context, String fieldPattern) { return new ConstantScoreQuery(boolFilterBuilder.build()); } - private static Query newLegacyExistsQuery(QueryShardContext context, Collection fields) { - // We create TermsQuery directly here rather than using FieldNamesFieldType.termsQuery() - // so we don't end up with deprecation warnings - if (fields.size() == 1) { - Query filter = newLegacyExistsQuery(context, fields.iterator().next()); - return new ConstantScoreQuery(filter); - } - - 
BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder(); - for (String field : fields) { - Query filter = newLegacyExistsQuery(context, field); - boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD); - } - return new ConstantScoreQuery(boolFilterBuilder.build()); - } - - private static Query newLegacyExistsQuery(QueryShardContext context, String field) { - MappedFieldType fieldType = context.fieldMapper(field); - String fieldName = fieldType != null ? fieldType.name() : field; - return new TermQuery(new Term(FieldNamesFieldMapper.NAME, fieldName)); - } - private static Query newFieldExistsQuery(QueryShardContext context, String field) { MappedFieldType fieldType = context.getMapperService().fullName(field); if (fieldType == null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index ee8062308ac11..fecf5c8407e98 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -281,7 +281,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { Query innerQuery; ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); if (objectMapper == null) { - parentFilter = context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated())); + parentFilter = context.bitsetFilter(Queries.newNonNestedFilter()); } else { parentFilter = context.bitsetFilter(objectMapper.nestedTypeFilter()); } @@ -388,7 +388,7 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException { SearchHit hit = hits[i]; Query rawParentFilter; if (parentObjectMapper == null) { - rawParentFilter = Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated()); + rawParentFilter = Queries.newNonNestedFilter(); } else { rawParentFilter = parentObjectMapper.nestedTypeFilter(); } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index 61beddb776c91..892056674019f 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -180,6 +180,18 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L */ private RetentionLeases retentionLeases = RetentionLeases.EMPTY; + /** + * The primary term of the most-recently persisted retention leases. This is used to check if we need to persist the current retention + * leases. + */ + private long persistedRetentionLeasesPrimaryTerm; + + /** + * The version of the most-recently persisted retention leases. This is used to check if we need to persist the current retention + * leases. + */ + private long persistedRetentionLeasesVersion; + /** * Get all retention leases tracked on this shard. * @@ -342,7 +354,8 @@ public RetentionLeases loadRetentionLeases(final Path path) throws IOException { private final Object retentionLeasePersistenceLock = new Object(); /** - * Persists the current retention leases to their dedicated state file. + * Persists the current retention leases to their dedicated state file. If this version of the retention leases are already persisted + * then persistence is skipped. 
* * @param path the path to the directory containing the state file * @throws WriteStateException if an exception occurs writing the state file @@ -351,10 +364,16 @@ public void persistRetentionLeases(final Path path) throws WriteStateException { synchronized (retentionLeasePersistenceLock) { final RetentionLeases currentRetentionLeases; synchronized (this) { + if (retentionLeases.supersedes(persistedRetentionLeasesPrimaryTerm, persistedRetentionLeasesVersion) == false) { + logger.trace("skipping persisting retention leases [{}], already persisted", retentionLeases); + return; + } currentRetentionLeases = retentionLeases; } logger.trace("persisting retention leases [{}]", currentRetentionLeases); RetentionLeases.FORMAT.writeAndCleanup(currentRetentionLeases, path); + persistedRetentionLeasesPrimaryTerm = currentRetentionLeases.primaryTerm(); + persistedRetentionLeasesVersion = currentRetentionLeases.version(); } } @@ -758,10 +777,9 @@ public synchronized void activatePrimaryMode(final long localCheckpoint) { * @param applyingClusterStateVersion the cluster state version being applied when updating the allocation IDs from the master * @param inSyncAllocationIds the allocation IDs of the currently in-sync shard copies * @param routingTable the shard routing table - * @param pre60AllocationIds the allocation IDs of shards that are allocated to pre-6.0 nodes */ public synchronized void updateFromMaster(final long applyingClusterStateVersion, final Set inSyncAllocationIds, - final IndexShardRoutingTable routingTable, final Set pre60AllocationIds) { + final IndexShardRoutingTable routingTable) { assert invariant(); if (applyingClusterStateVersion > appliedClusterStateVersion) { // check that the master does not fabricate new in-sync entries out of thin air once we are in primary mode @@ -782,8 +800,7 @@ public synchronized void updateFromMaster(final long applyingClusterStateVersion final boolean inSync = inSyncAllocationIds.contains(initializingId); assert inSync == false : "update from master in primary mode has " + initializingId + " as in-sync but it does not exist locally"; - final long localCheckpoint = pre60AllocationIds.contains(initializingId) ? - SequenceNumbers.PRE_60_NODE_CHECKPOINT : SequenceNumbers.UNASSIGNED_SEQ_NO; + final long localCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO; final long globalCheckpoint = localCheckpoint; checkpoints.put(initializingId, new CheckpointState(localCheckpoint, globalCheckpoint, inSync, inSync)); } @@ -794,8 +811,7 @@ public synchronized void updateFromMaster(final long applyingClusterStateVersion } else { for (String initializingId : initializingAllocationIds) { if (shardAllocationId.equals(initializingId) == false) { - final long localCheckpoint = pre60AllocationIds.contains(initializingId) ? - SequenceNumbers.PRE_60_NODE_CHECKPOINT : SequenceNumbers.UNASSIGNED_SEQ_NO; + final long localCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO; final long globalCheckpoint = localCheckpoint; checkpoints.put(initializingId, new CheckpointState(localCheckpoint, globalCheckpoint, false, false)); } @@ -807,8 +823,7 @@ public synchronized void updateFromMaster(final long applyingClusterStateVersion checkpointState.inSync = true; checkpointState.tracked = true; } else { - final long localCheckpoint = pre60AllocationIds.contains(inSyncId) ? 
- SequenceNumbers.PRE_60_NODE_CHECKPOINT : SequenceNumbers.UNASSIGNED_SEQ_NO; + final long localCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO; final long globalCheckpoint = localCheckpoint; checkpoints.put(inSyncId, new CheckpointState(localCheckpoint, globalCheckpoint, true, true)); } @@ -1082,17 +1097,13 @@ private Runnable getMasterUpdateOperationFromCurrentState() { assert primaryMode == false; final long lastAppliedClusterStateVersion = appliedClusterStateVersion; final Set inSyncAllocationIds = new HashSet<>(); - final Set pre60AllocationIds = new HashSet<>(); checkpoints.entrySet().forEach(entry -> { if (entry.getValue().inSync) { inSyncAllocationIds.add(entry.getKey()); } - if (entry.getValue().getLocalCheckpoint() == SequenceNumbers.PRE_60_NODE_CHECKPOINT) { - pre60AllocationIds.add(entry.getKey()); - } }); final IndexShardRoutingTable lastAppliedRoutingTable = routingTable; - return () -> updateFromMaster(lastAppliedClusterStateVersion, inSyncAllocationIds, lastAppliedRoutingTable, pre60AllocationIds); + return () -> updateFromMaster(lastAppliedClusterStateVersion, inSyncAllocationIds, lastAppliedRoutingTable); } /** diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java index c503f1fa16377..c69a4c6fab042 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseActions.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; @@ -119,8 +120,8 @@ protected Response shardOperation(final T request, final ShardId shardId) { abstract void doRetentionLeaseAction(IndexShard indexShard, T request, ActionListener listener); @Override - protected Response newResponse() { - return new Response(); + protected Writeable.Reader getResponseReader() { + return Response::new; } @Override @@ -169,6 +170,10 @@ void doRetentionLeaseAction(final IndexShard indexShard, final AddRequest reques ActionListener.map(listener, r -> new Response())); } + @Override + protected Writeable.Reader getResponseReader() { + return Response::new; + } } @Override @@ -302,7 +307,7 @@ public ActionRequestValidationException validate() { @Override public void readFrom(final StreamInput in) throws IOException { super.readFrom(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); id = in.readString(); } @@ -392,6 +397,12 @@ public RemoveRequest(final ShardId shardId, final String id) { public static class Response extends ActionResponse { + public Response() { + } + + Response(StreamInput in) throws IOException { + super(in); + } } } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeases.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeases.java index 7c3b9e3c7b9c9..81fd7e2fce047 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeases.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeases.java @@ -70,13 +70,27 @@ public long version() { /** * Checks if this retention leases collection supersedes the specified retention leases collection. 
A retention leases collection - * supersedes another retention leases collection if its primary term is higher, or if for equal primary terms its version is higher + * supersedes another retention leases collection if its primary term is higher, or if for equal primary terms its version is higher. * * @param that the retention leases collection to test against * @return true if this retention leases collection supercedes the specified retention lease collection, otherwise false */ - public boolean supersedes(final RetentionLeases that) { - return primaryTerm > that.primaryTerm || primaryTerm == that.primaryTerm && version > that.version; + boolean supersedes(final RetentionLeases that) { + return supersedes(that.primaryTerm, that.version); + } + + /** + * Checks if this retention leases collection would supersede a retention leases collection with the specified primary term and version. + * A retention leases collection supersedes another retention leases collection if its primary term is higher, or if for equal primary + * terms its version is higher. + * + * @param primaryTerm the primary term + * @param version the version + * @return true if this retention leases collection would supercedes a retention lease collection with the specified primary term and + * version + */ + boolean supersedes(final long primaryTerm, final long version) { + return this.primaryTerm > primaryTerm || this.primaryTerm == primaryTerm && this.version > version; } private final Map leases; @@ -203,7 +217,7 @@ public static RetentionLeases fromXContent(final XContentParser parser) { return PARSER.apply(parser, null); } - static final MetaDataStateFormat FORMAT = new MetaDataStateFormat("retention-leases-") { + static final MetaDataStateFormat FORMAT = new MetaDataStateFormat<>("retention-leases-") { @Override public void toXContent(final XContentBuilder builder, final RetentionLeases retentionLeases) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index ee67597efe31a..11e4fb81d9fbe 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.shard; import com.carrotsearch.hppc.ObjectLongMap; + import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.index.CheckIndex; @@ -433,8 +434,7 @@ public void updateShardState(final ShardRouting newRouting, final BiConsumer> primaryReplicaSyncer, final long applyingClusterStateVersion, final Set inSyncAllocationIds, - final IndexShardRoutingTable routingTable, - final Set pre60AllocationIds) throws IOException { + final IndexShardRoutingTable routingTable) throws IOException { final ShardRouting currentRouting; synchronized (mutex) { currentRouting = this.shardRouting; @@ -453,7 +453,7 @@ public void updateShardState(final ShardRouting newRouting, } if (newRouting.primary()) { - replicationTracker.updateFromMaster(applyingClusterStateVersion, inSyncAllocationIds, routingTable, pre60AllocationIds); + replicationTracker.updateFromMaster(applyingClusterStateVersion, inSyncAllocationIds, routingTable); } if (state == IndexShardState.POST_RECOVERY && newRouting.active()) { diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardId.java b/server/src/main/java/org/elasticsearch/index/shard/ShardId.java index 77e1ae00e964f..e3becbef7dd1e 
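The retention-lease persistence change above records the primary term and version of the most recently persisted collection and skips the disk write unless the in-memory collection supersedes them (higher term, or equal term and higher version). A simplified stand-in sketch of that check, not the ReplicationTracker API:

    // All names invented; only the skip-if-not-newer shape mirrors the change.
    final class LeasePersistenceState {
        private long persistedPrimaryTerm;
        private long persistedVersion;

        // Higher primary term wins; for equal terms, higher version wins.
        private static boolean supersedes(long term, long version, long otherTerm, long otherVersion) {
            return term > otherTerm || (term == otherTerm && version > otherVersion);
        }

        synchronized boolean persistIfNewer(long currentTerm, long currentVersion) {
            if (supersedes(currentTerm, currentVersion, persistedPrimaryTerm, persistedVersion) == false) {
                return false;                      // already on disk, skip the write
            }
            // ... write the state file here ...
            persistedPrimaryTerm = currentTerm;
            persistedVersion = currentVersion;
            return true;
        }
    }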
100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardId.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardId.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -33,20 +32,12 @@ /** * Allows for shard level components to be injected with the shard id. */ -public class ShardId implements Streamable, Comparable, ToXContentFragment, Writeable { +public class ShardId implements Comparable, ToXContentFragment, Writeable { private final Index index; - private final int shardId; - private final int hashCode; - public ShardId(StreamInput in) throws IOException { - index = new Index(in); - shardId = in.readVInt(); - hashCode = computeHashCode(); - } - public ShardId(Index index, int shardId) { this.index = index; this.shardId = shardId; @@ -57,6 +48,18 @@ public ShardId(String index, String indexUUID, int shardId) { this(new Index(index, indexUUID), shardId); } + public ShardId(StreamInput in) throws IOException { + index = new Index(in); + shardId = in.readVInt(); + hashCode = computeHashCode(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + index.writeTo(out); + out.writeVInt(shardId); + } + public Index getIndex() { return index; } @@ -113,21 +116,6 @@ private int computeHashCode() { return result; } - public static ShardId readShardId(StreamInput in) throws IOException { - return new ShardId(in); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - index.writeTo(out); - out.writeVInt(shardId); - } - @Override public int compareTo(ShardId o) { if (o.getId() == shardId) { diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java index fb33cceaa49d8..4082293d9f2eb 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java @@ -41,7 +41,6 @@ import org.apache.lucene.util.BitSetIterator; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; -import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.common.lucene.search.Queries; @@ -65,13 +64,9 @@ final class ShardSplittingQuery extends Query { private final BitSetProducer nestedParentBitSetProducer; ShardSplittingQuery(IndexMetaData indexMetaData, int shardId, boolean hasNested) { - if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_rc2)) { - throw new IllegalArgumentException("Splitting query can only be executed on an index created with version " - + Version.V_6_0_0_rc2 + " or higher"); - } this.indexMetaData = indexMetaData; this.shardId = shardId; - this.nestedParentBitSetProducer = hasNested ? newParentDocBitSetProducer(indexMetaData.getCreationVersion()) : null; + this.nestedParentBitSetProducer = hasNested ? 
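ShardId now implements only Writeable: deserialization happens in a constructor that takes the input stream, writeTo is the single serialization entry point, and the mutable readFrom is gone. A generic sketch of that shape using java.io streams and an invented DemoId type:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    final class DemoId {
        private final String index;
        private final int shard;

        DemoId(String index, int shard) {
            this.index = index;
            this.shard = shard;
        }

        DemoId(DataInput in) throws IOException {   // read-side constructor, fields stay final
            this.index = in.readUTF();
            this.shard = in.readInt();
        }

        void writeTo(DataOutput out) throws IOException {
            out.writeUTF(index);
            out.writeInt(shard);
        }
    }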
newParentDocBitSetProducer() : null; } @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { @@ -343,9 +338,9 @@ public float matchCost() { * than once. There is no point in using BitsetFilterCache#BitSetProducerWarmer since we use this only as a delete by query which is * executed on a recovery-private index writer. There is no point in caching it and it won't have a cache hit either. */ - private static BitSetProducer newParentDocBitSetProducer(Version indexVersionCreated) { + private static BitSetProducer newParentDocBitSetProducer() { return context -> { - Query query = Queries.newNonNestedFilter(indexVersionCreated); + Query query = Queries.newNonNestedFilter(); final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context); final IndexSearcher searcher = new IndexSearcher(topLevelContext); searcher.setQueryCache(null); diff --git a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index c97c19eb0f3ec..0e87b9e2357e5 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.shard; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; @@ -31,7 +32,6 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.routing.RecoverySource; @@ -119,8 +119,6 @@ boolean recoverFromLocalShards(BiConsumer mappingUpdate Sort indexSort = indexShard.getIndexSort(); final boolean hasNested = indexShard.mapperService().hasNested(); final boolean isSplit = sourceMetaData.getNumberOfShards() < indexShard.indexSettings().getNumberOfShards(); - assert isSplit == false || sourceMetaData.getCreationVersion().onOrAfter(Version.V_6_0_0_alpha1) : "for split we require a " + - "single type but the index is created before 6.0.0"; return executeRecovery(indexShard, () -> { logger.debug("starting recovery from local shards {}", shards); try { diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index a05870b842f2d..b53c3d8da427c 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -44,7 +44,6 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.DocumentMapperForType; import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; @@ -235,12 +234,7 @@ private static Analyzer getAnalyzerAtField(IndexShard indexShard, String field, analyzer = mapperService.getIndexAnalyzers().get(perFieldAnalyzer.get(field).toString()); } else { MappedFieldType fieldType = mapperService.fullName(field); - if (fieldType instanceof 
KeywordFieldMapper.KeywordFieldType) { - KeywordFieldMapper.KeywordFieldType keywordFieldType = (KeywordFieldMapper.KeywordFieldType) fieldType; - analyzer = keywordFieldType.normalizer() == null ? keywordFieldType.indexAnalyzer() : keywordFieldType.normalizer(); - } else { - analyzer = fieldType.indexAnalyzer(); - } + analyzer = fieldType.indexAnalyzer(); } if (analyzer == null) { analyzer = mapperService.getIndexAnalyzers().getDefaultIndexAnalyzer(); diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index 841201b321549..7626270b6cdc5 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -22,7 +22,6 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.index.Term; import org.apache.lucene.store.AlreadyClosedException; -import org.elasticsearch.Version; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -216,15 +215,8 @@ private ArrayList recoverFromFiles(Checkpoint checkpoint) throws try (ReleasableLock lock = writeLock.acquire()) { logger.debug("open uncommitted translog checkpoint {}", checkpoint); - final long minGenerationToRecoverFrom; - if (checkpoint.minTranslogGeneration < 0) { - final Version indexVersionCreated = indexSettings().getIndexVersionCreated(); - assert indexVersionCreated.before(Version.V_6_0_0_beta1) : - "no minTranslogGeneration in checkpoint, but index was created with version [" + indexVersionCreated + "]"; - minGenerationToRecoverFrom = deletionPolicy.getMinTranslogGenerationForRecovery(); - } else { - minGenerationToRecoverFrom = checkpoint.minTranslogGeneration; - } + final long minGenerationToRecoverFrom = checkpoint.minTranslogGeneration; + assert minGenerationToRecoverFrom >= 0 : "minTranslogGeneration should be non-negative"; final String checkpointTranslogFile = getFilename(checkpoint.generation); // we open files in reverse order in order to validate tranlsog uuid before we start traversing the translog based on @@ -561,9 +553,10 @@ public Location add(final Operation operation) throws IOException { * @return {@code true} if the current generation should be rolled to a new generation */ public boolean shouldRollGeneration() { - final long size = this.current.sizeInBytes(); final long threshold = this.indexSettings.getGenerationThresholdSize().getBytes(); - return size > threshold; + try (ReleasableLock ignored = readLock.acquire()) { + return this.current.sizeInBytes() > threshold; + } } /** @@ -882,6 +875,7 @@ public Location(long generation, long translogLocation, int size) { this.size = size; } + @Override public String toString() { return "[generation: " + generation + ", location: " + translogLocation + ", size: " + size + "]"; } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index a5f6a2e9f1190..be5e1cae4fa8e 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -66,6 +66,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.EsExecutors; +import 
org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -1219,7 +1220,13 @@ public void run() { } // Reschedule itself to run again if not closed if (closed.get() == false) { - threadPool.schedule(this, interval, ThreadPool.Names.SAME); + try { + threadPool.schedule(this, interval, ThreadPool.Names.SAME); + } catch (EsRejectedExecutionException e) { + if (closed.get() == false) { + throw e; + } + } } } diff --git a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 821e095fc20b0..1ce39e283b1da 100644 --- a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -24,7 +24,6 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.store.LockObtainFailedException; import org.elasticsearch.ResourceAlreadyExistsException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -35,7 +34,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RecoverySource.Type; import org.elasticsearch.cluster.routing.RoutingNode; @@ -94,8 +92,6 @@ import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.Consumer; -import java.util.stream.Collectors; -import java.util.stream.Stream; import static org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.CLOSED; import static org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.DELETED; @@ -149,7 +145,7 @@ public IndicesClusterStateService( final RetentionLeaseBackgroundSyncAction retentionLeaseBackgroundSyncAction) { this( settings, - (AllocatedIndices>) indicesService, + indicesService, clusterService, threadPool, recoveryTargetService, @@ -630,21 +626,8 @@ private void updateShard(DiscoveryNodes nodes, ShardRouting shardRouting, Shard primaryTerm = indexMetaData.primaryTerm(shard.shardId().id()); final Set inSyncIds = indexMetaData.inSyncAllocationIds(shard.shardId().id()); final IndexShardRoutingTable indexShardRoutingTable = routingTable.shardRoutingTable(shardRouting.shardId()); - final Set pre60AllocationIds = indexShardRoutingTable.assignedShards() - .stream() - .flatMap(shr -> { - if (shr.relocating()) { - return Stream.of(shr, shr.getTargetRelocatingShard()); - } else { - return Stream.of(shr); - } - }) - .filter(shr -> nodes.get(shr.currentNodeId()).getVersion().before(Version.V_6_0_0_alpha1)) - .map(ShardRouting::allocationId) - .map(AllocationId::getId) - .collect(Collectors.toSet()); shard.updateShardState(shardRouting, primaryTerm, primaryReplicaSyncer::resync, clusterState.version(), - inSyncIds, indexShardRoutingTable, pre60AllocationIds); + inSyncIds, indexShardRoutingTable); } catch (Exception e) { 
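// Editor's note (not part of this patch): a minimal, self-contained sketch of the
// rescheduling pattern introduced above in IndicesService -- swallow the rejection
// only when it happens because the pool is shutting down, otherwise rethrow.
// The names below (CacheCleaner, scheduler, closed) are illustrative assumptions,
// not the actual Elasticsearch classes.
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

class CacheCleaner implements Runnable {
    private final ScheduledExecutorService scheduler;
    private final AtomicBoolean closed = new AtomicBoolean(false);

    CacheCleaner(ScheduledExecutorService scheduler) {
        this.scheduler = scheduler;
    }

    @Override
    public void run() {
        // ... periodic cleanup work would go here ...
        if (closed.get() == false) {
            try {
                // reschedule ourselves for the next interval
                scheduler.schedule(this, 1, TimeUnit.MINUTES);
            } catch (RejectedExecutionException e) {
                // the executor may reject the task while the node is closing;
                // only propagate the rejection if we are not actually closed
                if (closed.get() == false) {
                    throw e;
                }
            }
        }
    }
}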
failAndRemoveShard(shardRouting, true, "failed updating shard routing entry", e, clusterState); return; @@ -810,7 +793,7 @@ public interface Shard { * - Updates and persists the new routing value. * - Updates the primary term if this shard is a primary. * - Updates the allocation ids that are tracked by the shard if it is a primary. - * See {@link ReplicationTracker#updateFromMaster(long, Set, IndexShardRoutingTable, Set)} for details. + * See {@link ReplicationTracker#updateFromMaster(long, Set, IndexShardRoutingTable)} for details. * * @param shardRouting the new routing entry * @param primaryTerm the new primary term @@ -826,8 +809,7 @@ void updateShardState(ShardRouting shardRouting, BiConsumer> primaryReplicaSyncer, long applyingClusterStateVersion, Set inSyncAllocationIds, - IndexShardRoutingTable routingTable, - Set pre60AllocationIds) throws IOException; + IndexShardRoutingTable routingTable) throws IOException; } public interface AllocatedIndex extends Iterable, IndexComponent { diff --git a/server/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java b/server/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java index bd0451498c6ba..b44feab6e603f 100644 --- a/server/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java +++ b/server/src/main/java/org/elasticsearch/indices/flush/ShardsSyncedFlushResult.java @@ -148,7 +148,7 @@ public void readFrom(StreamInput in) throws IOException { shardResponses.put(shardRouting, response); } syncId = in.readOptionalString(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); totalShards = in.readInt(); } diff --git a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index 79a2d6c3c0a91..921a8f9cc7c47 100644 --- a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -567,7 +567,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - this.shardId = ShardId.readShardId(in); + this.shardId = new ShardId(in); } public ShardId shardId() { @@ -647,7 +647,7 @@ public ShardSyncedFlushRequest(ShardId shardId, String syncId, Engine.CommitId e @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); expectedCommitId = new Engine.CommitId(in); syncId = in.readString(); } @@ -749,7 +749,7 @@ public InFlightOpsRequest(ShardId shardId) { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); } @Override diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java index 8d847dcef91c7..2f166e59ac836 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java @@ -49,7 +49,7 @@ public class RecoveryCleanFilesRequest extends TransportRequest { RecoveryCleanFilesRequest(StreamInput in) throws IOException { super(in); recoveryId = in.readLong(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); snapshotFiles = new 
Store.MetadataSnapshot(in); totalTranslogOps = in.readVInt(); if (in.getVersion().onOrAfter(Version.V_7_2_0)) { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFileChunkRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFileChunkRequest.java index 196e2cdf76ba9..59480ccbe4233 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFileChunkRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFileChunkRequest.java @@ -96,7 +96,7 @@ public long sourceThrottleTimeInNanos() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); recoveryId = in.readLong(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); String name = in.readString(); position = in.readVLong(); long length = in.readVLong(); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFilesInfoRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFilesInfoRequest.java index 2143127c234f5..b1fdef06ed518 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFilesInfoRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFilesInfoRequest.java @@ -66,7 +66,7 @@ public ShardId shardId() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); recoveryId = in.readLong(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); int size = in.readVInt(); phase1FileNames = new ArrayList<>(size); for (int i = 0; i < size; i++) { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFinalizeRecoveryRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFinalizeRecoveryRequest.java index c21a112320c40..82036338be7ca 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFinalizeRecoveryRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryFinalizeRecoveryRequest.java @@ -57,7 +57,7 @@ public long globalCheckpoint() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); recoveryId = in.readLong(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); globalCheckpoint = in.readZLong(); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryHandoffPrimaryContextRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryHandoffPrimaryContextRequest.java index a4a87cf2d60ba..bccb917646fe7 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryHandoffPrimaryContextRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryHandoffPrimaryContextRequest.java @@ -72,7 +72,7 @@ ReplicationTracker.PrimaryContext primaryContext() { public void readFrom(final StreamInput in) throws IOException { super.readFrom(in); recoveryId = in.readLong(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); primaryContext = new ReplicationTracker.PrimaryContext(in); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java index 1d2be7bafda8a..6e2557176a82e 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryPrepareForTranslogOperationsRequest.java @@ -43,7 +43,7 @@ 
class RecoveryPrepareForTranslogOperationsRequest extends TransportRequest { RecoveryPrepareForTranslogOperationsRequest(StreamInput in) throws IOException { super.readFrom(in); recoveryId = in.readLong(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); totalTranslogOps = in.readVInt(); fileBasedRecovery = in.readBoolean(); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java index 7393eccc44e86..18018fc7db054 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryState.java @@ -134,7 +134,7 @@ public RecoveryState(ShardRouting shardRouting, DiscoveryNode targetNode, @Nulla public RecoveryState(StreamInput in) throws IOException { timer = new Timer(in); stage = Stage.fromId(in.readByte()); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); recoverySource = RecoverySource.readFrom(in); targetNode = new DiscoveryNode(in); sourceNode = in.readOptionalWriteable(DiscoveryNode::new); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index ce1eb3ac85589..1f2c9a0f578cc 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -26,7 +26,6 @@ import org.elasticsearch.Assertions; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.UUIDs; @@ -39,7 +38,6 @@ import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.index.seqno.RetentionLeases; -import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardNotRecoveringException; import org.elasticsearch.index.shard.IndexShardState; @@ -288,9 +286,6 @@ public void prepareForTranslogOperations(boolean fileBasedRecovery, int totalTra ActionListener.completeWith(listener, () -> { state().getTranslog().totalOperations(totalTranslogOps); indexShard().openEngineAndSkipTranslogRecovery(); - assert indexShard.getGlobalCheckpoint() >= indexShard.seqNoStats().getMaxSeqNo() || - indexShard.indexSettings().getIndexVersionCreated().before(Version.V_7_2_0) - : "global checkpoint is not initialized [" + indexShard.seqNoStats() + "]"; return null; }); } @@ -396,9 +391,6 @@ public void cleanFiles(int totalTranslogOps, long globalCheckpoint, Store.Metada store.incRef(); try { store.cleanupAndVerify("recovery CleanFilesRequestHandler", sourceMetaData); - assert globalCheckpoint >= Long.parseLong(sourceMetaData.getCommitUserData().get(SequenceNumbers.MAX_SEQ_NO)) - || indexShard.indexSettings().getIndexVersionCreated().before(Version.V_7_2_0) : - "invalid global checkpoint[" + globalCheckpoint + "] source_meta_data [" + sourceMetaData.getCommitUserData() + "]"; final String translogUUID = Translog.createEmptyTranslog( indexShard.shardPath().resolveTranslog(), globalCheckpoint, shardId, indexShard.getPendingPrimaryTerm()); store.associateIndexWithNewTranslog(translogUUID); diff --git 
a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java index 9608455216746..ca3d85de419b9 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java @@ -100,7 +100,7 @@ long mappingVersionOnPrimary() { RecoveryTranslogOperationsRequest(StreamInput in) throws IOException { super.readFrom(in); recoveryId = in.readLong(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); operations = Translog.readOperations(in, "recovery"); totalTranslogOps = in.readVInt(); maxSeenAutoIdTimestampOnPrimary = in.readZLong(); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryWaitForClusterStateRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryWaitForClusterStateRequest.java index e45cf3f7d1611..d8ac7e59d73d1 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryWaitForClusterStateRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryWaitForClusterStateRequest.java @@ -56,7 +56,7 @@ public long clusterStateVersion() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); recoveryId = in.readLong(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); clusterStateVersion = in.readVLong(); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java index 6c4e7b744b729..4ec20d17ac5be 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java @@ -114,7 +114,7 @@ public long startingSeqNo() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); recoveryId = in.readLong(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); targetAllocationId = in.readString(); sourceNode = new DiscoveryNode(in); targetNode = new DiscoveryNode(in); diff --git a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 7a07d8f62b229..bfb271c7b089e 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -412,7 +412,7 @@ public void readFrom(StreamInput in) throws IOException { super.readFrom(in); clusterName = new ClusterName(in); indexUUID = in.readString(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); timeout = new TimeValue(in.readLong(), TimeUnit.MILLISECONDS); } diff --git a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 5ebbdab39835b..bc041b4b322ae 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -203,7 +203,7 @@ public static StoreFilesMetaData readStoreFilesMetaData(StreamInput in) throws I @Override public void readFrom(StreamInput in) throws IOException { - shardId = 
ShardId.readShardId(in); + shardId = new ShardId(in); this.metadataSnapshot = new Store.MetadataSnapshot(in); } @@ -245,7 +245,7 @@ public Request(ShardId shardId, DiscoveryNode[] nodes) { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); } @Override @@ -288,7 +288,7 @@ public NodeRequest() { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); } @Override diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 4e8b81aea2e7b..782101763b7a4 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -40,6 +40,7 @@ import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterModule; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.InternalClusterInfoService; @@ -271,8 +272,9 @@ protected Node( nodeEnvironment = new NodeEnvironment(tmpSettings, environment); resourcesToClose.add(nodeEnvironment); - logger.info("node name [{}], node ID [{}]", - NODE_NAME_SETTING.get(tmpSettings), nodeEnvironment.nodeId()); + logger.info("node name [{}], node ID [{}], cluster name [{}]", + NODE_NAME_SETTING.get(tmpSettings), nodeEnvironment.nodeId(), + ClusterName.CLUSTER_NAME_SETTING.get(tmpSettings).value()); final JvmInfo jvmInfo = JvmInfo.jvmInfo(); logger.info( @@ -378,7 +380,6 @@ protected Node( PageCacheRecycler pageCacheRecycler = createPageCacheRecycler(settings); BigArrays bigArrays = createBigArrays(pageCacheRecycler, circuitBreakerService); - resourcesToClose.add(pageCacheRecycler); modules.add(settingsModule); List namedWriteables = Stream.of( NetworkModule.getNamedWriteables().stream(), @@ -471,8 +472,8 @@ protected Node( ).collect(Collectors.toSet()); final TransportService transportService = newTransportService(settings, transport, threadPool, networkModule.getTransportInterceptor(), localNodeFactory, settingsModule.getClusterSettings(), taskHeaders); - final GatewayMetaState gatewayMetaState = new GatewayMetaState(settings, nodeEnvironment, metaStateService, - metaDataIndexUpgradeService, metaDataUpgrader, transportService, clusterService, indicesService); + final GatewayMetaState gatewayMetaState = new GatewayMetaState(settings, metaStateService, + metaDataIndexUpgradeService, metaDataUpgrader, transportService, clusterService); final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); final SearchTransportService searchTransportService = new SearchTransportService(transportService, SearchExecutionStatsCollector.makeWrapper(responseCollectorService)); @@ -841,13 +842,13 @@ public synchronized void close() throws IOException { // Don't call shutdownNow here, it might break ongoing operations on Lucene indices. // See https://issues.apache.org/jira/browse/LUCENE-7248. We call shutdownNow in // awaitClose if the node doesn't finish closing within the specified time. 
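// Editor's note (not part of this patch): the repeated "ShardId.readShardId(in)" ->
// "new ShardId(in)" call-site changes above follow the Streamable -> Writeable
// convention used elsewhere in this patch: state is read in a constructor taking the
// stream and written back in writeTo, so fields can stay final and the static read
// helper plus mutable readFrom disappear. A minimal analogue of that shape, using
// plain java.io types so it stands alone; ExampleId is an illustrative name only.
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

final class ExampleId {
    private final String index;
    private final int shard;

    // read side: deserialize in the constructor, fields remain final
    ExampleId(DataInput in) throws IOException {
        this.index = in.readUTF();
        this.shard = in.readInt();
    }

    ExampleId(String index, int shard) {
        this.index = index;
        this.shard = shard;
    }

    // write side: mirrors the reading constructor field by field
    void writeTo(DataOutput out) throws IOException {
        out.writeUTF(index);
        out.writeInt(shard);
    }
}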
- toClose.add(() -> stopWatch.stop()); + toClose.add(() -> stopWatch.stop().start("node_environment")); toClose.add(injector.getInstance(NodeEnvironment.class)); - toClose.add(injector.getInstance(PageCacheRecycler.class)); + toClose.add(stopWatch::stop); if (logger.isTraceEnabled()) { - logger.trace("Close times for each service:\n{}", stopWatch.prettyPrint()); + toClose.add(() -> logger.trace("Close times for each service:\n{}", stopWatch.prettyPrint())); } IOUtils.close(toClose); logger.info("closed"); diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java index 617fc6998a62a..044caee41c55d 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java @@ -97,13 +97,6 @@ public void delete(BlobContainer blobContainer, String name) throws IOException blobContainer.deleteBlob(blobName(name)); } - /** - * Checks obj in the blob container - */ - public boolean exists(BlobContainer blobContainer, String name) { - return blobContainer.blobExists(blobName(name)); - } - public String blobName(String name) { return String.format(Locale.ROOT, blobNameFormat, name); } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 39a4011783a55..5ed73a0058cc5 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -395,9 +395,6 @@ public void initializeSnapshot(SnapshotId snapshotId, List indices, Met if (repositoryData.getAllSnapshotIds().stream().anyMatch(s -> s.getName().equals(snapshotName))) { throw new InvalidSnapshotNameException(metadata.name(), snapshotId.getName(), "snapshot with the same name already exists"); } - if (snapshotFormat.exists(blobContainer(), snapshotId.getUUID())) { - throw new InvalidSnapshotNameException(metadata.name(), snapshotId.getName(), "snapshot with the same name already exists"); - } // Write Global MetaData globalMetaDataFormat.write(clusterMetaData, blobContainer(), snapshotId.getUUID()); @@ -634,7 +631,10 @@ public String startVerification() { public void endVerification(String seed) { if (isReadOnly() == false) { try { - blobStore().delete(basePath().add(testBlobPrefix(seed))); + final String testPrefix = testBlobPrefix(seed); + final BlobContainer container = blobStore().blobContainer(basePath().add(testPrefix)); + container.deleteBlobsIgnoringIfNotExists(List.copyOf(container.listBlobs().keySet())); + blobStore().blobContainer(basePath()).deleteBlobIgnoringIfNotExists(testPrefix); } catch (IOException exp) { throw new RepositoryVerificationException(metadata.name(), "cannot delete test data at " + basePath(), exp); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java index bacc698b2a4f7..20370b27d4373 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java @@ -36,7 +36,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; public class 
RestNodesInfoAction extends BaseRestHandler { - private static final Set ALLOWED_METRICS = Sets.newHashSet( + static final Set ALLOWED_METRICS = Sets.newHashSet( "http", "ingest", "indices", @@ -69,6 +69,13 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final NodesInfoRequest nodesInfoRequest = prepareRequest(request); + settingsFilter.addFilterSettingParams(request); + + return channel -> client.admin().cluster().nodesInfo(nodesInfoRequest, new NodesResponseRestListener<>(channel)); + } + + static NodesInfoRequest prepareRequest(final RestRequest request) { String[] nodeIds; Set metrics; @@ -76,17 +83,18 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC // still, /_nodes/_local (or any other node id) should work and be treated as usual // this means one must differentiate between allowed metrics and arbitrary node ids in the same place if (request.hasParam("nodeId") && !request.hasParam("metrics")) { - Set metricsOrNodeIds = Strings.tokenizeByCommaToSet(request.param("nodeId", "_all")); + String nodeId = request.param("nodeId", "_all"); + Set metricsOrNodeIds = Strings.tokenizeByCommaToSet(nodeId); boolean isMetricsOnly = ALLOWED_METRICS.containsAll(metricsOrNodeIds); if (isMetricsOnly) { nodeIds = new String[]{"_all"}; metrics = metricsOrNodeIds; } else { - nodeIds = metricsOrNodeIds.toArray(new String[]{}); + nodeIds = Strings.tokenizeToStringArray(nodeId, ","); metrics = Sets.newHashSet("_all"); } } else { - nodeIds = Strings.splitStringByCommaToArray(request.param("nodeId", "_all")); + nodeIds = Strings.tokenizeToStringArray(request.param("nodeId", "_all"), ","); metrics = Strings.tokenizeByCommaToSet(request.param("metrics", "_all")); } @@ -108,10 +116,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC nodesInfoRequest.ingest(metrics.contains("ingest")); nodesInfoRequest.indices(metrics.contains("indices")); } - - settingsFilter.addFilterSettingParams(request); - - return channel -> client.admin().cluster().nodesInfo(nodesInfoRequest, new NodesResponseRestListener<>(channel)); + return nodesInfoRequest; } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java index bdb0052698253..075167e97ccb2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java @@ -19,12 +19,10 @@ package org.elasticsearch.rest.action.document; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.BaseRestHandler; @@ -38,10 +36,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestMultiTermVectorsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(RestTermVectorsAction.class)); - static final String TYPES_DEPRECATION_MESSAGE = 
"[types removal] " + - "Specifying types in multi term vector requests is deprecated."; public RestMultiTermVectorsAction(Settings settings, RestController controller) { super(settings); @@ -49,10 +43,6 @@ public RestMultiTermVectorsAction(Settings settings, RestController controller) controller.registerHandler(POST, "/_mtermvectors", this); controller.registerHandler(GET, "/{index}/_mtermvectors", this); controller.registerHandler(POST, "/{index}/_mtermvectors", this); - - // Deprecated typed endpoints. - controller.registerHandler(GET, "/{index}/{type}/_mtermvectors", this); - controller.registerHandler(POST, "/{index}/{type}/_mtermvectors", this); } @Override @@ -66,12 +56,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC TermVectorsRequest template = new TermVectorsRequest() .index(request.param("index")); - if (request.hasParam("type")) { - deprecationLogger.deprecatedAndMaybeLog("mtermvectors_with_types", TYPES_DEPRECATION_MESSAGE); - template.type(request.param("type")); - } else { - template.type(MapperService.SINGLE_MAPPING_NAME); - } + + template.type(MapperService.SINGLE_MAPPING_NAME); RestTermVectorsAction.readURIParameters(template, request); multiTermVectorsRequest.ids(Strings.commaDelimitedListToStringArray(request.param("ids"))); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java index 85ddd3b58ed49..3e136b368280d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java @@ -19,11 +19,9 @@ package org.elasticsearch.rest.action.document; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.VersionType; @@ -46,10 +44,8 @@ * TermVectorsRequest. */ public class RestTermVectorsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(RestTermVectorsAction.class)); public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " + - "Specifying types in term vector requests is deprecated."; + "Specifying types in term vector requests is deprecated."; public RestTermVectorsAction(Settings settings, RestController controller) { super(settings); @@ -57,12 +53,6 @@ public RestTermVectorsAction(Settings settings, RestController controller) { controller.registerHandler(POST, "/{index}/_termvectors", this); controller.registerHandler(GET, "/{index}/_termvectors/{id}", this); controller.registerHandler(POST, "/{index}/_termvectors/{id}", this); - - // Deprecated typed endpoints. 
- controller.registerHandler(GET, "/{index}/{type}/_termvectors", this); - controller.registerHandler(POST, "/{index}/{type}/_termvectors", this); - controller.registerHandler(GET, "/{index}/{type}/{id}/_termvectors", this); - controller.registerHandler(POST, "/{index}/{type}/{id}/_termvectors", this); } @Override @@ -73,16 +63,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { TermVectorsRequest termVectorsRequest; - if (request.hasParam("type")) { - deprecationLogger.deprecatedAndMaybeLog("termvectors_with_types", TYPES_DEPRECATION_MESSAGE); - termVectorsRequest = new TermVectorsRequest(request.param("index"), - request.param("type"), - request.param("id")); - } else { - termVectorsRequest = new TermVectorsRequest(request.param("index"), - MapperService.SINGLE_MAPPING_NAME, - request.param("id")); - } + termVectorsRequest = new TermVectorsRequest(request.param("index"), MapperService.SINGLE_MAPPING_NAME, request.param("id")); if (request.hasContentOrSourceParam()) { try (XContentParser parser = request.contentOrSourceParamParser()) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 05a20a0cc06b9..d9beba089857f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.search; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; @@ -28,7 +27,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentParser; @@ -50,10 +48,6 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestMultiSearchAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(RestMultiSearchAction.class)); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + - " Specifying types in multi search requests is deprecated."; private static final Set RESPONSE_PARAMS; @@ -73,10 +67,6 @@ public RestMultiSearchAction(Settings settings, RestController controller) { controller.registerHandler(GET, "/{index}/_msearch", this); controller.registerHandler(POST, "/{index}/_msearch", this); - // Deprecated typed endpoints. - controller.registerHandler(GET, "/{index}/{type}/_msearch", this); - controller.registerHandler(POST, "/{index}/{type}/_msearch", this); - this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); } @@ -88,14 +78,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { MultiSearchRequest multiSearchRequest = parseRequest(request, allowExplicitIndex); - - // Emit a single deprecation message if any search request contains types. 
- for (SearchRequest searchRequest : multiSearchRequest.requests()) { - if (searchRequest.types().length > 0) { - deprecationLogger.deprecatedAndMaybeLog("msearch_with_types", TYPES_DEPRECATION_MESSAGE); - break; - } - } return channel -> client.multiSearch(multiSearchRequest, new RestToXContentListener<>(channel)); } @@ -145,7 +127,6 @@ public static void parseMultiLineRequest(RestRequest request, IndicesOptions ind CheckedBiConsumer consumer) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - String[] types = Strings.splitStringByCommaToArray(request.param("type")); String searchType = request.param("search_type"); boolean ccsMinimizeRoundtrips = request.paramAsBoolean("ccs_minimize_roundtrips", true); String routing = request.param("routing"); @@ -153,7 +134,7 @@ public static void parseMultiLineRequest(RestRequest request, IndicesOptions ind final Tuple sourceTuple = request.contentOrSourceParam(); final XContent xContent = sourceTuple.v1().xContent(); final BytesReference data = sourceTuple.v2(); - MultiSearchRequest.readMultiLineFormat(data, xContent, consumer, indices, indicesOptions, types, routing, + MultiSearchRequest.readMultiLineFormat(data, xContent, consumer, indices, indicesOptions, Strings.EMPTY_ARRAY, routing, searchType, ccsMinimizeRoundtrips, request.getXContentRegistry(), allowExplicitIndex); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 00c08a124f1e4..95695bec4f0c1 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -19,13 +19,11 @@ package org.elasticsearch.rest.action.search; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; @@ -69,20 +67,12 @@ public class RestSearchAction extends BaseRestHandler { RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams); } - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestSearchAction.class)); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + - " Specifying types in search requests is deprecated."; - public RestSearchAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(GET, "/_search", this); controller.registerHandler(POST, "/_search", this); controller.registerHandler(GET, "/{index}/_search", this); controller.registerHandler(POST, "/{index}/_search", this); - - // Deprecated typed endpoints. 
- controller.registerHandler(GET, "/{index}/{type}/_search", this); - controller.registerHandler(POST, "/{index}/{type}/_search", this); } @Override @@ -166,10 +156,6 @@ public static void parseSearchRequest(SearchRequest searchRequest, RestRequest r searchRequest.scroll(new Scroll(parseTimeValue(scroll, null, "scroll"))); } - if (request.hasParam("type")) { - deprecationLogger.deprecatedAndMaybeLog("search_with_types", TYPES_DEPRECATION_MESSAGE); - searchRequest.types(Strings.splitStringByCommaToArray(request.param("type"))); - } searchRequest.routing(request.param("routing")); searchRequest.preference(request.param("preference")); searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions())); diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index d9a7fdb831efd..f0eaa0d51dadf 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -278,7 +278,7 @@ public Query buildFilteredQuery(Query query) { && typeFilter == null // when a _type filter is set, it will automatically exclude nested docs && new NestedHelper(mapperService()).mightMatchNestedDocs(query) && (aliasFilter == null || new NestedHelper(mapperService()).mightMatchNestedDocs(aliasFilter))) { - filters.add(Queries.newNonNestedFilter(mapperService().getIndexSettings().getIndexVersionCreated())); + filters.add(Queries.newNonNestedFilter()); } if (aliasFilter != null) { diff --git a/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java b/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java index f242cbd8eb901..39e6adede885b 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java +++ b/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java @@ -19,11 +19,13 @@ package org.elasticsearch.search; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.transport.TransportResponse; +import java.io.IOException; + /** * This class is a base class for all search related results. It contains the shard target it * was executed against, a shard index used to reference the result on the coordinating node @@ -32,15 +34,22 @@ * across search phases to ensure the same point in time snapshot is used for querying and * fetching etc. 
*/ -public abstract class SearchPhaseResult extends TransportResponse implements Streamable { +public abstract class SearchPhaseResult extends TransportResponse { private SearchShardTarget searchShardTarget; private int shardIndex = -1; protected long requestId; + protected SearchPhaseResult() { + + } + + protected SearchPhaseResult(StreamInput in) throws IOException { + super(in); + } + /** - * Returns the results request ID that is used to reference the search context on the executing - * node + * Returns the results request ID that is used to reference the search context on the executing node */ public long getRequestId() { return requestId; @@ -79,4 +88,9 @@ public QuerySearchResult queryResult() { * Returns the fetch result iff it's included in this response otherwise null */ public FetchSearchResult fetchResult() { return null; } + + @Override + public final void readFrom(StreamInput in) { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 0b22f5d660655..b703493b4d505 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.TopDocs; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.action.search.SearchType; @@ -39,7 +40,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; import org.elasticsearch.core.internal.io.IOUtils; @@ -302,21 +302,7 @@ protected void doClose() { } public void executeDfsPhase(ShardSearchRequest request, SearchTask task, ActionListener listener) { - rewriteShardRequest(request, new ActionListener() { - @Override - public void onResponse(ShardSearchRequest request) { - try { - listener.onResponse(executeDfsPhase(request, task)); - } catch (Exception e) { - onFailure(e); - } - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); + rewriteShardRequest(request, ActionListener.map(listener, r -> executeDfsPhase(r, task))); } private DfsSearchResult executeDfsPhase(ShardSearchRequest request, SearchTask task) throws IOException { @@ -351,30 +337,11 @@ private void loadOrExecuteQueryPhase(final ShardSearchRequest request, final Sea } public void executeQueryPhase(ShardSearchRequest request, SearchTask task, ActionListener listener) { - rewriteShardRequest(request, new ActionListener() { - @Override - public void onResponse(ShardSearchRequest request) { - try { - listener.onResponse(executeQueryPhase(request, task)); - } catch (Exception e) { - onFailure(e); - } - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); + rewriteShardRequest(request, ActionListener.map(listener, r -> executeQueryPhase(r, task))); } private void runAsync(long id, Supplier executable, ActionListener listener) { - getExecutor(id).execute(new AbstractRunnable() { - 
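// Editor's note (not part of this patch): the SearchService changes above replace
// hand-written anonymous listeners with ActionListener.map(listener, fn), which
// forwards failures unchanged and applies fn to successful responses. A
// self-contained analogue of that helper; the Listener interface below is an
// illustrative stand-in, not the Elasticsearch ActionListener API.
import java.util.function.Function;

interface Listener<T> {
    void onResponse(T response);
    void onFailure(Exception e);

    // wrap "delegate" so a successful R is converted to T via fn;
    // exceptions thrown by fn are routed to onFailure, like ordinary failures
    static <T, R> Listener<R> map(Listener<T> delegate, Function<R, T> fn) {
        return new Listener<R>() {
            @Override
            public void onResponse(R response) {
                T converted;
                try {
                    converted = fn.apply(response);
                } catch (Exception e) {
                    delegate.onFailure(e);
                    return;
                }
                delegate.onResponse(converted);
            }

            @Override
            public void onFailure(Exception e) {
                delegate.onFailure(e);
            }
        };
    }
}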
@Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - + getExecutor(id).execute(new ActionRunnable(listener) { @Override protected void doRun() { listener.onResponse(executable.get()); @@ -1058,12 +1025,7 @@ private void rewriteShardRequest(ShardSearchRequest request, ActionListener actionListener = ActionListener.wrap(r -> // now we need to check if there is a pending refresh and register shard.awaitShardSearchActive(b -> - executor.execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - + executor.execute(new ActionRunnable(listener) { @Override protected void doRun() { listener.onResponse(request); @@ -1093,9 +1055,10 @@ public InternalAggregation.ReduceContext createReduceContext(boolean finalReduce } public static final class CanMatchResponse extends SearchPhaseResult { - private boolean canMatch; + private final boolean canMatch; public CanMatchResponse(StreamInput in) throws IOException { + super(in); this.canMatch = in.readBoolean(); } @@ -1103,12 +1066,6 @@ public CanMatchResponse(boolean canMatch) { this.canMatch = canMatch; } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - canMatch = in.readBoolean(); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java index 6aadf8425997d..4bc3e3ae986da 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java +++ b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java @@ -49,7 +49,7 @@ public SearchShardTarget(StreamInput in) throws IOException { } else { nodeId = null; } - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); this.originalIndices = null; clusterAlias = in.readOptionalString(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java index af9ea84ec9de8..ae9000b4b75ff 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregations.java @@ -34,6 +34,7 @@ import java.util.Map; import java.util.Objects; +import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.xcontent.XContentParserUtils.parseTypedKeysObject; @@ -44,14 +45,14 @@ public class Aggregations implements Iterable, ToXContentFragment { public static final String AGGREGATIONS_FIELD = "aggregations"; - protected List aggregations = Collections.emptyList(); - protected Map aggregationsAsMap; - - protected Aggregations() { - } + protected final List aggregations; + private Map aggregationsAsMap; public Aggregations(List aggregations) { this.aggregations = aggregations; + if (aggregations.isEmpty()) { + aggregationsAsMap = emptyMap(); + } } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java index 8910ca25c337d..e1597c5c8c063 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java @@ -20,7 +20,7 @@ import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; @@ -34,14 +34,13 @@ import java.util.Map; import java.util.Objects; -import static java.util.Collections.emptyMap; - /** * An internal implementation of {@link Aggregations}. */ -public final class InternalAggregations extends Aggregations implements Streamable { +public final class InternalAggregations extends Aggregations implements Writeable { + + public static final InternalAggregations EMPTY = new InternalAggregations(Collections.emptyList()); - public static final InternalAggregations EMPTY = new InternalAggregations(); private static final Comparator INTERNAL_AGG_COMPARATOR = (agg1, agg2) -> { if (agg1.isMapped() == agg2.isMapped()) { return 0; @@ -52,16 +51,14 @@ public final class InternalAggregations extends Aggregations implements Streamab } }; - private List topLevelPipelineAggregators = Collections.emptyList(); - - private InternalAggregations() { - } + private final List topLevelPipelineAggregators; /** * Constructs a new aggregation. */ public InternalAggregations(List aggregations) { super(aggregations); + this.topLevelPipelineAggregators = Collections.emptyList(); } /** @@ -72,6 +69,19 @@ public InternalAggregations(List aggregations, List in.readNamedWriteable(InternalAggregation.class))); + this.topLevelPipelineAggregators = in.readList( + stream -> (SiblingPipelineAggregator)in.readNamedWriteable(PipelineAggregator.class)); + } + + @Override + @SuppressWarnings("unchecked") + public void writeTo(StreamOutput out) throws IOException { + out.writeNamedWriteableList((List)aggregations); + out.writeNamedWriteableList(topLevelPipelineAggregators); + } + /** * Returns the top-level pipeline aggregators. * Note that top-level pipeline aggregators become normal aggregation once the final reduction has been performed, after which they @@ -86,8 +96,7 @@ public List getTopLevelPipelineAggregators() { * {@link InternalAggregations} object found in the list. * Note that top-level pipeline aggregators are reduced only as part of the final reduction phase, otherwise they are left untouched. 
*/ - public static InternalAggregations reduce(List aggregationsList, - ReduceContext context) { + public static InternalAggregations reduce(List aggregationsList, ReduceContext context) { if (aggregationsList.isEmpty()) { return null; } @@ -123,27 +132,4 @@ public static InternalAggregations reduce(List aggregation } return new InternalAggregations(reducedAggregations, topLevelPipelineAggregators); } - - public static InternalAggregations readAggregations(StreamInput in) throws IOException { - InternalAggregations result = new InternalAggregations(); - result.readFrom(in); - return result; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - aggregations = in.readList(stream -> in.readNamedWriteable(InternalAggregation.class)); - if (aggregations.isEmpty()) { - aggregationsAsMap = emptyMap(); - } - this.topLevelPipelineAggregators = in.readList( - stream -> (SiblingPipelineAggregator)in.readNamedWriteable(PipelineAggregator.class)); - } - - @Override - @SuppressWarnings("unchecked") - public void writeTo(StreamOutput out) throws IOException { - out.writeNamedWriteableList((List)aggregations); - out.writeNamedWriteableList(topLevelPipelineAggregators); - } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java index 490c7a3687844..5626c8f21859d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java @@ -60,7 +60,7 @@ protected InternalSingleBucketAggregation(String name, long docCount, InternalAg protected InternalSingleBucketAggregation(StreamInput in) throws IOException { super(in); docCount = in.readVLong(); - aggregations = InternalAggregations.readAggregations(in); + aggregations = new InternalAggregations(in); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java index 8ce6304daf8ea..c862f458939ed 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java @@ -58,7 +58,7 @@ public InternalBucket(String key, long docCount, InternalAggregations aggregatio public InternalBucket(StreamInput in) throws IOException { key = in.readOptionalString(); docCount = in.readVLong(); - aggregations = InternalAggregations.readAggregations(in); + aggregations = new InternalAggregations(in); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 715537597d906..e6a7edd8c217d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -237,7 +237,7 @@ public static class InternalBucket extends InternalMultiBucketAggregation.Intern InternalBucket(StreamInput in, List sourceNames, List formats, int[] reverseMuls) throws IOException { this.key = new CompositeKey(in); 
this.docCount = in.readVLong(); - this.aggregations = InternalAggregations.readAggregations(in); + this.aggregations = new InternalAggregations(in); this.reverseMuls = reverseMuls; this.sourceNames = sourceNames; this.formats = formats; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java index 810126e851251..54dfc301b2dbc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregationBuilder.java @@ -40,6 +40,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -47,7 +48,7 @@ import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; public class FiltersAggregationBuilder extends AbstractAggregationBuilder - implements MultiBucketAggregationBuilder { + implements MultiBucketAggregationBuilder { public static final String NAME = "filters"; private static final ParseField FILTERS_FIELD = new ParseField("filters"); @@ -74,7 +75,7 @@ private FiltersAggregationBuilder(String name, List filters, boolea this.filters = new ArrayList<>(filters); if (keyed) { // internally we want to have a fixed order of filters, regardless of the order of the filters in the request - Collections.sort(this.filters, (KeyedFilter kf1, KeyedFilter kf2) -> kf1.key().compareTo(kf2.key())); + this.filters.sort(Comparator.comparing(KeyedFilter::key)); this.keyed = true; } else { this.keyed = false; @@ -220,9 +221,9 @@ protected AggregationBuilder doRewrite(QueryRewriteContext queryShardContext) th @Override protected AggregatorFactory doBuild(SearchContext context, AggregatorFactory parent, Builder subFactoriesBuilder) - throws IOException { + throws IOException { return new FiltersAggregatorFactory(name, filters, keyed, otherBucket, otherBucketKey, context, parent, - subFactoriesBuilder, metaData); + subFactoriesBuilder, metaData); } @Override @@ -248,15 +249,15 @@ protected XContentBuilder internalXContent(XContentBuilder builder, Params param } public static FiltersAggregationBuilder parse(String aggregationName, XContentParser parser) - throws IOException { + throws IOException { - List keyedFilters = null; - List nonKeyedFilters = null; + List filters = new ArrayList<>(); - XContentParser.Token token = null; + XContentParser.Token token; String currentFieldName = null; String otherBucketKey = null; Boolean otherBucket = null; + boolean keyed = false; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -265,61 +266,61 @@ public static FiltersAggregationBuilder parse(String aggregationName, XContentPa otherBucket = parser.booleanValue(); } else { throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); } } else if (token == XContentParser.Token.VALUE_STRING) { if (OTHER_BUCKET_KEY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { otherBucketKey = parser.text(); } else { throw new ParsingException(parser.getTokenLocation(), - 
"Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); } } else if (token == XContentParser.Token.START_OBJECT) { if (FILTERS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - keyedFilters = new ArrayList<>(); String key = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { key = parser.currentName(); } else { QueryBuilder filter = parseInnerQueryBuilder(parser); - keyedFilters.add(new FiltersAggregator.KeyedFilter(key, filter)); + filters.add(new FiltersAggregator.KeyedFilter(key, filter)); } } + keyed = true; } else { throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); } } else if (token == XContentParser.Token.START_ARRAY) { if (FILTERS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - nonKeyedFilters = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + List builders = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { QueryBuilder filter = parseInnerQueryBuilder(parser); - nonKeyedFilters.add(filter); + builders.add(filter); + } + for (int i = 0; i < builders.size(); i++) { + filters.add(new KeyedFilter(String.valueOf(i), builders.get(i))); } } else { throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); } } else { throw new ParsingException(parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); + "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."); } } + if (filters.isEmpty()) { + throw new IllegalArgumentException("[" + FILTERS_FIELD + "] cannot be empty."); + } + + FiltersAggregationBuilder factory = new FiltersAggregationBuilder(aggregationName, filters, keyed); + if (otherBucket == null && otherBucketKey != null) { // automatically enable the other bucket if a key is set, as per the doc otherBucket = true; } - - FiltersAggregationBuilder factory; - if (keyedFilters != null) { - factory = new FiltersAggregationBuilder(aggregationName, - keyedFilters.toArray(new FiltersAggregator.KeyedFilter[keyedFilters.size()])); - } else { - factory = new FiltersAggregationBuilder(aggregationName, - nonKeyedFilters.toArray(new QueryBuilder[nonKeyedFilters.size()])); - } if (otherBucket != null) { factory.otherBucket(otherBucket); } @@ -338,9 +339,9 @@ protected int doHashCode() { protected boolean doEquals(Object obj) { FiltersAggregationBuilder other = (FiltersAggregationBuilder) obj; return Objects.equals(filters, other.filters) - && Objects.equals(keyed, other.keyed) - && Objects.equals(otherBucket, other.otherBucket) - && Objects.equals(otherBucketKey, other.otherBucketKey); + && Objects.equals(keyed, other.keyed) + && Objects.equals(otherBucket, other.otherBucket) + && Objects.equals(otherBucketKey, other.otherBucketKey); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java index 56cf71b82cfdd..f6ebfa459c02d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java @@ -57,7 +57,7 @@ public InternalBucket(StreamInput in, boolean keyed) throws IOException { this.keyed = keyed; key = in.readOptionalString(); docCount = in.readVLong(); - aggregations = InternalAggregations.readAggregations(in); + aggregations = new InternalAggregations(in); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java index ed699e5e3edb2..93002d607eaf8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java @@ -51,7 +51,7 @@ public InternalGeoGridBucket(long hashAsLong, long docCount, InternalAggregation public InternalGeoGridBucket(StreamInput in) throws IOException { hashAsLong = in.readLong(); docCount = in.readVLong(); - aggregations = InternalAggregations.readAggregations(in); + aggregations = new InternalAggregations(in); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java index b86989fce168d..b08782f1fd37a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateIntervalWrapper.java @@ -288,11 +288,15 @@ public Rounding createRounding(ZoneId timeZone) { } else { // We're not sure what the interval was originally (legacy) so use old behavior of assuming // calendar first, then fixed. 
Required because fixed/cal overlap in places ("1h") - DateTimeUnit intervalAsUnit = tryIntervalAsCalendarUnit(); - if (intervalAsUnit != null) { - tzRoundingBuilder = Rounding.builder(tryIntervalAsCalendarUnit()); + DateTimeUnit calInterval = tryIntervalAsCalendarUnit(); + TimeValue fixedInterval = tryIntervalAsFixedUnit(); + if (calInterval != null) { + tzRoundingBuilder = Rounding.builder(calInterval); + } else if (fixedInterval != null) { + tzRoundingBuilder = Rounding.builder(fixedInterval); } else { - tzRoundingBuilder = Rounding.builder(tryIntervalAsFixedUnit()); + // If we get here we have exhausted our options and are not able to parse this interval + throw new IllegalArgumentException("Unable to parse interval [" + dateHistogramInterval + "]"); } } if (timeZone != null) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java index 63d08f5e832ac..dc0eefec45c82 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -73,7 +73,7 @@ public Bucket(StreamInput in, DocValueFormat format) throws IOException { this.format = format; key = in.readLong(); docCount = in.readVLong(); - aggregations = InternalAggregations.readAggregations(in); + aggregations = new InternalAggregations(in); } @Override @@ -175,7 +175,7 @@ static class BucketInfo { roundingInfos[i] = new RoundingInfo(in); } roundingIdx = in.readVInt(); - emptySubAggregations = InternalAggregations.readAggregations(in); + emptySubAggregations = new InternalAggregations(in); } void writeTo(StreamOutput out) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index c15182f97a106..162bfde5acf0f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -77,7 +77,7 @@ public Bucket(StreamInput in, boolean keyed, DocValueFormat format) throws IOExc this.keyed = keyed; key = in.readLong(); docCount = in.readVLong(); - aggregations = InternalAggregations.readAggregations(in); + aggregations = new InternalAggregations(in); } @Override @@ -186,7 +186,7 @@ static class EmptyBucketInfo { EmptyBucketInfo(StreamInput in) throws IOException { rounding = Rounding.read(in); - subAggregations = InternalAggregations.readAggregations(in); + subAggregations = new InternalAggregations(in); bounds = in.readOptionalWriteable(ExtendedBounds::new); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index dd942f5bf6be2..b324241e9bb6d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -73,7 +73,7 @@ public Bucket(StreamInput in, boolean keyed, DocValueFormat format) throws IOExc this.keyed = keyed; key = in.readDouble(); docCount = 
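The new branch order matters because a legacy interval string is ambiguous: "1h" parses both as a calendar unit and as a fixed interval, while something like "90m" only makes sense as a fixed interval. A self-contained sketch of the fallback order, with toy stand-ins for the patch's tryIntervalAsCalendarUnit()/tryIntervalAsFixedUnit() helpers:

```java
public class IntervalFallbackSketch {
    // Toy stand-ins: each returns null when the string is not valid for that interval type.
    static String asCalendar(String s) { return s.matches("1[mhdwMqy]") ? "calendar:" + s : null; }
    static String asFixed(String s)    { return s.matches("\\d+(ms|s|m|h|d)") ? "fixed:" + s : null; }

    static String createRounding(String interval) {
        String cal = asCalendar(interval);
        String fixed = asFixed(interval);
        if (cal != null) {
            return cal;          // "1h" is ambiguous; calendar wins, as before
        } else if (fixed != null) {
            return fixed;        // "90m" only parses as a fixed interval
        } else {
            // New behaviour: fail loudly instead of passing null further down.
            throw new IllegalArgumentException("Unable to parse interval [" + interval + "]");
        }
    }

    public static void main(String[] args) {
        System.out.println(createRounding("1h"));   // calendar:1h
        System.out.println(createRounding("90m"));  // fixed:90m
        try {
            createRounding("bogus");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());     // Unable to parse interval [bogus]
        }
    }
}
```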
in.readVLong(); - aggregations = InternalAggregations.readAggregations(in); + aggregations = new InternalAggregations(in); } @Override @@ -178,7 +178,7 @@ static class EmptyBucketInfo { } EmptyBucketInfo(StreamInput in) throws IOException { - this(in.readDouble(), in.readDouble(), in.readDouble(), in.readDouble(), InternalAggregations.readAggregations(in)); + this(in.readDouble(), in.readDouble(), in.readDouble(), in.readDouble(), new InternalAggregations(in)); } public void writeTo(StreamOutput out) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 3e3c07bc42398..68e46b37bb064 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -66,7 +66,7 @@ public class NestedAggregator extends BucketsAggregator implements SingleBucketA super(name, factories, context, parentAggregator, pipelineAggregators, metaData); Query parentFilter = parentObjectMapper != null ? parentObjectMapper.nestedTypeFilter() - : Queries.newNonNestedFilter(context.mapperService().getIndexSettings().getIndexVersionCreated()); + : Queries.newNonNestedFilter(); this.parentFilter = context.bitsetFilterCache().getBitSetProducer(parentFilter); this.childFilter = childObjectMapper.nestedTypeFilter(); this.collectsFromSingleBucket = collectsFromSingleBucket; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java index 415ae39c71e00..2f29f8f2cdcfc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java @@ -54,7 +54,7 @@ public ReverseNestedAggregator(String name, AggregatorFactories factories, Objec throws IOException { super(name, factories, context, parent, pipelineAggregators, metaData); if (objectMapper == null) { - parentFilter = Queries.newNonNestedFilter(context.mapperService().getIndexSettings().getIndexVersionCreated()); + parentFilter = Queries.newNonNestedFilter(); } else { parentFilter = objectMapper.nestedTypeFilter(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java index 3caae6ac34505..35ee592d6c9ae 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java @@ -77,7 +77,7 @@ private static Bucket createFromStream(StreamInput in, DocValueFormat format, bo BytesRef from = in.readBoolean() ? in.readBytesRef() : null; BytesRef to = in.readBoolean() ? 
in.readBytesRef() : null; long docCount = in.readLong(); - InternalAggregations aggregations = InternalAggregations.readAggregations(in); + InternalAggregations aggregations = new InternalAggregations(in); return new Bucket(format, keyed, key, from, to, docCount, aggregations); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java index 2427104d0c0f5..747cb22c87071 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java @@ -256,7 +256,7 @@ public InternalRange(StreamInput in) throws IOException { for (int i = 0; i < size; i++) { String key = in.readString(); ranges.add(getFactory().createBucket(key, in.readDouble(), in.readDouble(), in.readVLong(), - InternalAggregations.readAggregations(in), keyed, format)); + new InternalAggregations(in), keyed, format)); } this.ranges = ranges; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTerms.java index 2d22b61472a5a..fd4eec825774e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTerms.java @@ -59,7 +59,7 @@ static class Bucket extends InternalSignificantTerms.Bucket { supersetDf = in.readVLong(); term = in.readLong(); score = in.readDouble(); - aggregations = InternalAggregations.readAggregations(in); + aggregations = new InternalAggregations(in); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java index a73ee1818cf6a..551ecd6a9f23e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTerms.java @@ -57,7 +57,7 @@ public Bucket(StreamInput in, long subsetSize, long supersetSize, DocValueFormat subsetDf = in.readVLong(); supersetDf = in.readVLong(); score = in.readDouble(); - aggregations = InternalAggregations.readAggregations(in); + aggregations = new InternalAggregations(in); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 72a641ea5bb60..f40ff84bf2130 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -85,7 +85,7 @@ protected Bucket(StreamInput in, DocValueFormat formatter, boolean showDocCountE if (showDocCountError) { docCountError = in.readLong(); } - aggregations = InternalAggregations.readAggregations(in); + aggregations = new InternalAggregations(in); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java index dbe566a4ddbb2..1d61cd565d69c 100644 --- 
a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java @@ -42,11 +42,22 @@ public class DfsSearchResult extends SearchPhaseResult { private ObjectObjectHashMap fieldStatistics = HppcMaps.newNoNullKeysMap(); private int maxDoc; - public DfsSearchResult() { - } - public DfsSearchResult(StreamInput in) throws IOException { - readFrom(in); + super(in); + requestId = in.readLong(); + int termsSize = in.readVInt(); + if (termsSize == 0) { + terms = EMPTY_TERMS; + } else { + terms = new Term[termsSize]; + for (int i = 0; i < terms.length; i++) { + terms[i] = new Term(in.readString(), in.readBytesRef()); + } + } + this.termStatistics = readTermStats(in, terms); + fieldStatistics = readFieldStats(in); + + maxDoc = in.readVInt(); } public DfsSearchResult(long id, SearchShardTarget shardTarget) { @@ -86,26 +97,6 @@ public ObjectObjectHashMap fieldStatistics() { return fieldStatistics; } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - requestId = in.readLong(); - int termsSize = in.readVInt(); - if (termsSize == 0) { - terms = EMPTY_TERMS; - } else { - terms = new Term[termsSize]; - for (int i = 0; i < terms.length; i++) { - terms[i] = new Term(in.readString(), in.readBytesRef()); - } - } - this.termStatistics = readTermStats(in, terms); - readFieldStats(in, fieldStatistics); - - - maxDoc = in.readVInt(); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -154,16 +145,9 @@ public static void writeSingleTermStats(StreamOutput out, TermStatistics termSt } } - public static ObjectObjectHashMap readFieldStats(StreamInput in) throws IOException { - return readFieldStats(in, null); - } - - public static ObjectObjectHashMap readFieldStats(StreamInput in, - ObjectObjectHashMap fieldStatistics) throws IOException { + static ObjectObjectHashMap readFieldStats(StreamInput in) throws IOException { final int numFieldStatistics = in.readVInt(); - if (fieldStatistics == null) { - fieldStatistics = HppcMaps.newNoNullKeysMap(numFieldStatistics); - } + ObjectObjectHashMap fieldStatistics = HppcMaps.newNoNullKeysMap(numFieldStatistics); for (int i = 0; i < numFieldStatistics; i++) { final String field = in.readString(); assert field != null; @@ -178,7 +162,7 @@ public static ObjectObjectHashMap readFieldStats(S return fieldStatistics; } - public static TermStatistics[] readTermStats(StreamInput in, Term[] terms) throws IOException { + static TermStatistics[] readTermStats(StreamInput in, Term[] terms) throws IOException { int termsStatsSize = in.readVInt(); final TermStatistics[] termStatistics; if (termsStatsSize == 0) { @@ -200,7 +184,6 @@ public static TermStatistics[] readTermStats(StreamInput in, Term[] terms) throw return termStatistics; } - /* * optional statistics are set to -1 in lucene by default. * Since we are using var longs to encode values we add one to each value @@ -211,7 +194,6 @@ public static long addOne(long value) { return value + 1; } - /* * See #addOne this just subtracting one and asserts that the actual value * is positive. 
@@ -220,5 +202,4 @@ public static long subOne(long value) { assert value >= 0; return value - 1; } - } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index ab2f864bfce35..c23be0f4cb994 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -190,7 +190,7 @@ public void execute(SearchContext context) { private int findRootDocumentIfNested(SearchContext context, LeafReaderContext subReaderContext, int subDocId) throws IOException { if (context.mapperService().hasNested()) { BitSet bits = context.bitsetFilterCache() - .getBitSetProducer(Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated())) + .getBitSetProducer(Queries.newNonNestedFilter()) .getBitSet(subReaderContext); if (!bits.get(subDocId)) { return bits.nextSetBit(subDocId); @@ -363,7 +363,7 @@ private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context } parentFilter = nestedParentObjectMapper.nestedTypeFilter(); } else { - parentFilter = Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated()); + parentFilter = Queries.newNonNestedFilter(); } Query childFilter = nestedObjectMapper.nestedTypeFilter(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java index 400ab3623c0e8..6e183f1148384 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java @@ -39,7 +39,9 @@ public FetchSearchResult() { } public FetchSearchResult(StreamInput in) throws IOException { - readFrom(in); + super(in); + requestId = in.readLong(); + hits = new SearchHits(in); } public FetchSearchResult(long id, SearchShardTarget shardTarget) { @@ -82,19 +84,6 @@ public int counterGetAndIncrement() { return counter++; } - public static FetchSearchResult readFetchSearchResult(StreamInput in) throws IOException { - FetchSearchResult result = new FetchSearchResult(); - result.readFrom(in); - return result; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - requestId = in.readLong(); - hits = new SearchHits(in); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java index 0a5a7cec375db..e87bd6d5e9d63 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java @@ -27,19 +27,15 @@ import java.io.IOException; -import static org.elasticsearch.search.fetch.FetchSearchResult.readFetchSearchResult; -import static org.elasticsearch.search.query.QuerySearchResult.readQuerySearchResult; - public final class QueryFetchSearchResult extends SearchPhaseResult { - private QuerySearchResult queryResult; - private FetchSearchResult fetchResult; - - public QueryFetchSearchResult() { - } + private final QuerySearchResult queryResult; + private final FetchSearchResult fetchResult; public QueryFetchSearchResult(StreamInput in) throws IOException { - readFrom(in); + super(in); + queryResult = new 
QuerySearchResult(in); + fetchResult = new FetchSearchResult(in); } public QueryFetchSearchResult(QuerySearchResult queryResult, FetchSearchResult fetchResult) { @@ -81,19 +77,6 @@ public FetchSearchResult fetchResult() { return fetchResult; } - public static QueryFetchSearchResult readQueryFetchSearchResult(StreamInput in) throws IOException { - QueryFetchSearchResult result = new QueryFetchSearchResult(); - result.readFrom(in); - return result; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - queryResult = readQuerySearchResult(in); - fetchResult = readFetchSearchResult(in); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java index 6b0a8b619bff3..0785fafdc2189 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java @@ -27,17 +27,15 @@ import java.io.IOException; -import static org.elasticsearch.search.fetch.QueryFetchSearchResult.readQueryFetchSearchResult; - public final class ScrollQueryFetchSearchResult extends SearchPhaseResult { - private QueryFetchSearchResult result; - - public ScrollQueryFetchSearchResult() { - } + private final QueryFetchSearchResult result; public ScrollQueryFetchSearchResult(StreamInput in) throws IOException { - readFrom(in); + super(in); + SearchShardTarget searchShardTarget = new SearchShardTarget(in); + result = new QueryFetchSearchResult(in); + setSearchShardTarget(searchShardTarget); } public ScrollQueryFetchSearchResult(QueryFetchSearchResult result, SearchShardTarget shardTarget) { @@ -71,14 +69,6 @@ public FetchSearchResult fetchResult() { return result.fetchResult(); } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - SearchShardTarget searchShardTarget = new SearchShardTarget(in); - result = readQueryFetchSearchResult(in); - setSearchShardTarget(searchShardTarget); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java index 6ae302ee87a25..e9ec29cef4ae7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java @@ -18,13 +18,10 @@ */ package org.elasticsearch.search.fetch.subphase.highlight; -import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.search.highlight.DefaultEncoder; import org.apache.lucene.search.highlight.Encoder; import org.apache.lucene.search.highlight.SimpleHTMLEncoder; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; @@ -78,13 +75,4 @@ public static class Encoders { public static final Encoder HTML = new SimpleHTMLEncoder(); } - static Analyzer getAnalyzer(DocumentMapper docMapper, MappedFieldType 
type) { - if (type instanceof KeywordFieldMapper.KeywordFieldType) { - KeywordFieldMapper.KeywordFieldType keywordFieldType = (KeywordFieldMapper.KeywordFieldType) type; - if (keywordFieldType.normalizer() != null) { - return keywordFieldType.normalizer(); - } - } - return docMapper.mappers().indexAnalyzer(); - } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index ec5071706b031..6ad155104a4cc 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -101,7 +101,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) { int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments(); ArrayList fragsList = new ArrayList<>(); List textsToHighlight; - Analyzer analyzer = HighlightUtils.getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldType); + Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer(); final int maxAnalyzedOffset = context.indexShard().indexSettings().getHighlightMaxAnalyzedOffset(); try { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index 2a75e9c58f4fc..b806fb9cd312f 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -70,7 +70,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) { int numberOfFragments; try { - final Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldType, + final Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), hitContext); List fieldValues = loadFieldValues(fieldType, field, context, hitContext); if (fieldValues.size() == 0) { @@ -150,8 +150,8 @@ protected PassageFormatter getPassageFormatter(HitContext hitContext, SearchCont } - protected Analyzer getAnalyzer(DocumentMapper docMapper, MappedFieldType type, HitContext hitContext) { - return HighlightUtils.getAnalyzer(docMapper, type); + protected Analyzer getAnalyzer(DocumentMapper docMapper, HitContext hitContext) { + return docMapper.mappers().indexAnalyzer(); } protected List loadFieldValues(MappedFieldType fieldType, SearchContextHighlight.Field field, SearchContext context, diff --git a/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java b/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java index bac7b6486bb88..323c41c110f93 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java +++ b/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java @@ -52,7 +52,7 @@ public InternalSearchResponse(SearchHits hits, InternalAggregations aggregations public InternalSearchResponse(StreamInput in) throws IOException { super( new SearchHits(in), - in.readBoolean() ? InternalAggregations.readAggregations(in) : null, + in.readBoolean() ? new InternalAggregations(in) : null, in.readBoolean() ? 
new Suggest(in) : null, in.readBoolean(), in.readOptionalBoolean(), @@ -64,7 +64,7 @@ public InternalSearchResponse(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { hits.writeTo(out); - out.writeOptionalStreamable((InternalAggregations)aggregations); + out.writeOptionalWriteable((InternalAggregations)aggregations); out.writeOptionalWriteable(suggest); out.writeBoolean(timedOut); out.writeOptionalBoolean(terminatedEarly); diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java index f39317712fe0c..12eef5dcf29de 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java @@ -107,7 +107,7 @@ private ShardSearchLocalRequest(ShardId shardId, int numberOfShards, SearchType } ShardSearchLocalRequest(StreamInput in) throws IOException { - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); searchType = SearchType.fromId(in.readByte()); numberOfShards = in.readVInt(); scroll = in.readOptionalWriteable(Scroll::new); diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index bc4aa05d30537..64621277f6e6f 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -281,6 +281,10 @@ static boolean execute(SearchContext searchContext, } finally { searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION); } + if (searchContext.terminateAfter() != SearchContext.DEFAULT_TERMINATE_AFTER + && queryResult.terminatedEarly() == null) { + queryResult.terminatedEarly(false); + } final QuerySearchResult result = searchContext.queryResult(); for (QueryCollectorContext ctx : collectors) { diff --git a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index 4ccfdd64b0d3c..cde180fd7ef4d 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -67,7 +67,9 @@ public QuerySearchResult() { } public QuerySearchResult(StreamInput in) throws IOException { - readFrom(in); + super(in); + long id = in.readLong(); + readFromWithId(id, in); } public QuerySearchResult(long id, SearchShardTarget shardTarget) { @@ -256,19 +258,6 @@ public boolean hasSearchContext() { return hasScoreDocs || hasSuggestHits(); } - public static QuerySearchResult readQuerySearchResult(StreamInput in) throws IOException { - QuerySearchResult result = new QuerySearchResult(); - result.readFrom(in); - return result; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - long id = in.readLong(); - readFromWithId(id, in); - } - public void readFromWithId(long id, StreamInput in) throws IOException { this.requestId = id; from = in.readVInt(); @@ -284,7 +273,7 @@ public void readFromWithId(long id, StreamInput in) throws IOException { } setTopDocs(readTopDocs(in)); if (hasAggs = in.readBoolean()) { - aggregations = InternalAggregations.readAggregations(in); + aggregations = new InternalAggregations(in); } if (in.getVersion().before(Version.V_7_2_0)) { List pipelineAggregators = 
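The QueryPhase change above makes terminated_early deterministic: whenever a terminate_after limit is set on the request and no collector tripped it, the shard result is now explicitly marked as not terminated early instead of being left null. A hypothetical client-side check of that behaviour, using standard Java client calls:

```java
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;

public class TerminateAfterExample {
    // Illustrative only: with terminate_after set, isTerminatedEarly() is now
    // expected to come back as TRUE or FALSE rather than null.
    static Boolean terminatedEarly(Client client) {
        SearchResponse response = client.prepareSearch("test")
            .setQuery(QueryBuilders.matchAllQuery())
            .setTerminateAfter(5)
            .get();
        return response.isTerminatedEarly();
    }
}
```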
in.readNamedWriteableList(PipelineAggregator.class).stream() diff --git a/server/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java b/server/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java index 632d148ea901b..fe8bad3d098e4 100644 --- a/server/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java @@ -26,17 +26,15 @@ import java.io.IOException; -import static org.elasticsearch.search.query.QuerySearchResult.readQuerySearchResult; - public final class ScrollQuerySearchResult extends SearchPhaseResult { - private QuerySearchResult result; - - public ScrollQuerySearchResult() { - } + private final QuerySearchResult result; public ScrollQuerySearchResult(StreamInput in) throws IOException { - readFrom(in); + super(in); + SearchShardTarget shardTarget = new SearchShardTarget(in); + result = new QuerySearchResult(in); + setSearchShardTarget(shardTarget); } public ScrollQuerySearchResult(QuerySearchResult result, SearchShardTarget shardTarget) { @@ -61,14 +59,6 @@ public QuerySearchResult queryResult() { return result; } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - SearchShardTarget shardTarget = new SearchShardTarget(in); - result = readQuerySearchResult(in); - setSearchShardTarget(shardTarget); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index b4358abee0728..4d793de18443d 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -185,7 +185,7 @@ protected static Nested resolveNested(QueryShardContext context, NestedSortBuild final ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); final Query parentQuery; if (objectMapper == null) { - parentQuery = Queries.newNonNestedFilter(context.indexVersionCreated()); + parentQuery = Queries.newNonNestedFilter(); } else { parentQuery = objectMapper.nestedTypeFilter(); } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java index 28202fa2c0b97..d95fdb0a55692 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java @@ -99,7 +99,7 @@ static SnapshotShardFailure readSnapshotShardFailure(StreamInput in) throws IOEx @Override public void readFrom(StreamInput in) throws IOException { nodeId = in.readOptionalString(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); super.shardId = shardId.getId(); index = shardId.getIndexName(); reason = in.readString(); diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 9b6ab76a98745..f052a1c7abeb8 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -448,7 +448,7 @@ public ActionRequestValidationException validate() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); snapshot = new Snapshot(in); - shardId = 
ShardId.readShardId(in); + shardId = new ShardId(in); status = new ShardSnapshotStatus(in); } diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index 42d613016352a..eef9f4f42637c 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -86,6 +86,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.transport.NetworkExceptionHelper.isCloseConnectionException; @@ -102,6 +103,9 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements private static final long NINETY_PER_HEAP_SIZE = (long) (JvmInfo.jvmInfo().getMem().getHeapMax().getBytes() * 0.9); private static final BytesReference EMPTY_BYTES_REFERENCE = new BytesArray(new byte[0]); + // this limit is per-address + private static final int LIMIT_LOCAL_PORTS_COUNT = 6; + protected final Settings settings; protected final ThreadPool threadPool; protected final PageCacheRecycler pageCacheRecycler; @@ -311,14 +315,20 @@ public Map profileBoundAddresses() { } @Override - public List getLocalAddresses() { + public List getDefaultSeedAddresses() { List local = new ArrayList<>(); local.add("127.0.0.1"); // check if v6 is supported, if so, v4 will also work via mapped addresses. if (NetworkUtils.SUPPORTS_V6) { local.add("[::1]"); // may get ports appended! } - return local; + return local.stream() + .flatMap( + address -> Arrays.stream(defaultPortRange()) + .limit(LIMIT_LOCAL_PORTS_COUNT) + .mapToObj(port -> address + ":" + port) + ) + .collect(Collectors.toList()); } protected void bindServer(ProfileSettings profileSettings) { @@ -456,8 +466,17 @@ static int resolvePublishPort(ProfileSettings profileSettings, List addresses = new HashSet<>(Arrays.asList(InetAddress.getAllByName(host))); - List transportAddresses = new ArrayList<>(); - int[] ports = new PortsRange(portString).ports(); - int limit = Math.min(ports.length, perAddressLimit); - for (int i = 0; i < limit; i++) { - for (InetAddress address : addresses) { - transportAddresses.add(new TransportAddress(address, ports[i])); - } - } - return transportAddresses.toArray(new TransportAddress[transportAddresses.size()]); + return Arrays.stream(InetAddress.getAllByName(host)) + .distinct() + .map(address -> new TransportAddress(address, port)) + .toArray(TransportAddress[]::new); } @Override diff --git a/server/src/main/java/org/elasticsearch/transport/Transport.java b/server/src/main/java/org/elasticsearch/transport/Transport.java index eea8ce0f2ffe8..0b79b6aecf093 100644 --- a/server/src/main/java/org/elasticsearch/transport/Transport.java +++ b/server/src/main/java/org/elasticsearch/transport/Transport.java @@ -68,12 +68,12 @@ public interface Transport extends LifecycleComponent { /** * Returns an address from its string representation. 
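getDefaultSeedAddresses, the renamed getLocalAddresses above, now returns ready-to-use host:port seeds instead of bare loopback hosts, pairing each local address with the first LIMIT_LOCAL_PORTS_COUNT ports of the profile's port range. A JDK-only sketch of the expansion, assuming the default transport port range starts at 9300:

```java
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class DefaultSeedAddressSketch {
    public static void main(String[] args) {
        // Mirrors the flatMap/limit in getDefaultSeedAddresses() with plain JDK
        // streams; 9300-9400 is an assumed stand-in for defaultPortRange().
        List<String> loopback = Arrays.asList("127.0.0.1", "[::1]");
        List<String> seeds = loopback.stream()
            .flatMap(address -> IntStream.rangeClosed(9300, 9400)
                .limit(6)
                .mapToObj(port -> address + ":" + port))
            .collect(Collectors.toList());
        System.out.println(seeds); // [127.0.0.1:9300, ..., [::1]:9305]
    }
}
```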
*/ - TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException; + TransportAddress[] addressesFromString(String address) throws UnknownHostException; /** - * Returns a list of all local adresses for this transport + * Returns a list of all local addresses for this transport */ - List getLocalAddresses(); + List getDefaultSeedAddresses(); default CircuitBreaker getInFlightRequestBreaker() { return new NoopCircuitBreaker("in-flight-noop"); diff --git a/server/src/main/java/org/elasticsearch/transport/TransportKeepAlive.java b/server/src/main/java/org/elasticsearch/transport/TransportKeepAlive.java index fc7ebe4b9644e..9e49d06f2b0b6 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportKeepAlive.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportKeepAlive.java @@ -84,10 +84,7 @@ void registerNodeConnection(List nodeChannels, ConnectionProfile con for (TcpChannel channel : nodeChannels) { scheduledPing.addChannel(channel); - - channel.addCloseListener(ActionListener.wrap(() -> { - scheduledPing.removeChannel(channel); - })); + channel.addCloseListener(ActionListener.wrap(() -> scheduledPing.removeChannel(channel))); } } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 52a5a15e405b4..a89784945db93 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -311,8 +311,8 @@ public BoundTransportAddress boundAddress() { return transport.boundAddress(); } - public List getLocalAddresses() { - return transport.getLocalAddresses(); + public List getDefaultSeedAddresses() { + return transport.getDefaultSeedAddresses(); } /** @@ -748,8 +748,8 @@ private boolean shouldTraceAction(String action) { return true; } - public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException { - return transport.addressesFromString(address, perAddressLimit); + public TransportAddress[] addressesFromString(String address) throws UnknownHostException { + return transport.addressesFromString(address); } /** diff --git a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index d6b15f3df43dc..b195662dcf20e 100644 --- a/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/server/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -89,6 +89,11 @@ grant codeBase "${codebase.httpasyncclient}" { permission java.net.NetPermission "getProxySelector"; }; +grant codeBase "${codebase.junit-rt.jar}" { + // allows IntelliJ IDEA JUnit test runner to control number of test iterations + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; +}; + grant codeBase "file:${gradle.dist.lib}/-" { // gradle test worker code needs a slew of permissions, we give full access here since gradle isn't a production // dependency and there's no point in exercising the security policy against it @@ -104,4 +109,4 @@ grant codeBase "file:${gradle.worker.jar}" { grant { // since the gradle test worker jar is on the test classpath, our tests should be able to read it permission java.io.FilePermission "${gradle.worker.jar}", "read"; -}; \ No newline at end of file +}; diff --git a/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java 
b/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java index ce33c247a3337..9d05e119cbb78 100644 --- a/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java @@ -227,4 +227,34 @@ public void testExtractTerms() throws IOException { assertThat(extracted.size(), equalTo(terms.size())); assertThat(extracted, containsInAnyOrder(terms.toArray(new Term[0]))); } + + public void testMinTTF() throws IOException { + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); + FieldType ft = new FieldType(TextField.TYPE_NOT_STORED); + ft.freeze(); + + for (int i = 0; i < 10; i++) { + Document d = new Document(); + d.add(new TextField("id", Integer.toString(i), Field.Store.YES)); + d.add(new Field("dense", "foo foo foo", ft)); + if (i % 10 == 0) { + d.add(new Field("sparse", "foo", ft)); + } + w.addDocument(d); + } + w.commit(); + DirectoryReader reader = DirectoryReader.open(w); + IndexSearcher searcher = setSimilarity(newSearcher(reader)); + { + String[] fields = new String[]{"dense", "sparse"}; + Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); + TopDocs search = searcher.search(query, 10); + ScoreDoc[] scoreDocs = search.scoreDocs; + assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + } + reader.close(); + w.close(); + dir.close(); + } } diff --git a/server/src/test/java/org/elasticsearch/VersionTests.java b/server/src/test/java/org/elasticsearch/VersionTests.java index c04f131eab063..21a18e4a26ba5 100644 --- a/server/src/test/java/org/elasticsearch/VersionTests.java +++ b/server/src/test/java/org/elasticsearch/VersionTests.java @@ -102,6 +102,7 @@ public void testMax() { public void testMinimumIndexCompatibilityVersion() { assertEquals(Version.fromId(5000099), Version.fromId(6000099).minimumIndexCompatibilityVersion()); + assertEquals(Version.fromId(2000099), Version.fromId(5000099).minimumIndexCompatibilityVersion()); assertEquals(Version.fromId(2000099), Version.fromId(5010000).minimumIndexCompatibilityVersion()); assertEquals(Version.fromId(2000099), diff --git a/server/src/test/java/org/elasticsearch/action/RejectionActionIT.java b/server/src/test/java/org/elasticsearch/action/RejectionActionIT.java index ad2447cb7b3d0..e0ef29bf7f49e 100644 --- a/server/src/test/java/org/elasticsearch/action/RejectionActionIT.java +++ b/server/src/test/java/org/elasticsearch/action/RejectionActionIT.java @@ -65,19 +65,17 @@ public void testSimulatedSearchRejectionLoad() throws Throwable { client().prepareSearch("test") .setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(QueryBuilders.matchQuery("field", "1")) - .execute(new ActionListener() { + .execute(new LatchedActionListener<>(new ActionListener() { @Override public void onResponse(SearchResponse searchResponse) { responses.add(searchResponse); - latch.countDown(); } @Override public void onFailure(Exception e) { responses.add(e); - latch.countDown(); } - }); + }, latch)); } latch.await(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index e9b940df3847d..b883d593352c2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ 
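The listener cleanups here and in the cancellation test that follows lean on two existing helpers: LatchedActionListener counts a latch down on success and on failure so the wrapped listener only records the outcome, and ActionListener.wrap(Runnable) is the shorthand when only that side effect matters. A small sketch of both idioms:

```java
import java.util.concurrent.CountDownLatch;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;

public class ListenerExamples {

    // The latch is counted down whether the action succeeds or fails; the
    // delegate only has to record the response or the exception.
    static <T> ActionListener<T> latched(ActionListener<T> delegate, CountDownLatch latch) {
        return new LatchedActionListener<>(delegate, latch);
    }

    // When the side effect is all that matters, wrap(Runnable) runs the
    // runnable on both response and failure, as in the cancellation test.
    static <T> ActionListener<T> countingDown(CountDownLatch latch) {
        return ActionListener.wrap(latch::countDown);
    }
}
```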
b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -470,17 +470,7 @@ public void testCancellingTasksThatDontSupportCancellation() throws Exception { connectNodes(testNodes); CountDownLatch checkLatch = new CountDownLatch(1); CountDownLatch responseLatch = new CountDownLatch(1); - Task task = startBlockingTestNodesAction(checkLatch, new ActionListener() { - @Override - public void onResponse(NodesResponse nodeResponses) { - responseLatch.countDown(); - } - - @Override - public void onFailure(Exception e) { - responseLatch.countDown(); - } - }); + Task task = startBlockingTestNodesAction(checkLatch, ActionListener.wrap(responseLatch::countDown)); String actionName = "internal:testAction"; // only pick the main action // Try to cancel main task using action name diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java index e8bd14b640dfa..485b2dc3fb4d1 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java @@ -19,16 +19,13 @@ package org.elasticsearch.action.admin.cluster.settings; -import org.elasticsearch.Version; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractStreamableXContentTestCase; -import org.elasticsearch.test.VersionUtils; -import java.io.IOException; import java.util.List; import java.util.Set; import java.util.function.Predicate; @@ -100,8 +97,4 @@ protected ClusterUpdateSettingsResponse createBlankInstance() { return new ClusterUpdateSettingsResponse(); } - public void testOldSerialisation() throws IOException { - ClusterUpdateSettingsResponse original = createTestInstance(); - assertSerialization(original, VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_4_0)); - } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java index 7f1b7fb41ba18..a4cee7a4cde2a 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java @@ -20,12 +20,13 @@ package org.elasticsearch.action.admin.indices.analyze; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; import java.util.ArrayList; @@ -37,7 +38,7 @@ import static org.hamcrest.Matchers.equalTo; -public class AnalyzeResponseTests extends AbstractStreamableXContentTestCase { 
+public class AnalyzeResponseTests extends AbstractSerializingTestCase { @Override protected Predicate getRandomFieldsExcludeFilter() { @@ -50,8 +51,8 @@ protected AnalyzeResponse doParseInstance(XContentParser parser) throws IOExcept } @Override - protected AnalyzeResponse createBlankInstance() { - return new AnalyzeResponse(); + protected Writeable.Reader instanceReader() { + return AnalyzeResponse::new; } @Override @@ -61,21 +62,24 @@ protected AnalyzeResponse createTestInstance() { for (int i = 0; i < tokenCount; i++) { tokens[i] = randomToken(); } - DetailAnalyzeResponse dar = null; if (randomBoolean()) { - dar = new DetailAnalyzeResponse(); + DetailAnalyzeResponse.CharFilteredText[] charfilters = null; + DetailAnalyzeResponse.AnalyzeTokenList[] tokenfilters = null; if (randomBoolean()) { - dar.charfilters(new DetailAnalyzeResponse.CharFilteredText[]{ + charfilters = new DetailAnalyzeResponse.CharFilteredText[]{ new DetailAnalyzeResponse.CharFilteredText("my_charfilter", new String[]{"one two"}) - }); + }; } - dar.tokenizer(new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens)); if (randomBoolean()) { - dar.tokenfilters(new DetailAnalyzeResponse.AnalyzeTokenList[]{ + tokenfilters = new DetailAnalyzeResponse.AnalyzeTokenList[]{ new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_1", tokens), new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_2", tokens) - }); + }; } + DetailAnalyzeResponse dar = new DetailAnalyzeResponse( + charfilters, + new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens), + tokenfilters); return new AnalyzeResponse(null, dar); } return new AnalyzeResponse(Arrays.asList(tokens), null); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexResponseTests.java index cca95e09151ef..f86beff7738e3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/close/CloseIndexResponseTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.hamcrest.Matchers.equalTo; @@ -48,7 +47,7 @@ public void testBwcSerialization() throws Exception { { final CloseIndexResponse response = randomResponse(); try (BytesStreamOutput out = new BytesStreamOutput()) { - out.setVersion(randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_2_0))); + out.setVersion(randomVersionBetween(random(), Version.V_7_0_0, Version.V_7_1_0)); response.writeTo(out); final AcknowledgedResponse deserializedResponse = new AcknowledgedResponse(); @@ -65,7 +64,7 @@ public void testBwcSerialization() throws Exception { final CloseIndexResponse deserializedResponse = new CloseIndexResponse(); try (StreamInput in = out.bytes().streamInput()) { - in.setVersion(randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_2_0))); + in.setVersion(randomVersionBetween(random(), Version.V_7_0_0, Version.V_7_1_0)); deserializedResponse.readFrom(in); } assertThat(deserializedResponse.isAcknowledged(), equalTo(response.isAcknowledged())); diff --git 
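The AnalyzeResponseTests move to AbstractSerializingTestCase is the test-side half of the Writeable migration: instead of createBlankInstance() plus readFrom, the harness is handed a Writeable.Reader. The general shape, with a hypothetical ExampleResponse standing in for the class under test:

```java
import java.io.IOException;

import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;

// Hypothetical sketch; ExampleResponse is assumed to have a StreamInput
// constructor and a fromXContent parser, like AnalyzeResponse after this patch.
public class ExampleResponseTests extends AbstractSerializingTestCase<ExampleResponse> {

    @Override
    protected ExampleResponse createTestInstance() {
        return new ExampleResponse(randomAlphaOfLength(5));
    }

    @Override
    protected Writeable.Reader<ExampleResponse> instanceReader() {
        return ExampleResponse::new;   // wire round-trips go through the constructor
    }

    @Override
    protected ExampleResponse doParseInstance(XContentParser parser) throws IOException {
        return ExampleResponse.fromXContent(parser);
    }
}
```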
a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java index feeb9646e40bf..b14bdd0ed9883 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java @@ -65,7 +65,6 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.Arrays; -import java.util.List; import java.util.Map; import java.util.stream.IntStream; @@ -165,11 +164,8 @@ public void testCreateShrinkIndexToN() { } public void testShrinkIndexPrimaryTerm() throws Exception { - final List factors = Arrays.asList(2, 3, 5, 7); - final List numberOfShardsFactors = randomSubsetOf(scaledRandomIntBetween(1, factors.size() - 1), factors); - final int numberOfShards = numberOfShardsFactors.stream().reduce(1, (x, y) -> x * y); - final int numberOfTargetShards = randomSubsetOf(randomInt(numberOfShardsFactors.size() - 1), numberOfShardsFactors) - .stream().reduce(1, (x, y) -> x * y); + int numberOfShards = randomIntBetween(2, 20); + int numberOfTargetShards = randomValueOtherThanMany(n -> numberOfShards % n != 0, () -> randomIntBetween(1, numberOfShards - 1)); internalCluster().ensureAtLeastNumDataNodes(2); prepareCreate("source").setSettings(Settings.builder().put(indexSettings()).put("number_of_shards", numberOfShards)).get(); @@ -218,7 +214,7 @@ public void testShrinkIndexPrimaryTerm() throws Exception { final Settings.Builder prepareShrinkSettings = Settings.builder().put("index.routing.allocation.require._name", mergeNode).put("index.blocks.write", true); client().admin().indices().prepareUpdateSettings("source").setSettings(prepareShrinkSettings).get(); - ensureGreen(); + ensureGreen(TimeValue.timeValueSeconds(120)); // needs more than the default to relocate many shards final IndexMetaData indexMetaData = indexMetaData(client(), "source"); final long beforeShrinkPrimaryTerm = IntStream.range(0, numberOfShards).mapToLong(indexMetaData::primaryTerm).max().getAsLong(); @@ -228,7 +224,7 @@ public void testShrinkIndexPrimaryTerm() throws Exception { Settings.builder().put("index.number_of_replicas", 0).put("index.number_of_shards", numberOfTargetShards).build(); assertAcked(client().admin().indices().prepareResizeIndex("source", "target").setSettings(shrinkSettings).get()); - ensureGreen(); + ensureGreen(TimeValue.timeValueSeconds(120)); final IndexMetaData afterShrinkIndexMetaData = indexMetaData(client(), "target"); for (int shardId = 0; shardId < numberOfTargetShards; shardId++) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index 7038505ff6fb3..05d1c5dcd803f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -367,7 +367,8 @@ private static IndexMetaData indexMetaData(final Client client, final String ind public void testCreateSplitIndex() { internalCluster().ensureAtLeastNumDataNodes(2); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_rc2, Version.CURRENT); + + Version version = VersionUtils.randomIndexCompatibleVersion(random()); prepareCreate("source").setSettings(Settings.builder().put(indexSettings()) .put("number_of_shards", 1) .put("index.version.created", 
version) diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java index 2b8db458eb82f..472b4ddb4a3d1 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java @@ -23,11 +23,12 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; import java.util.Collections; @@ -38,7 +39,7 @@ import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; import static org.hamcrest.CoreMatchers.equalTo; -public class GetFieldMappingsResponseTests extends AbstractStreamableXContentTestCase { +public class GetFieldMappingsResponseTests extends AbstractSerializingTestCase { public void testManualSerialization() throws IOException { Map>> mappings = new HashMap<>(); @@ -48,9 +49,8 @@ public void testManualSerialization() throws IOException { try (BytesStreamOutput out = new BytesStreamOutput()) { response.writeTo(out); - GetFieldMappingsResponse serialized = new GetFieldMappingsResponse(); try (StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes)) { - serialized.readFrom(in); + GetFieldMappingsResponse serialized = new GetFieldMappingsResponse(in); FieldMappingMetaData metaData = serialized.fieldMappings("index", "type", "field"); assertNotNull(metaData); assertEquals(new BytesArray("{}"), metaData.getSource()); @@ -106,13 +106,13 @@ protected GetFieldMappingsResponse doParseInstance(XContentParser parser) throws } @Override - protected GetFieldMappingsResponse createBlankInstance() { - return new GetFieldMappingsResponse(); + protected GetFieldMappingsResponse createTestInstance() { + return new GetFieldMappingsResponse(randomMapping()); } @Override - protected GetFieldMappingsResponse createTestInstance() { - return new GetFieldMappingsResponse(randomMapping()); + protected Writeable.Reader instanceReader() { + return GetFieldMappingsResponse::new; } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponseTests.java index 0cc3f455e83df..a38de844626dc 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponseTests.java @@ -20,14 +20,11 @@ package org.elasticsearch.action.admin.indices.rollover; -import org.elasticsearch.Version; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractStreamableXContentTestCase; -import org.elasticsearch.test.VersionUtils; -import 
java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -131,9 +128,4 @@ protected RolloverResponse mutateInstance(RolloverResponse response) { throw new UnsupportedOperationException(); } } - - public void testOldSerialisation() throws IOException { - RolloverResponse original = createTestInstance(); - assertSerialization(original, VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_4_0)); - } } diff --git a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java index 2a04a97667722..9f1ee08844b66 100644 --- a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java @@ -23,13 +23,14 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.get.GetResult; -import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.test.RandomObjects; import java.io.IOException; @@ -42,15 +43,16 @@ import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.equalTo; -public class ExplainResponseTests extends AbstractStreamableXContentTestCase { +public class ExplainResponseTests extends AbstractSerializingTestCase { + @Override protected ExplainResponse doParseInstance(XContentParser parser) throws IOException { return ExplainResponse.fromXContent(parser, randomBoolean()); } @Override - protected ExplainResponse createBlankInstance() { - return new ExplainResponse(); + protected Writeable.Reader instanceReader() { + return ExplainResponse::new; } @Override diff --git a/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java b/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java index 1dff130fb98a6..2ee9cb9e1397e 100644 --- a/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/main/MainResponseTests.java @@ -41,7 +41,7 @@ protected MainResponse createTestInstance() { ClusterName clusterName = new ClusterName(randomAlphaOfLength(10)); String nodeName = randomAlphaOfLength(10); final String date = new Date(randomNonNegativeLong()).toString(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_1, Version.CURRENT); + Version version = VersionUtils.randomIndexCompatibleVersion(random()); Build build = new Build( Build.Flavor.UNKNOWN, Build.Type.UNKNOWN, randomAlphaOfLength(8), date, randomBoolean(), version.toString() diff --git a/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java index 55c39f735ce31..bcb4a1200b7e8 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java @@ 
-20,6 +20,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.util.concurrent.AtomicArray; @@ -42,32 +43,24 @@ public class ClearScrollControllerTests extends ESTestCase { - public void testClearAll() throws IOException, InterruptedException { + public void testClearAll() throws InterruptedException { DiscoveryNode node1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); DiscoveryNode node2 = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); DiscoveryNode node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); DiscoveryNodes nodes = DiscoveryNodes.builder().add(node1).add(node2).add(node3).build(); CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = new ActionListener() { + ActionListener listener = new LatchedActionListener<>(new ActionListener() { @Override public void onResponse(ClearScrollResponse clearScrollResponse) { - try { - assertEquals(3, clearScrollResponse.getNumFreed()); - assertTrue(clearScrollResponse.isSucceeded()); - } finally { - latch.countDown(); - } + assertEquals(3, clearScrollResponse.getNumFreed()); + assertTrue(clearScrollResponse.isSucceeded()); } @Override public void onFailure(Exception e) { - try { - throw new AssertionError(e); - } finally { - latch.countDown(); - } + throw new AssertionError(e); } - }; + }, latch); List nodesInvoked = new CopyOnWriteArrayList<>(); SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override @@ -112,27 +105,18 @@ public void testClearScrollIds() throws IOException, InterruptedException { String scrollId = TransportSearchHelper.buildScrollId(array); DiscoveryNodes nodes = DiscoveryNodes.builder().add(node1).add(node2).add(node3).build(); CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = new ActionListener() { + ActionListener listener = new LatchedActionListener<>(new ActionListener() { @Override public void onResponse(ClearScrollResponse clearScrollResponse) { - try { - assertEquals(numFreed.get(), clearScrollResponse.getNumFreed()); - assertTrue(clearScrollResponse.isSucceeded()); - } finally { - latch.countDown(); - } - + assertEquals(numFreed.get(), clearScrollResponse.getNumFreed()); + assertTrue(clearScrollResponse.isSucceeded()); } @Override public void onFailure(Exception e) { - try { - throw new AssertionError(e); - } finally { - latch.countDown(); - } + throw new AssertionError(e); } - }; + }, latch); List nodesInvoked = new CopyOnWriteArrayList<>(); SearchTransportService searchTransportService = new SearchTransportService(null, null) { @@ -185,32 +169,22 @@ public void testClearScrollIdsWithFailure() throws IOException, InterruptedExcep DiscoveryNodes nodes = DiscoveryNodes.builder().add(node1).add(node2).add(node3).build(); CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = new ActionListener() { + ActionListener listener = new LatchedActionListener<>(new ActionListener() { @Override public void onResponse(ClearScrollResponse clearScrollResponse) { - try { - assertEquals(numFreed.get(), clearScrollResponse.getNumFreed()); - if (numFailures.get() > 0) { - assertFalse(clearScrollResponse.isSucceeded()); - } else { - assertTrue(clearScrollResponse.isSucceeded()); - } - - } finally { - 
latch.countDown(); + assertEquals(numFreed.get(), clearScrollResponse.getNumFreed()); + if (numFailures.get() > 0) { + assertFalse(clearScrollResponse.isSucceeded()); + } else { + assertTrue(clearScrollResponse.isSucceeded()); } - } @Override public void onFailure(Exception e) { - try { - throw new AssertionError(e); - } finally { - latch.countDown(); - } + throw new AssertionError(e); } - }; + }, latch); List nodesInvoked = new CopyOnWriteArrayList<>(); SearchTransportService searchTransportService = new SearchTransportService(null, null) { diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index ece695575a107..01f1109ef3bed 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; @@ -154,7 +155,8 @@ public void search(final SearchRequest request, final ActionListener { counter.decrementAndGet(); - listener.onResponse(new SearchResponse()); + listener.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, + ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY)); }); } }; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 21ac0cdf636d2..6a6b1c54db225 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -27,12 +27,12 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; @@ -407,22 +407,21 @@ static GroupShardsIterator getShardsIter(String index, Orig } public static class TestSearchResponse extends SearchResponse { - public final Set queried = new HashSet<>(); + final Set queried = new HashSet<>(); + + TestSearchResponse() { + super(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, Clusters.EMPTY); + } } public static class TestSearchPhaseResult extends SearchPhaseResult { final DiscoveryNode node; - public TestSearchPhaseResult(long id, DiscoveryNode node) { + TestSearchPhaseResult(long id, DiscoveryNode node) { this.requestId = id; this.node = node; } - @Override - public void readFrom(StreamInput in) throws IOException { - - } - @Override public void writeTo(StreamOutput out) throws IOException { @@ -433,7 +432,7 @@ public static final class 
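// Editor's sketch (assumes only what the ClearScrollControllerTests hunks above show): wrapping the
// assertion-carrying listener in LatchedActionListener moves the latch.countDown() into the wrapper,
// so each onResponse/onFailure no longer needs its own try/finally block. Generic helper, hypothetical name:
import java.util.concurrent.CountDownLatch;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;

class LatchedListenerSketch {
    // the delegate runs first, then the latch is counted down, on both success and failure
    static <T> ActionListener<T> latched(ActionListener<T> delegate, CountDownLatch latch) {
        return new LatchedActionListener<>(delegate, latch);
    }
}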
MockConnection implements Transport.Connection { private final DiscoveryNode node; - public MockConnection(DiscoveryNode node) { + MockConnection(DiscoveryNode node) { this.node = node; } @@ -444,7 +443,7 @@ public DiscoveryNode getNode() { @Override public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) - throws IOException, TransportException { + throws TransportException { throw new UnsupportedOperationException(); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index a0c4626e9543f..8f1d89a37daaa 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -80,7 +80,7 @@ public void testRandomVersionSerialization() throws IOException { SearchRequest deserializedRequest = copyWriteable(searchRequest, namedWriteableRegistry, SearchRequest::new, version); assertEquals(searchRequest.isCcsMinimizeRoundtrips(), deserializedRequest.isCcsMinimizeRoundtrips()); assertEquals(searchRequest.getLocalClusterAlias(), deserializedRequest.getLocalClusterAlias()); - assertEquals(searchRequest.getOrCreateAbsoluteStartMillis(), deserializedRequest.getOrCreateAbsoluteStartMillis()); + assertEquals(searchRequest.getAbsoluteStartMillis(), deserializedRequest.getAbsoluteStartMillis()); assertEquals(searchRequest.isFinalReduce(), deserializedRequest.isFinalReduce()); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index 18890e1339557..ed5104cabe5d2 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -278,7 +278,7 @@ public void testToXContent() { public void testSerialization() throws IOException { SearchResponse searchResponse = createTestItem(false); - SearchResponse deserialized = copyStreamable(searchResponse, namedWriteableRegistry, SearchResponse::new, Version.CURRENT); + SearchResponse deserialized = copyWriteable(searchResponse, namedWriteableRegistry, SearchResponse::new, Version.CURRENT); if (searchResponse.getHits().getTotalHits() == null) { assertNull(deserialized.getHits().getTotalHits()); } else { diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java index dade5eadb1832..7ecc172924b1c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -117,7 +118,8 @@ public void search(final SearchRequest request, final ActionListener { counter.decrementAndGet(); - listener.onResponse(new SearchResponse()); + listener.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, + ShardSearchFailure.EMPTY_ARRAY, 
SearchResponse.Clusters.EMPTY)); }); } }; diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index f222bcc015c62..96d057f50c4f7 100644 --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; import org.elasticsearch.tasks.Task; @@ -65,7 +66,7 @@ public void shutdown() throws Exception { terminate(threadPool); } - public void testActionFiltersRequest() throws ExecutionException, InterruptedException { + public void testActionFiltersRequest() throws InterruptedException { int numFilters = randomInt(10); Set orders = new HashSet<>(numFilters); while (orders.size() < numFilters) { @@ -139,7 +140,7 @@ protected void doExecute(Task task, TestRequest request, ActionListener failures = new CopyOnWriteArrayList<>(); - transportAction.execute(new TestRequest(), new ActionListener() { + transportAction.execute(new TestRequest(), new LatchedActionListener<>(new ActionListener() { @Override public void onResponse(TestResponse testResponse) { responses.incrementAndGet(); - latch.countDown(); } @Override public void onFailure(Exception e) { failures.add(e); - latch.countDown(); } - }); + }, latch)); if (!latch.await(10, TimeUnit.SECONDS)) { fail("timeout waiting for the filter to notify the listener as many times as expected"); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java index 1a7e5a73e7523..57b30d3484bc9 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java @@ -369,17 +369,7 @@ public void testConcurrentWriteReplicaResultCompletion() throws InterruptedExcep CountDownLatch completionLatch = new CountDownLatch(1); threadPool.generic().execute(() -> { waitForBarrier.run(); - replicaResult.respond(new ActionListener() { - @Override - public void onResponse(TransportResponse.Empty empty) { - completionLatch.countDown(); - } - - @Override - public void onFailure(Exception e) { - completionLatch.countDown(); - } - }); + replicaResult.respond(ActionListener.wrap(completionLatch::countDown)); }); if (randomBoolean()) { threadPool.generic().execute(() -> { diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java b/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java index 0cd9d3130f176..8ab452950a87a 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java @@ -77,8 +77,7 @@ public void testStreamResponse() throws Exception { // read ByteArrayInputStream esInBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); InputStreamStreamInput esBuffer = new InputStreamStreamInput(esInBuffer); - 
TermVectorsResponse inResponse = new TermVectorsResponse("a", "b", "c"); - inResponse.readFrom(esBuffer); + TermVectorsResponse inResponse = new TermVectorsResponse(esBuffer); // see if correct checkIfStandardTermVector(inResponse); @@ -93,8 +92,7 @@ public void testStreamResponse() throws Exception { // read esInBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); esBuffer = new InputStreamStreamInput(esInBuffer); - inResponse = new TermVectorsResponse("a", "b", "c"); - inResponse.readFrom(esBuffer); + inResponse = new TermVectorsResponse(esBuffer); assertTrue(inResponse.isExists()); } diff --git a/server/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java b/server/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java index eda54612148b6..5149a0837e908 100644 --- a/server/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java +++ b/server/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java @@ -170,7 +170,7 @@ public BoundTransportAddress boundAddress() { } @Override - public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException { + public TransportAddress[] addressesFromString(String address) throws UnknownHostException { throw new UnsupportedOperationException(); } diff --git a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index bdcaf80ee19e9..9e13dbaa89b18 100644 --- a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -128,7 +128,7 @@ private static class TestIteration implements Closeable { threadPool = new TestThreadPool("transport-client-nodes-service-tests"); transport = new FailAndRetryMockTransport(random(), clusterName) { @Override - public List getLocalAddresses() { + public List getDefaultSeedAddresses() { return Collections.emptyList(); } diff --git a/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 8f395c2d137a0..4bd5d2e675b75 100644 --- a/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -202,7 +202,6 @@ public void testTwoNodesNoMasterBlock() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/39172") public void testThreeNodesNoMasterBlock() throws Exception { internalCluster().setBootstrapMasterNodeIndex(2); diff --git a/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java index 25179427d863c..193cde3180db8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java @@ -401,7 +401,7 @@ public Map profileBoundAddresses() { } @Override - public TransportAddress[] addressesFromString(String address, int perAddressLimit) { + public TransportAddress[] addressesFromString(String address) { return new TransportAddress[0]; } @@ -440,7 +440,7 @@ public boolean isClosed() { } @Override - public List getLocalAddresses() { + public List getDefaultSeedAddresses() { 
return null; } diff --git a/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java b/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java index 4bd6c15853aa0..51a34d94b3a05 100644 --- a/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java @@ -32,9 +32,7 @@ import java.util.List; import static java.util.EnumSet.copyOf; -import static org.elasticsearch.test.VersionUtils.getPreviousVersion; import static org.elasticsearch.test.VersionUtils.randomVersion; -import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.hamcrest.CoreMatchers.endsWith; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; @@ -62,45 +60,6 @@ public void testSerialization() throws Exception { } } - public void testBwcSerialization() throws Exception { - for (int runs = 0; runs < randomIntBetween(5, 20); runs++) { - // Generate a random cluster block in version < 7.0.0 - final Version version = randomVersionBetween(random(), Version.V_6_0_0, getPreviousVersion(Version.V_6_7_0)); - final ClusterBlock expected = randomClusterBlock(version); - assertNull(expected.uuid()); - - // Serialize to node in current version - final BytesStreamOutput out = new BytesStreamOutput(); - expected.writeTo(out); - - // Deserialize and check the cluster block - final ClusterBlock actual = new ClusterBlock(out.bytes().streamInput()); - assertClusterBlockEquals(expected, actual); - } - - for (int runs = 0; runs < randomIntBetween(5, 20); runs++) { - // Generate a random cluster block in current version - final ClusterBlock expected = randomClusterBlock(Version.CURRENT); - - // Serialize to node in version < 7.0.0 - final BytesStreamOutput out = new BytesStreamOutput(); - out.setVersion(randomVersionBetween(random(), Version.V_6_0_0, getPreviousVersion(Version.V_6_7_0))); - expected.writeTo(out); - - // Deserialize and check the cluster block - final StreamInput in = out.bytes().streamInput(); - in.setVersion(out.getVersion()); - final ClusterBlock actual = new ClusterBlock(in); - - assertThat(actual.id(), equalTo(expected.id())); - assertThat(actual.status(), equalTo(expected.status())); - assertThat(actual.description(), equalTo(expected.description())); - assertThat(actual.retryable(), equalTo(expected.retryable())); - assertThat(actual.disableStatePersistence(), equalTo(expected.disableStatePersistence())); - assertArrayEquals(actual.levels().toArray(), expected.levels().toArray()); - } - } - public void testToStringDanglingComma() { final ClusterBlock clusterBlock = randomClusterBlock(); assertThat(clusterBlock.toString(), not(endsWith(","))); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java index 6f078217e4f45..b4d337a1bf57e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java @@ -26,6 +26,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskListener; @@ -54,6 +55,7 @@ import 
org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; @@ -63,6 +65,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.SeedHostsProvider.HostsResolver; import org.elasticsearch.env.NodeEnvironment; @@ -93,6 +96,7 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -135,6 +139,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; @@ -1149,6 +1154,67 @@ public void testSingleNodeDiscoveryWithQuorum() { cluster.stabilise(); } + private static class BrokenCustom extends AbstractDiffable implements ClusterState.Custom { + + static final String EXCEPTION_MESSAGE = "simulated"; + + @Override + public String getWriteableName() { + return "broken"; + } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new ElasticsearchException(EXCEPTION_MESSAGE); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder; + } + } + + public void testClusterRecoversAfterExceptionDuringSerialization() { + final Cluster cluster = new Cluster(randomIntBetween(2, 5)); // 1-node cluster doesn't do any serialization + cluster.runRandomly(); + cluster.stabilise(); + + final ClusterNode leader1 = cluster.getAnyLeader(); + + logger.info("--> submitting broken task to [{}]", leader1); + + final AtomicBoolean failed = new AtomicBoolean(); + leader1.submitUpdateTask("broken-task", + cs -> ClusterState.builder(cs).putCustom("broken", new BrokenCustom()).build(), + (source, e) -> { + assertThat(e.getCause(), instanceOf(ElasticsearchException.class)); + assertThat(e.getCause().getMessage(), equalTo(BrokenCustom.EXCEPTION_MESSAGE)); + failed.set(true); + }); + cluster.runFor(DEFAULT_DELAY_VARIABILITY + 1, "processing broken task"); + assertTrue(failed.get()); + + cluster.stabilise(); + + final ClusterNode leader2 = cluster.getAnyLeader(); + long finalValue = randomLong(); + + logger.info("--> submitting value [{}] to [{}]", finalValue, leader2); + leader2.submitValue(finalValue); + cluster.stabilise(DEFAULT_CLUSTER_STATE_UPDATE_DELAY); + + for (final ClusterNode clusterNode : cluster.clusterNodes) { + final String nodeId = clusterNode.getId(); + final ClusterState appliedState = clusterNode.getLastAppliedClusterState(); + assertThat(nodeId + " has the applied value", value(appliedState), is(finalValue)); + } + } + private static long 
defaultMillis(Setting setting) { return setting.get(Settings.EMPTY).millis() + Cluster.DEFAULT_DELAY_VARIABILITY; } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java index 35fa5786bbda3..e20559ca00561 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.coordination; import org.elasticsearch.Version; -import org.elasticsearch.cluster.coordination.JoinTaskExecutor; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -30,7 +29,6 @@ import org.elasticsearch.test.VersionUtils; import static org.elasticsearch.test.VersionUtils.getPreviousVersion; -import static org.elasticsearch.test.VersionUtils.incompatibleFutureVersion; import static org.elasticsearch.test.VersionUtils.maxCompatibleVersion; import static org.elasticsearch.test.VersionUtils.randomCompatibleVersion; import static org.elasticsearch.test.VersionUtils.randomVersion; @@ -89,21 +87,9 @@ public void testPreventJoinClusterWithUnsupportedNodeVersions() { }); } - if (minNodeVersion.before(Version.V_6_0_0)) { - Version tooHigh = incompatibleFutureVersion(minNodeVersion); - expectThrows(IllegalStateException.class, () -> { - if (randomBoolean()) { - JoinTaskExecutor.ensureNodesCompatibility(tooHigh, nodes); - } else { - JoinTaskExecutor.ensureNodesCompatibility(tooHigh, minNodeVersion, maxNodeVersion); - } - }); - } + Version oldMajor = Version.V_6_4_0.minimumCompatibilityVersion(); + expectThrows(IllegalStateException.class, () -> JoinTaskExecutor.ensureMajorVersionBarrier(oldMajor, minNodeVersion)); - if (minNodeVersion.onOrAfter(Version.V_7_0_0)) { - Version oldMajor = Version.V_6_4_0.minimumCompatibilityVersion(); - expectThrows(IllegalStateException.class, () -> JoinTaskExecutor.ensureMajorVersionBarrier(oldMajor, minNodeVersion)); - } final Version minGoodVersion = maxNodeVersion.major == minNodeVersion.major ? 
// we have to stick with the same major diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java b/server/src/test/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java index 17c9e3458c1f1..c31df3ade71cc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java @@ -181,7 +181,6 @@ public void testDeleteCreateInOneBulk() throws Exception { assertHitCount(client().prepareSearch("test").get(), 0); } - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/41030") public void testDelayedMappingPropagationOnPrimary() throws Exception { // Here we want to test that things go well if there is a first request // that adds mappings but before mappings are propagated to all nodes @@ -274,7 +273,6 @@ public void testDelayedMappingPropagationOnPrimary() throws Exception { }); } - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/36813") public void testDelayedMappingPropagationOnReplica() throws Exception { // This is essentially the same thing as testDelayedMappingPropagationOnPrimary // but for replicas diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java index e3ba62b3b79ab..f83d0aa783c24 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java @@ -100,13 +100,6 @@ public static boolean isSplitable(int source, int target) { } public void testNumberOfShards() { - { - final Version versionCreated = VersionUtils.randomVersionBetween( - random(), - Version.V_6_0_0_alpha1, VersionUtils.getPreviousVersion(Version.V_7_0_0)); - final Settings.Builder indexSettingsBuilder = Settings.builder().put(SETTING_VERSION_CREATED, versionCreated); - assertThat(MetaDataCreateIndexService.IndexCreationTask.getNumberOfShards(indexSettingsBuilder), equalTo(5)); - } { final Version versionCreated = VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT); final Settings.Builder indexSettingsBuilder = Settings.builder().put(SETTING_VERSION_CREATED, versionCreated); @@ -466,7 +459,7 @@ public void testCalculateNumRoutingShards() { double ratio = numRoutingShards / randomNumShards; int intRatio = (int) ratio; - assertEquals(ratio, (double)(intRatio), 0.0d); + assertEquals(ratio, intRatio, 0.0d); assertTrue(1 < ratio); assertTrue(ratio <= 1024); assertEquals(0, intRatio % 2); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java index 4c1ba0ff77e34..3724f47537428 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -165,7 +165,7 @@ public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_CREATION_DATE, 1) .put(IndexMetaData.SETTING_INDEX_UUID, "BOOM") - .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_6_0_0_alpha1) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, 
Version.CURRENT.minimumIndexCompatibilityVersion()) .put(indexSettings) .build(); return IndexMetaData.builder(name).settings(build).build(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java index 71a31b320591a..2e56ae6297b89 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java @@ -230,7 +230,7 @@ protected DiscoveryNode createNode(DiscoveryNode.Role... mustHaveRoles) { } final String id = String.format(Locale.ROOT, "node_%03d", nodeIdGenerator.incrementAndGet()); return new DiscoveryNode(id, id, buildNewFakeTransportAddress(), Collections.emptyMap(), roles, - VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_alpha1, null)); + VersionUtils.randomIndexCompatibleVersion(random())); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java index c2d6a67468f3f..889be132d45d9 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -633,8 +633,10 @@ public void testReplicaOnNewestVersionIsPromoted() { clusterState = ClusterState.builder(clusterState).nodes( DiscoveryNodes.builder(clusterState.nodes()) - .add(newNode("node3-6.x", VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_alpha1, null))) - .add(newNode("node4-6.x", VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_alpha1, null)))) + .add(newNode("node3-old", + VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumIndexCompatibilityVersion(), null))) + .add(newNode("node4-old", + VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumIndexCompatibilityVersion(), null)))) .build(); // start all the replicas diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java index c3a541fe87ec2..061d83c9c3865 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java @@ -733,14 +733,21 @@ public void testIso8601Parsers() { JodaDateFormatter jodaFormatter = new JodaDateFormatter(format, isoFormatter, isoFormatter); DateFormatter javaFormatter = DateFormatter.forPattern(format); + assertSameDate("2018-10-10", format, jodaFormatter, javaFormatter); assertSameDate("2018-10-10T", format, jodaFormatter, javaFormatter); assertSameDate("2018-10-10T10", format, jodaFormatter, javaFormatter); + assertSameDate("2018-10-10T10+0430", format, jodaFormatter, javaFormatter); assertSameDate("2018-10-10T10:11", format, jodaFormatter, javaFormatter); + assertSameDate("2018-10-10T10:11-08:00", format, jodaFormatter, javaFormatter); + assertSameDate("2018-10-10T10:11Z", format, jodaFormatter, javaFormatter); assertSameDate("2018-10-10T10:11:12", format, jodaFormatter, javaFormatter); + assertSameDate("2018-10-10T10:11:12+0100", format, jodaFormatter, javaFormatter); assertSameDate("2018-10-10T10:11:12.123", format, jodaFormatter, javaFormatter); 
assertSameDate("2018-10-10T10:11:12.123Z", format, jodaFormatter, javaFormatter); + assertSameDate("2018-10-10T10:11:12.123+0000", format, jodaFormatter, javaFormatter); assertSameDate("2018-10-10T10:11:12,123", format, jodaFormatter, javaFormatter); assertSameDate("2018-10-10T10:11:12,123Z", format, jodaFormatter, javaFormatter); + assertSameDate("2018-10-10T10:11:12,123+05:30", format, jodaFormatter, javaFormatter); } public void testParsingMissingTimezone() { diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/QueriesTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/QueriesTests.java index a1236fd53df92..bb759848dc4b7 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/QueriesTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/QueriesTests.java @@ -20,33 +20,21 @@ package org.elasticsearch.common.lucene.search; import org.apache.lucene.index.Term; -import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; -import org.elasticsearch.Version; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; public class QueriesTests extends ESTestCase { public void testNonNestedQuery() { - for (Version version : VersionUtils.allVersions()) { - // This is a custom query that extends AutomatonQuery and want to make sure the equals method works - assertEquals(Queries.newNonNestedFilter(version), Queries.newNonNestedFilter(version)); - assertEquals(Queries.newNonNestedFilter(version).hashCode(), Queries.newNonNestedFilter(version).hashCode()); - if (version.onOrAfter(Version.V_6_1_0)) { - assertEquals(Queries.newNonNestedFilter(version), new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME)); - } else { - assertEquals(Queries.newNonNestedFilter(version), new BooleanQuery.Builder() - .add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER) - .add(Queries.newNestedFilter(), BooleanClause.Occur.MUST_NOT) - .build()); - } - } + // This is a custom query that extends AutomatonQuery and want to make sure the equals method works + assertEquals(Queries.newNonNestedFilter(), Queries.newNonNestedFilter()); + assertEquals(Queries.newNonNestedFilter().hashCode(), Queries.newNonNestedFilter().hashCode()); + assertEquals(Queries.newNonNestedFilter(), new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME)); } public void testIsNegativeQuery() { diff --git a/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java b/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java index b18daf07bf361..febe2b976fb47 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java @@ -202,7 +202,7 @@ public void testLuceneVersionOnUnknownVersions() { // too old version, major should be the oldest supported lucene version minus 1 version = Version.fromString("5.2.1"); - assertEquals(Version.V_6_0_0.luceneVersion.major - 1, version.luceneVersion.major); + assertEquals(VersionUtils.getFirstVersion().luceneVersion.major - 1, version.luceneVersion.major); // future version, should be the same version as today version = Version.fromString("8.77.1"); diff --git 
a/server/src/test/java/org/elasticsearch/common/recycler/AbstractRecyclerTestCase.java b/server/src/test/java/org/elasticsearch/common/recycler/AbstractRecyclerTestCase.java index be7799fcd6c67..d2d12b32da4e1 100644 --- a/server/src/test/java/org/elasticsearch/common/recycler/AbstractRecyclerTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/recycler/AbstractRecyclerTestCase.java @@ -99,7 +99,6 @@ public void testReuse() { assertNotSame(b1, b2); } o.close(); - r.close(); } public void testRecycle() { @@ -111,7 +110,6 @@ public void testRecycle() { o = r.obtain(); assertRecycled(o.v()); o.close(); - r.close(); } public void testDoubleRelease() { @@ -128,7 +126,6 @@ public void testDoubleRelease() { final Recycler.V v2 = r.obtain(); final Recycler.V v3 = r.obtain(); assertNotSame(v2.v(), v3.v()); - r.close(); } public void testDestroyWhenOverCapacity() { @@ -152,9 +149,6 @@ public void testDestroyWhenOverCapacity() { // release first ref, verify for destruction o.close(); assertDead(data); - - // close the rest - r.close(); } public void testClose() { @@ -171,10 +165,6 @@ public void testClose() { // verify that recycle() ran assertRecycled(data); - - // closing the recycler should mark recycled instances via destroy() - r.close(); - assertDead(data); } } diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 220392a952c29..b2f73db90f722 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -964,4 +964,13 @@ public void testAffixMapUpdateWithNullSettingValue() { assertEquals("", value); } + public void testNonSecureSettingInKeystore() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("foo", "bar"); + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + Setting setting = Setting.simpleString("foo", Property.NodeScope); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> setting.get(settings)); + assertThat(e.getMessage(), containsString("must be stored inside elasticsearch.yml")); + } + } diff --git a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java index 3c97d27fe7874..8f2a661664304 100644 --- a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java @@ -198,6 +198,58 @@ public void testParsingStrictNanoDates() { formatter.format(formatter.parse("2018-05-15T17:14:56.123456789+01:00")); } + public void testIso8601Parsing() { + DateFormatter formatter = DateFormatters.forPattern("iso8601"); + + // timezone not allowed with just date + formatter.format(formatter.parse("2018-05-15")); + + formatter.format(formatter.parse("2018-05-15T17")); + formatter.format(formatter.parse("2018-05-15T17Z")); + formatter.format(formatter.parse("2018-05-15T17+0100")); + formatter.format(formatter.parse("2018-05-15T17+01:00")); + + formatter.format(formatter.parse("2018-05-15T17:14")); + formatter.format(formatter.parse("2018-05-15T17:14Z")); + formatter.format(formatter.parse("2018-05-15T17:14-0100")); + formatter.format(formatter.parse("2018-05-15T17:14-01:00")); + + formatter.format(formatter.parse("2018-05-15T17:14:56")); + 
formatter.format(formatter.parse("2018-05-15T17:14:56Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56+0100")); + formatter.format(formatter.parse("2018-05-15T17:14:56+01:00")); + + // milliseconds can be separated using comma or decimal point + formatter.format(formatter.parse("2018-05-15T17:14:56.123")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123-0100")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123-01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123+0100")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123+01:00")); + + // microseconds can be separated using comma or decimal point + formatter.format(formatter.parse("2018-05-15T17:14:56.123456")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456+0100")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456+01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456-0100")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456-01:00")); + + // nanoseconds can be separated using comma or decimal point + formatter.format(formatter.parse("2018-05-15T17:14:56.123456789")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456789Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456789-0100")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456789-01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456789")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456789Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456789+0100")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456789+01:00")); + } + public void testRoundupFormatterWithEpochDates() { assertRoundupFormatter("epoch_millis", "1234567890", 1234567890L); // also check nanos of the epoch_millis formatter if it is rounded up to the nano second @@ -260,4 +312,44 @@ public void test0MillisAreFormatted() { String formatted = formatter.formatMillis(clock.millis()); assertThat(formatted, is("2019-02-08T11:43:00.000Z")); } + + public void testFractionalSeconds() { + DateFormatter formatter = DateFormatters.forPattern("strict_date_optional_time"); + { + Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.1Z")); + assertThat(instant.getNano(), is(100_000_000)); + } + { + Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.12Z")); + assertThat(instant.getNano(), is(120_000_000)); + } + { + Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.123Z")); + assertThat(instant.getNano(), is(123_000_000)); + } + { + Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.1234Z")); + assertThat(instant.getNano(), is(123_400_000)); + } + { + Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.12345Z")); + assertThat(instant.getNano(), is(123_450_000)); + } + { + Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.123456Z")); + assertThat(instant.getNano(), is(123_456_000)); + } + { + Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.1234567Z")); + assertThat(instant.getNano(), is(123_456_700)); + } + { + 
Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.12345678Z")); + assertThat(instant.getNano(), is(123_456_780)); + } + { + Instant instant = Instant.from(formatter.parse("2019-05-06T14:52:37.123456789Z")); + assertThat(instant.getNano(), is(123_456_789)); + } + } } diff --git a/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java b/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java index 2b125127f66d3..4ef095da049ec 100644 --- a/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java @@ -45,7 +45,8 @@ public class DateUtilsTests extends ESTestCase { private static final Set IGNORE = new HashSet<>(Arrays.asList( - "Eire", "Europe/Dublin" // dublin timezone in joda does not account for DST + "Eire", "Europe/Dublin", // dublin timezone in joda does not account for DST + "Asia/Qostanay" // this has been added in joda 2.10.2 but is not part of the JDK 12.0.1 tzdata yet )); public void testTimezoneIds() { diff --git a/server/src/test/java/org/elasticsearch/common/util/MapsTests.java b/server/src/test/java/org/elasticsearch/common/util/MapsTests.java index a8338c7e4c38d..6edffee268ecc 100644 --- a/server/src/test/java/org/elasticsearch/common/util/MapsTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/MapsTests.java @@ -114,9 +114,22 @@ private void assertMapEntries(final Map map, final Collection map) { + final String nonPresentKey = randomValueOtherThanMany(map.keySet()::contains, () -> randomAlphaOfLength(16)); + assertUnsupported("putting a map entry should be unsupported", () -> map.put(nonPresentKey, randomAlphaOfLength(16))); + assertUnsupported("putting map entries should be unsupported", () -> map.putAll(Map.of(nonPresentKey, randomAlphaOfLength(16)))); + assertUnsupported( + "computing a new map association should be unsupported", + () -> map.compute(nonPresentKey, (k, v) -> randomAlphaOfLength(16))); + assertUnsupported( + "computing a new map association should be unsupported", + () -> map.computeIfAbsent(nonPresentKey, k -> randomAlphaOfLength(16))); + assertUnsupported("map merge should be unsupported", () -> map.merge(nonPresentKey, randomAlphaOfLength(16), (k, v) -> v)); + if (map.isEmpty()) { + return; + } + final String presentKey = randomFrom(map.keySet()); final String valueNotAssociatedWithPresentKey = randomValueOtherThan(map.get(presentKey), () -> randomAlphaOfLength(16)); - final String nonPresentKey = randomValueOtherThanMany(map.keySet()::contains, () -> randomAlphaOfLength(16)); assertUnsupported("clearing map should be unsupported", map::clear); assertUnsupported("replacing a map entry should be unsupported", () -> map.replace(presentKey, valueNotAssociatedWithPresentKey)); assertUnsupported( @@ -125,24 +138,15 @@ private void assertMapImmutability(final Map map) { assertUnsupported("replacing map entries should be unsupported", () -> map.replaceAll((k, v) -> v + v)); assertUnsupported("removing a map entry should be unsupported", () -> map.remove(presentKey)); assertUnsupported("removing a map entry should be unsupported", () -> map.remove(presentKey, map.get(presentKey))); - assertUnsupported("putting a map entry should be unsupported", () -> map.put(nonPresentKey, randomAlphaOfLength(16))); - assertUnsupported("putting map entries should be unsupported", () -> map.putAll(Map.of(nonPresentKey, randomAlphaOfLength(16)))); assertUnsupported( "computing a new map association should be unsupported", () -> 
map.compute(presentKey, (k, v) -> randomBoolean() ? null : v + v)); - assertUnsupported( - "computing a new map association should be unsupported", - () -> map.compute(nonPresentKey, (k, v) -> randomAlphaOfLength(16))); - assertUnsupported( - "computing a new map association should be unsupported", - () -> map.computeIfAbsent(nonPresentKey, k -> randomAlphaOfLength(16))); assertUnsupported( "computing a new map association should be unsupported", () -> map.computeIfPresent(presentKey, (k, v) -> randomBoolean() ? null : v + v)); assertUnsupported( "map merge should be unsupported", () -> map.merge(presentKey, map.get(presentKey), (k, v) -> randomBoolean() ? null : v + v)); - assertUnsupported("map merge should be unsupported", () -> map.merge(nonPresentKey, randomAlphaOfLength(16), (k, v) -> v)); } private void assertUnsupported(final String message, final ThrowingRunnable runnable) { diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTests.java index 1a32064fe7daa..46021344fb73a 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTests.java @@ -34,12 +34,13 @@ public void testConcurrent() throws InterruptedException { final AtomicInteger count = new AtomicInteger(0); final CountDown countDown = new CountDown(scaledRandomIntBetween(10, 1000)); Thread[] threads = new Thread[between(3, 10)]; - final CountDownLatch latch = new CountDownLatch(1); + final CountDownLatch latch = new CountDownLatch(1 + threads.length); for (int i = 0; i < threads.length; i++) { threads[i] = new Thread() { @Override public void run() { + latch.countDown(); try { latch.await(); } catch (InterruptedException e) { diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/KeyedLockTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/KeyedLockTests.java index e50e205ff1386..2160052619c11 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/KeyedLockTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/KeyedLockTests.java @@ -45,8 +45,8 @@ public void testIfMapEmptyAfterLotsOfAcquireAndReleases() throws InterruptedExce for (int i = 0; i < names.length; i++) { names[i] = randomRealisticUnicodeOfLengthBetween(10, 20); } - CountDownLatch startLatch = new CountDownLatch(1); int numThreads = randomIntBetween(3, 10); + final CountDownLatch startLatch = new CountDownLatch(1 + numThreads); AcquireAndReleaseThread[] threads = new AcquireAndReleaseThread[numThreads]; for (int i = 0; i < numThreads; i++) { threads[i] = new AcquireAndReleaseThread(startLatch, connectionLock, names, counter, safeCounter); @@ -157,6 +157,7 @@ public AcquireAndReleaseThread(CountDownLatch startLatch, KeyedLock conn @Override public void run() { + startLatch.countDown(); try { startLatch.await(); } catch (InterruptedException e) { diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java index 8e4c729ee9cef..6b5f7d95700d1 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java @@ -226,16 +226,43 @@ 
public void testExecutionEWMACalculation() throws Exception { context.close(); } + /** Use a runnable wrapper that simulates a task with unknown failures. */ + public void testExceptionThrowingTask() throws Exception { + ThreadContext context = new ThreadContext(Settings.EMPTY); + ResizableBlockingQueue queue = + new ResizableBlockingQueue<>(ConcurrentCollections.newBlockingQueue(), + 100); + + QueueResizingEsThreadPoolExecutor executor = + new QueueResizingEsThreadPoolExecutor( + "test-threadpool", 1, 1, 1000, + TimeUnit.MILLISECONDS, queue, 10, 200, exceptionalWrapper(), 10, TimeValue.timeValueMillis(1), + EsExecutors.daemonThreadFactory("queuetest"), new EsAbortPolicy(), context); + executor.prestartAllCoreThreads(); + logger.info("--> executor: {}", executor); + + assertThat((long)executor.getTaskExecutionEWMA(), equalTo(0L)); + executeTask(executor, 1); + executor.shutdown(); + executor.awaitTermination(10, TimeUnit.SECONDS); + context.close(); + } + private Function fastWrapper() { - return (runnable) -> { - return new SettableTimedRunnable(TimeUnit.NANOSECONDS.toNanos(100)); - }; + return (runnable) -> new SettableTimedRunnable(TimeUnit.NANOSECONDS.toNanos(100), false); } private Function slowWrapper() { - return (runnable) -> { - return new SettableTimedRunnable(TimeUnit.MINUTES.toNanos(2)); - }; + return (runnable) -> new SettableTimedRunnable(TimeUnit.MINUTES.toNanos(2), false); + } + + /** + * The returned function outputs a WrappedRunnabled that simulates the case + * where {@link TimedRunnable#getTotalExecutionNanos()} returns -1 because + * the job failed or was rejected before it finished. + */ + private Function exceptionalWrapper() { + return (runnable) -> new SettableTimedRunnable(TimeUnit.NANOSECONDS.toNanos(-1), true); } /** Execute a blank task {@code times} times for the executor */ @@ -248,10 +275,12 @@ private void executeTask(QueueResizingEsThreadPoolExecutor executor, int times) public class SettableTimedRunnable extends TimedRunnable { private final long timeTaken; + private final boolean testFailedOrRejected; - public SettableTimedRunnable(long timeTaken) { + public SettableTimedRunnable(long timeTaken, boolean failedOrRejected) { super(() -> {}); this.timeTaken = timeTaken; + this.testFailedOrRejected = failedOrRejected; } @Override @@ -263,5 +292,10 @@ public long getTotalNanos() { public long getTotalExecutionNanos() { return timeTaken; } + + @Override + public boolean getFailedOrRejected() { + return testFailedOrRejected; + } } } diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/RunOnceTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/RunOnceTests.java index e833edc9d56b3..a41d37be2150a 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/RunOnceTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/RunOnceTests.java @@ -45,9 +45,10 @@ public void testRunOnceConcurrently() throws InterruptedException { final RunOnce runOnce = new RunOnce(counter::incrementAndGet); final Thread[] threads = new Thread[between(3, 10)]; - final CountDownLatch latch = new CountDownLatch(1); + final CountDownLatch latch = new CountDownLatch(1 + threads.length); for (int i = 0; i < threads.length; i++) { threads[i] = new Thread(() -> { + latch.countDown(); try { latch.await(); } catch (InterruptedException e) { diff --git a/server/src/test/java/org/elasticsearch/discovery/SeedHostsResolverTests.java b/server/src/test/java/org/elasticsearch/discovery/SeedHostsResolverTests.java index 
3527d6de3da64..3d303ef47c462 100644 --- a/server/src/test/java/org/elasticsearch/discovery/SeedHostsResolverTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/SeedHostsResolverTests.java @@ -152,43 +152,6 @@ public void testResolvesAddressesInBackgroundAndIgnoresConcurrentCalls() throws assertThat(resolvedAddressesRef.get(), equalTo(transportAddresses)); } - public void testPortLimit() { - final NetworkService networkService = new NetworkService(Collections.emptyList()); - final Transport transport = new MockNioTransport( - Settings.EMPTY, - Version.CURRENT, - threadPool, - networkService, - PageCacheRecycler.NON_RECYCLING_INSTANCE, - new NamedWriteableRegistry(Collections.emptyList()), - new NoneCircuitBreakerService()) { - - @Override - public BoundTransportAddress boundAddress() { - return new BoundTransportAddress( - new TransportAddress[]{new TransportAddress(InetAddress.getLoopbackAddress(), 9500)}, - new TransportAddress(InetAddress.getLoopbackAddress(), 9500) - ); - } - }; - closeables.push(transport); - final TransportService transportService = - new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, - Collections.emptySet()); - closeables.push(transportService); - recreateSeedHostsResolver(transportService); - final int limitPortCounts = randomIntBetween(1, 10); - final List transportAddresses = seedHostsResolver.resolveHosts(Collections.singletonList("127.0.0.1"), - limitPortCounts); - assertThat(transportAddresses, hasSize(limitPortCounts)); - final Set ports = new HashSet<>(); - for (final TransportAddress address : transportAddresses) { - assertTrue(address.address().getAddress().isLoopbackAddress()); - ports.add(address.getPort()); - } - assertThat(ports, equalTo(IntStream.range(9300, 9300 + limitPortCounts).boxed().collect(Collectors.toSet()))); - } - public void testRemovingLocalAddresses() { final NetworkService networkService = new NetworkService(Collections.emptyList()); final InetAddress loopbackAddress = InetAddress.getLoopbackAddress(); @@ -218,9 +181,10 @@ public BoundTransportAddress boundAddress() { Collections.emptySet()); closeables.push(transportService); recreateSeedHostsResolver(transportService); - final List transportAddresses = seedHostsResolver.resolveHosts( - Collections.singletonList(NetworkAddress.format(loopbackAddress)), - 10); + List hosts = IntStream.range(9300, 9310) + .mapToObj(port -> NetworkAddress.format(loopbackAddress) + ":" + port) + .collect(Collectors.toList()); + final List transportAddresses = seedHostsResolver.resolveHosts(hosts); assertThat(transportAddresses, hasSize(7)); final Set ports = new HashSet<>(); for (final TransportAddress address : transportAddresses) { @@ -252,7 +216,7 @@ public BoundTransportAddress boundAddress() { } @Override - public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException { + public TransportAddress[] addressesFromString(String address) throws UnknownHostException { throw unknownHostException; } @@ -279,7 +243,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi try { Loggers.addAppender(logger, appender); - final List transportAddresses = seedHostsResolver.resolveHosts(Collections.singletonList(hostname), 1); + final List transportAddresses = seedHostsResolver.resolveHosts(Collections.singletonList(hostname)); assertThat(transportAddresses, empty()); appender.assertAllExpectationsMatched(); @@ -310,7 +274,7 @@ public BoundTransportAddress 
boundAddress() { } @Override - public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException { + public TransportAddress[] addressesFromString(String address) throws UnknownHostException { if ("hostname1".equals(address)) { return new TransportAddress[]{new TransportAddress(TransportAddress.META_ADDRESS, 9300)}; } else if ("hostname2".equals(address)) { @@ -346,7 +310,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi try { Loggers.addAppender(logger, appender); - final List transportAddresses = seedHostsResolver.resolveHosts(Arrays.asList("hostname1", "hostname2"), 1); + final List transportAddresses = seedHostsResolver.resolveHosts(Arrays.asList("hostname1", "hostname2")); assertThat(transportAddresses, hasSize(1)); appender.assertAllExpectationsMatched(); @@ -396,7 +360,7 @@ public BoundTransportAddress boundAddress() { try { Loggers.addAppender(logger, appender); final List transportAddresses = seedHostsResolver.resolveHosts( - Arrays.asList("127.0.0.1:9300:9300", "127.0.0.1:9301"), 1); + Arrays.asList("127.0.0.1:9300:9300", "127.0.0.1:9301")); assertThat(transportAddresses, hasSize(1)); // only one of the two is valid and will be used assertThat(transportAddresses.get(0).getAddress(), equalTo("127.0.0.1")); assertThat(transportAddresses.get(0).getPort(), equalTo(9301)); diff --git a/server/src/test/java/org/elasticsearch/discovery/SettingsBasedSeedHostsProviderTests.java b/server/src/test/java/org/elasticsearch/discovery/SettingsBasedSeedHostsProviderTests.java index d98e152149382..226b61c002b5d 100644 --- a/server/src/test/java/org/elasticsearch/discovery/SettingsBasedSeedHostsProviderTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/SettingsBasedSeedHostsProviderTests.java @@ -37,18 +37,15 @@ public class SettingsBasedSeedHostsProviderTests extends ESTestCase { private class AssertingHostsResolver implements HostsResolver { private final Set expectedHosts; - private final int expectedPortCount; private boolean resolvedHosts; - AssertingHostsResolver(int expectedPortCount, String... expectedHosts) { - this.expectedPortCount = expectedPortCount; + AssertingHostsResolver(String... 
expectedHosts) { this.expectedHosts = Sets.newHashSet(expectedHosts); } @Override - public List resolveHosts(List hosts, int limitPortCounts) { - assertEquals(expectedPortCount, limitPortCounts); + public List resolveHosts(List hosts) { assertEquals(expectedHosts, Sets.newHashSet(hosts)); resolvedHosts = true; return emptyList(); @@ -60,15 +57,19 @@ boolean getResolvedHosts() { } public void testScansPortsByDefault() { - final AssertingHostsResolver hostsResolver = new AssertingHostsResolver(5, "::1", "127.0.0.1"); + final AssertingHostsResolver hostsResolver = new AssertingHostsResolver( + "[::1]:9300", "[::1]:9301", "127.0.0.1:9300", "127.0.0.1:9301" + ); final TransportService transportService = mock(TransportService.class); - when(transportService.getLocalAddresses()).thenReturn(Arrays.asList("::1", "127.0.0.1")); + when(transportService.getDefaultSeedAddresses()).thenReturn( + Arrays.asList("[::1]:9300", "[::1]:9301", "127.0.0.1:9300", "127.0.0.1:9301") + ); new SettingsBasedSeedHostsProvider(Settings.EMPTY, transportService).getSeedAddresses(hostsResolver); assertTrue(hostsResolver.getResolvedHosts()); } public void testGetsHostsFromSetting() { - final AssertingHostsResolver hostsResolver = new AssertingHostsResolver(1, "bar", "foo"); + final AssertingHostsResolver hostsResolver = new AssertingHostsResolver("bar", "foo"); new SettingsBasedSeedHostsProvider(Settings.builder() .putList(SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING.getKey(), "foo", "bar") .build(), null).getSeedAddresses(hostsResolver); diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentIT.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentIT.java index 36f75c79a1792..37e260a01d069 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentIT.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentIT.java @@ -19,12 +19,18 @@ package org.elasticsearch.env; +import org.elasticsearch.Version; +import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; +import java.nio.file.Path; + +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.startsWith; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) @@ -86,4 +92,35 @@ public Settings onNodeStopped(String nodeName) { + Node.NODE_DATA_SETTING.getKey() + "=false, but has shard data")); } + + private IllegalStateException expectThrowsOnRestart(CheckedConsumer onNodeStopped) { + internalCluster().startNode(); + final Path[] dataPaths = internalCluster().getInstance(NodeEnvironment.class).nodeDataPaths(); + return expectThrows(IllegalStateException.class, + () -> internalCluster().restartRandomDataNode(new InternalTestCluster.RestartCallback() { + @Override + public Settings onNodeStopped(String nodeName) { + try { + onNodeStopped.accept(dataPaths); + } catch (Exception e) { + throw new AssertionError(e); + } + return Settings.EMPTY; + } + })); + } + + public void testFailsToStartIfDowngraded() { + final IllegalStateException illegalStateException = expectThrowsOnRestart(dataPaths -> + NodeMetaData.FORMAT.writeAndCleanup(new NodeMetaData(randomAlphaOfLength(10), NodeMetaDataTests.tooNewVersion()), dataPaths)); + assertThat(illegalStateException.getMessage(), + allOf(startsWith("cannot 
downgrade a node from version ["), endsWith("] to version [" + Version.CURRENT + "]"))); + } + + public void testFailsToStartIfUpgradedTooFar() { + final IllegalStateException illegalStateException = expectThrowsOnRestart(dataPaths -> + NodeMetaData.FORMAT.writeAndCleanup(new NodeMetaData(randomAlphaOfLength(10), NodeMetaDataTests.tooOldVersion()), dataPaths)); + assertThat(illegalStateException.getMessage(), + allOf(startsWith("cannot upgrade a node from version ["), endsWith("] directly to version [" + Version.CURRENT + "]"))); + } } diff --git a/server/src/test/java/org/elasticsearch/env/NodeMetaDataTests.java b/server/src/test/java/org/elasticsearch/env/NodeMetaDataTests.java new file mode 100644 index 0000000000000..59cf6247f9613 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/env/NodeMetaDataTests.java @@ -0,0 +1,118 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.env; + +import org.elasticsearch.Version; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.gateway.MetaDataStateFormat; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.elasticsearch.test.VersionUtils; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; + +public class NodeMetaDataTests extends ESTestCase { + private Version randomVersion() { + // VersionUtils.randomVersion() only returns known versions, which are necessarily no later than Version.CURRENT; however we want + // also to consider our behaviour with all versions, so occasionally pick up a truly random version. + return rarely() ? 
Version.fromId(randomInt()) : VersionUtils.randomVersion(random()); + } + + public void testEqualsHashcodeSerialization() { + final Path tempDir = createTempDir(); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(new NodeMetaData(randomAlphaOfLength(10), randomVersion()), + nodeMetaData -> { + final long generation = NodeMetaData.FORMAT.writeAndCleanup(nodeMetaData, tempDir); + final Tuple nodeMetaDataLongTuple + = NodeMetaData.FORMAT.loadLatestStateWithGeneration(logger, xContentRegistry(), tempDir); + assertThat(nodeMetaDataLongTuple.v2(), equalTo(generation)); + return nodeMetaDataLongTuple.v1(); + }, nodeMetaData -> { + if (randomBoolean()) { + return new NodeMetaData(randomAlphaOfLength(21 - nodeMetaData.nodeId().length()), nodeMetaData.nodeVersion()); + } else { + return new NodeMetaData(nodeMetaData.nodeId(), randomValueOtherThan(nodeMetaData.nodeVersion(), this::randomVersion)); + } + }); + } + + public void testReadsFormatWithoutVersion() throws IOException { + // the behaviour tested here is only appropriate if the current version is compatible with versions 7 and earlier + assertTrue(Version.CURRENT.minimumIndexCompatibilityVersion().onOrBefore(Version.V_7_0_0)); + // when the current version is incompatible with version 7, the behaviour should change to reject files like the given resource + // which do not have the version field + + final Path tempDir = createTempDir(); + final Path stateDir = Files.createDirectory(tempDir.resolve(MetaDataStateFormat.STATE_DIR_NAME)); + final InputStream resource = this.getClass().getResourceAsStream("testReadsFormatWithoutVersion.binary"); + assertThat(resource, notNullValue()); + Files.copy(resource, stateDir.resolve(NodeMetaData.FORMAT.getStateFileName(between(0, Integer.MAX_VALUE)))); + final NodeMetaData nodeMetaData = NodeMetaData.FORMAT.loadLatestState(logger, xContentRegistry(), tempDir); + assertThat(nodeMetaData.nodeId(), equalTo("y6VUVMSaStO4Tz-B5BxcOw")); + assertThat(nodeMetaData.nodeVersion(), equalTo(Version.V_EMPTY)); + } + + public void testUpgradesLegitimateVersions() { + final String nodeId = randomAlphaOfLength(10); + final NodeMetaData nodeMetaData = new NodeMetaData(nodeId, + randomValueOtherThanMany(v -> v.after(Version.CURRENT) || v.before(Version.CURRENT.minimumIndexCompatibilityVersion()), + this::randomVersion)).upgradeToCurrentVersion(); + assertThat(nodeMetaData.nodeVersion(), equalTo(Version.CURRENT)); + assertThat(nodeMetaData.nodeId(), equalTo(nodeId)); + } + + public void testUpgradesMissingVersion() { + final String nodeId = randomAlphaOfLength(10); + final NodeMetaData nodeMetaData = new NodeMetaData(nodeId, Version.V_EMPTY).upgradeToCurrentVersion(); + assertThat(nodeMetaData.nodeVersion(), equalTo(Version.CURRENT)); + assertThat(nodeMetaData.nodeId(), equalTo(nodeId)); + } + + public void testDoesNotUpgradeFutureVersion() { + final IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, + () -> new NodeMetaData(randomAlphaOfLength(10), tooNewVersion()) + .upgradeToCurrentVersion()); + assertThat(illegalStateException.getMessage(), + allOf(startsWith("cannot downgrade a node from version ["), endsWith("] to version [" + Version.CURRENT + "]"))); + } + + public void testDoesNotUpgradeAncientVersion() { + final IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, + () -> new NodeMetaData(randomAlphaOfLength(10), tooOldVersion()).upgradeToCurrentVersion()); + assertThat(illegalStateException.getMessage(), + allOf(startsWith("cannot upgrade a 
node from version ["), endsWith("] directly to version [" + Version.CURRENT + "]"))); + } + + public static Version tooNewVersion() { + return Version.fromId(between(Version.CURRENT.id + 1, 99999999)); + } + + public static Version tooOldVersion() { + return Version.fromId(between(1, Version.CURRENT.minimumIndexCompatibilityVersion().id - 1)); + } +} diff --git a/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java b/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java new file mode 100644 index 0000000000000..704617c7b5e95 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/env/OverrideNodeVersionCommandTests.java @@ -0,0 +1,155 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.env; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.cli.MockTerminal; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.WriteStateException; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.nio.file.Path; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class OverrideNodeVersionCommandTests extends ESTestCase { + + private Environment environment; + private Path[] nodePaths; + + @Before + public void createNodePaths() throws IOException { + final Settings settings = buildEnvSettings(Settings.EMPTY); + environment = TestEnvironment.newEnvironment(settings); + try (NodeEnvironment nodeEnvironment = new NodeEnvironment(settings, environment)) { + nodePaths = nodeEnvironment.nodeDataPaths(); + } + } + + public void testFailsOnEmptyPath() { + final Path emptyPath = createTempDir(); + final MockTerminal mockTerminal = new MockTerminal(); + final ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class, () -> + new OverrideNodeVersionCommand().processNodePaths(mockTerminal, new Path[]{emptyPath}, environment)); + assertThat(elasticsearchException.getMessage(), equalTo(OverrideNodeVersionCommand.NO_METADATA_MESSAGE)); + expectThrows(IllegalStateException.class, () -> mockTerminal.readText("")); + } + + public void testFailsIfUnnecessary() throws WriteStateException { + final Version nodeVersion = Version.fromId(between(Version.CURRENT.minimumIndexCompatibilityVersion().id, Version.CURRENT.id)); + NodeMetaData.FORMAT.writeAndCleanup(new NodeMetaData(randomAlphaOfLength(10), nodeVersion), nodePaths); + final MockTerminal mockTerminal = new MockTerminal(); + final ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class, () -> + new 
OverrideNodeVersionCommand().processNodePaths(mockTerminal, nodePaths, environment)); + assertThat(elasticsearchException.getMessage(), allOf( + containsString("compatible with current version"), + containsString(Version.CURRENT.toString()), + containsString(nodeVersion.toString()))); + expectThrows(IllegalStateException.class, () -> mockTerminal.readText("")); + } + + public void testWarnsIfTooOld() throws Exception { + final String nodeId = randomAlphaOfLength(10); + final Version nodeVersion = NodeMetaDataTests.tooOldVersion(); + NodeMetaData.FORMAT.writeAndCleanup(new NodeMetaData(nodeId, nodeVersion), nodePaths); + final MockTerminal mockTerminal = new MockTerminal(); + mockTerminal.addTextInput("n\n"); + final ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class, () -> + new OverrideNodeVersionCommand().processNodePaths(mockTerminal, nodePaths, environment)); + assertThat(elasticsearchException.getMessage(), equalTo("aborted by user")); + assertThat(mockTerminal.getOutput(), allOf( + containsString("too old"), + containsString("data loss"), + containsString("You should not use this tool"), + containsString(Version.CURRENT.toString()), + containsString(nodeVersion.toString()))); + expectThrows(IllegalStateException.class, () -> mockTerminal.readText("")); + + final NodeMetaData nodeMetaData = NodeMetaData.FORMAT.loadLatestState(logger, xContentRegistry(), nodePaths); + assertThat(nodeMetaData.nodeId(), equalTo(nodeId)); + assertThat(nodeMetaData.nodeVersion(), equalTo(nodeVersion)); + } + + public void testWarnsIfTooNew() throws Exception { + final String nodeId = randomAlphaOfLength(10); + final Version nodeVersion = NodeMetaDataTests.tooNewVersion(); + NodeMetaData.FORMAT.writeAndCleanup(new NodeMetaData(nodeId, nodeVersion), nodePaths); + final MockTerminal mockTerminal = new MockTerminal(); + mockTerminal.addTextInput(randomFrom("yy", "Yy", "n", "yes", "true", "N", "no")); + final ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class, () -> + new OverrideNodeVersionCommand().processNodePaths(mockTerminal, nodePaths, environment)); + assertThat(elasticsearchException.getMessage(), equalTo("aborted by user")); + assertThat(mockTerminal.getOutput(), allOf( + containsString("data loss"), + containsString("You should not use this tool"), + containsString(Version.CURRENT.toString()), + containsString(nodeVersion.toString()))); + expectThrows(IllegalStateException.class, () -> mockTerminal.readText("")); + + final NodeMetaData nodeMetaData = NodeMetaData.FORMAT.loadLatestState(logger, xContentRegistry(), nodePaths); + assertThat(nodeMetaData.nodeId(), equalTo(nodeId)); + assertThat(nodeMetaData.nodeVersion(), equalTo(nodeVersion)); + } + + public void testOverwritesIfTooOld() throws Exception { + final String nodeId = randomAlphaOfLength(10); + final Version nodeVersion = NodeMetaDataTests.tooOldVersion(); + NodeMetaData.FORMAT.writeAndCleanup(new NodeMetaData(nodeId, nodeVersion), nodePaths); + final MockTerminal mockTerminal = new MockTerminal(); + mockTerminal.addTextInput(randomFrom("y", "Y")); + new OverrideNodeVersionCommand().processNodePaths(mockTerminal, nodePaths, environment); + assertThat(mockTerminal.getOutput(), allOf( + containsString("too old"), + containsString("data loss"), + containsString("You should not use this tool"), + containsString(Version.CURRENT.toString()), + containsString(nodeVersion.toString()), + containsString(OverrideNodeVersionCommand.SUCCESS_MESSAGE))); + 
expectThrows(IllegalStateException.class, () -> mockTerminal.readText("")); + + final NodeMetaData nodeMetaData = NodeMetaData.FORMAT.loadLatestState(logger, xContentRegistry(), nodePaths); + assertThat(nodeMetaData.nodeId(), equalTo(nodeId)); + assertThat(nodeMetaData.nodeVersion(), equalTo(Version.CURRENT)); + } + + public void testOverwritesIfTooNew() throws Exception { + final String nodeId = randomAlphaOfLength(10); + final Version nodeVersion = NodeMetaDataTests.tooNewVersion(); + NodeMetaData.FORMAT.writeAndCleanup(new NodeMetaData(nodeId, nodeVersion), nodePaths); + final MockTerminal mockTerminal = new MockTerminal(); + mockTerminal.addTextInput(randomFrom("y", "Y")); + new OverrideNodeVersionCommand().processNodePaths(mockTerminal, nodePaths, environment); + assertThat(mockTerminal.getOutput(), allOf( + containsString("data loss"), + containsString("You should not use this tool"), + containsString(Version.CURRENT.toString()), + containsString(nodeVersion.toString()), + containsString(OverrideNodeVersionCommand.SUCCESS_MESSAGE))); + expectThrows(IllegalStateException.class, () -> mockTerminal.readText("")); + + final NodeMetaData nodeMetaData = NodeMetaData.FORMAT.loadLatestState(logger, xContentRegistry(), nodePaths); + assertThat(nodeMetaData.nodeId(), equalTo(nodeId)); + assertThat(nodeMetaData.nodeVersion(), equalTo(Version.CURRENT)); + } +} diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java index d6fd80f3513c2..4049cec796102 100644 --- a/server/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java @@ -32,8 +32,7 @@ private GatewayService createService(final Settings.Builder settings) { final ClusterService clusterService = new ClusterService(Settings.builder().put("cluster.name", "GatewayServiceTests").build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), null); - return new GatewayService(settings.build(), - null, clusterService, null, null, null, null); + return new GatewayService(settings.build(), null, clusterService, null, null, null); } public void testDefaultRecoverAfterTime() { diff --git a/server/src/test/java/org/elasticsearch/gateway/MockGatewayMetaState.java b/server/src/test/java/org/elasticsearch/gateway/MockGatewayMetaState.java index 7541ca860def6..7749a0edc37b8 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MockGatewayMetaState.java +++ b/server/src/test/java/org/elasticsearch/gateway/MockGatewayMetaState.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.MetaDataUpgrader; import org.elasticsearch.transport.TransportService; @@ -46,10 +45,9 @@ public class MockGatewayMetaState extends GatewayMetaState { public MockGatewayMetaState(Settings settings, NodeEnvironment nodeEnvironment, NamedXContentRegistry xContentRegistry, DiscoveryNode localNode) throws IOException { - super(settings, nodeEnvironment, new MetaStateService(nodeEnvironment, xContentRegistry), + super(settings, new MetaStateService(nodeEnvironment, xContentRegistry), mock(MetaDataIndexUpgradeService.class), mock(MetaDataUpgrader.class), - mock(TransportService.class), mock(ClusterService.class), - mock(IndicesService.class)); + 
mock(TransportService.class), mock(ClusterService.class)); this.localNode = localNode; } diff --git a/server/src/test/java/org/elasticsearch/get/LegacyGetActionIT.java b/server/src/test/java/org/elasticsearch/get/LegacyGetActionIT.java deleted file mode 100644 index 4382f677ad63e..0000000000000 --- a/server/src/test/java/org/elasticsearch/get/LegacyGetActionIT.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.get; - -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.test.ESIntegTestCase; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.get.GetActionIT.indexOrAlias; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; - -public class LegacyGetActionIT extends ESIntegTestCase { - - @Override - protected boolean forbidPrivateIndexSettings() { - return false; - } - - public void testGetFieldsMetaDataWithRouting() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("_doc", "field1", "type=keyword,store=true") - .addAlias(new Alias("alias")) - .setSettings( - Settings.builder() - .put("index.refresh_interval", -1) - .put(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey(), Version.V_6_0_0))); // multi-types in 6.0.0 - - try (XContentBuilder source = jsonBuilder().startObject().field("field1", "value").endObject()) { - client() - .prepareIndex("test", "_doc", "1") - .setRouting("1") - .setSource(source) - .get(); - } - - { - final GetResponse getResponse = client() - .prepareGet(indexOrAlias(), "_doc", "1") - .setRouting("1") - .setStoredFields("field1") - .get(); - assertThat(getResponse.isExists(), equalTo(true)); - assertThat(getResponse.getField("field1").isMetadataField(), equalTo(false)); - assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value")); - assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true)); - assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1")); - } - - flush(); - - { - final GetResponse getResponse = client() - .prepareGet(indexOrAlias(), "_doc", "1") - .setStoredFields("field1") - .setRouting("1") - .get(); - assertThat(getResponse.isExists(), equalTo(true)); - assertThat(getResponse.getField("field1").isMetadataField(), equalTo(false)); - assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value")); - 
assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true)); - assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1")); - } - } - -} diff --git a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java index b836a5d0372b8..f7437ad8ec5a9 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java @@ -123,13 +123,13 @@ public TokenStream create(TokenStream tokenStream) { Analyzer analyzer = new CustomAnalyzer("tokenizerName", null, new CharFilterFactory[0], new TokenFilterFactory[] { tokenFilter }); MapperException ex = expectThrows(MapperException.class, () -> emptyRegistry.build(IndexSettingsModule.newIndexSettings("index", settings), - singletonMap("default", new PreBuiltAnalyzerProvider("my_analyzer", AnalyzerScope.INDEX, analyzer)), emptyMap(), + singletonMap("default", new PreBuiltAnalyzerProvider("default", AnalyzerScope.INDEX, analyzer)), emptyMap(), emptyMap(), emptyMap(), emptyMap())); - assertEquals("analyzer [my_analyzer] contains filters [my_filter] that are not allowed to run in all mode.", ex.getMessage()); + assertEquals("analyzer [default] contains filters [my_filter] that are not allowed to run in all mode.", ex.getMessage()); } public void testOverrideDefaultIndexAnalyzerIsUnsupported() { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_alpha1, Version.CURRENT); + Version version = VersionUtils.randomIndexCompatibleVersion(random()); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); AnalyzerProvider defaultIndex = new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, diff --git a/server/src/test/java/org/elasticsearch/index/analysis/IndexAnalyzersTests.java b/server/src/test/java/org/elasticsearch/index/analysis/IndexAnalyzersTests.java new file mode 100644 index 0000000000000..f5808c4b0da5c --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/analysis/IndexAnalyzersTests.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public class IndexAnalyzersTests extends ESTestCase { + + /** + * test the checks in the constructor + */ + public void testAnalyzerMapChecks() { + Map analyzers = new HashMap<>(); + { + NullPointerException ex = expectThrows(NullPointerException.class, + () -> new IndexAnalyzers(IndexSettingsModule.newIndexSettings("index", Settings.EMPTY), analyzers, + Collections.emptyMap(), Collections.emptyMap())); + assertEquals("the default analyzer must be set", ex.getMessage()); + } + { + analyzers.put(AnalysisRegistry.DEFAULT_ANALYZER_NAME, + new NamedAnalyzer("otherName", AnalyzerScope.INDEX, new StandardAnalyzer())); + IllegalStateException ex = expectThrows(IllegalStateException.class, + () -> new IndexAnalyzers(IndexSettingsModule.newIndexSettings("index", Settings.EMPTY), analyzers, + Collections.emptyMap(), Collections.emptyMap())); + assertEquals("default analyzer must have the name [default] but was: [otherName]", ex.getMessage()); + } + } + + public void testAnalyzerDefaults() throws IOException { + Map analyzers = new HashMap<>(); + NamedAnalyzer analyzer = new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer()); + analyzers.put(AnalysisRegistry.DEFAULT_ANALYZER_NAME, analyzer); + + // if only "default" is set in the map, all getters should return the same analyzer + try (IndexAnalyzers indexAnalyzers = new IndexAnalyzers(IndexSettingsModule.newIndexSettings("index", Settings.EMPTY), analyzers, + Collections.emptyMap(), Collections.emptyMap())) { + assertSame(analyzer, indexAnalyzers.getDefaultIndexAnalyzer()); + assertSame(analyzer, indexAnalyzers.getDefaultSearchAnalyzer()); + assertSame(analyzer, indexAnalyzers.getDefaultSearchQuoteAnalyzer()); + } + + analyzers.put(AnalysisRegistry.DEFAULT_SEARCH_ANALYZER_NAME, + new NamedAnalyzer("my_search_analyzer", AnalyzerScope.INDEX, new StandardAnalyzer())); + try (IndexAnalyzers indexAnalyzers = new IndexAnalyzers(IndexSettingsModule.newIndexSettings("index", Settings.EMPTY), analyzers, + Collections.emptyMap(), Collections.emptyMap())) { + assertSame(analyzer, indexAnalyzers.getDefaultIndexAnalyzer()); + assertEquals("my_search_analyzer", indexAnalyzers.getDefaultSearchAnalyzer().name()); + assertEquals("my_search_analyzer", indexAnalyzers.getDefaultSearchQuoteAnalyzer().name()); + } + + analyzers.put(AnalysisRegistry.DEFAULT_SEARCH_QUOTED_ANALYZER_NAME, + new NamedAnalyzer("my_search_quote_analyzer", AnalyzerScope.INDEX, new StandardAnalyzer())); + try (IndexAnalyzers indexAnalyzers = new IndexAnalyzers(IndexSettingsModule.newIndexSettings("index", Settings.EMPTY), analyzers, + Collections.emptyMap(), Collections.emptyMap())) { + assertSame(analyzer, indexAnalyzers.getDefaultIndexAnalyzer()); + assertEquals("my_search_analyzer", indexAnalyzers.getDefaultSearchAnalyzer().name()); + assertEquals("my_search_quote_analyzer", indexAnalyzers.getDefaultSearchQuoteAnalyzer().name()); + } + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index cdc38cd3abd0d..65958ec9319c2 100644 --- 
a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.Collection; @@ -61,17 +62,17 @@ public void testThatDefaultAndStandardAnalyzerAreTheSameInstance() { public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() { assertThat(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.CURRENT), - is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_6_0_0))); + is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.CURRENT.minimumIndexCompatibilityVersion()))); } public void testThatInstancesAreCachedAndReused() { assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT), PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT)); // same es version should be cached - assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_2_1), - PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_2_1)); - assertNotSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_0_0), - PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_0_1)); + Version v = VersionUtils.randomVersion(random()); + assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(v), PreBuiltAnalyzers.STANDARD.getAnalyzer(v)); + assertNotSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT), + PreBuiltAnalyzers.STANDARD.getAnalyzer(VersionUtils.randomPreviousCompatibleVersion(random(), Version.CURRENT))); // Same Lucene version should be cached: assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_6_2_1), diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index ae11500e54e5e..82c4035cfa7db 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -21,6 +21,7 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; + import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -2290,7 +2291,7 @@ public void testSeqNoAndCheckpoints() throws IOException { ReplicationTracker gcpTracker = (ReplicationTracker) initialEngine.config().getGlobalCheckpointSupplier(); gcpTracker.updateFromMaster(1L, new HashSet<>(Arrays.asList(primary.allocationId().getId(), replica.allocationId().getId())), - new IndexShardRoutingTable.Builder(shardId).addShard(primary).addShard(replica).build(), Collections.emptySet()); + new IndexShardRoutingTable.Builder(shardId).addShard(primary).addShard(replica).build()); gcpTracker.activatePrimaryMode(primarySeqNo); for (int op = 0; op < opCount; op++) { final String id; diff --git a/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java index 3a857a2046842..f03500e6e1250 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/NoOpEngineTests.java @@ -75,7 +75,7 @@ public void testNoopAfterRegularEngine() throws IOException { ShardRouting routing = TestShardRouting.newShardRouting("test", shardId.id(), "node", null, true, 
ShardRoutingState.STARTED, allocationId); IndexShardRoutingTable table = new IndexShardRoutingTable.Builder(shardId).addShard(routing).build(); - tracker.updateFromMaster(1L, Collections.singleton(allocationId.getId()), table, Collections.emptySet()); + tracker.updateFromMaster(1L, Collections.singleton(allocationId.getId()), table); tracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); for (int i = 0; i < docs; i++) { ParsedDocument doc = testParsedDocument("" + i, null, testDocumentWithTextField(), B_1, null); diff --git a/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java index f9437ac9251bf..e0ad514e6dbb9 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java @@ -18,9 +18,12 @@ */ package org.elasticsearch.index.engine; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.seqno.SeqNoStats; @@ -32,7 +35,9 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; +import static org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader.getElasticsearchDirectoryReader; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; public class ReadOnlyEngineTests extends EngineTestCase { @@ -80,6 +85,13 @@ public void testReadOnlyEngine() throws Exception { Engine.Searcher external = readOnlyEngine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL); Engine.Searcher internal = readOnlyEngine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); assertSame(external.reader(), internal.reader()); + assertThat(external.reader(), instanceOf(DirectoryReader.class)); + DirectoryReader dirReader = external.getDirectoryReader(); + ElasticsearchDirectoryReader esReader = getElasticsearchDirectoryReader(dirReader); + IndexReader.CacheHelper helper = esReader.getReaderCacheHelper(); + assertNotNull(helper); + assertEquals(helper.getKey(), dirReader.getReaderCacheHelper().getKey()); + IOUtils.close(external, internal); // the locked down engine should still point to the previous commit assertThat(readOnlyEngine.getLocalCheckpoint(), equalTo(lastSeqNoStats.getLocalCheckpoint())); @@ -88,7 +100,6 @@ public void testReadOnlyEngine() throws Exception { try (Engine.GetResult getResult = readOnlyEngine.get(get, readOnlyEngine::acquireSearcher)) { assertTrue(getResult.exists()); } - } // Close and reopen the main engine try (InternalEngine recoveringEngine = new InternalEngine(config)) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java index 34200b51cb317..1a6e6e2c90aed 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/AllFieldMapperTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -28,9 +26,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.VersionUtils; - -import static org.hamcrest.CoreMatchers.containsString; public class AllFieldMapperTests extends ESSingleNodeTestCase { @@ -39,64 +34,6 @@ protected boolean forbidPrivateIndexSettings() { return false; } - public void testAllDisabled() throws Exception { - { - final Version version = VersionUtils.randomVersionBetween(random(), - Version.V_6_0_0, Version.V_7_0_0.minimumCompatibilityVersion()); - IndexService indexService = createIndex("test_6x", - Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, version) - .build() - ); - String mappingDisabled = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("_all") - .field("enabled", false) - .endObject().endObject() - ); - indexService.mapperService().merge("_doc", new CompressedXContent(mappingDisabled), MergeReason.MAPPING_UPDATE); - - String mappingEnabled = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("_all") - .field("enabled", true) - .endObject().endObject() - ); - MapperParsingException exc = expectThrows(MapperParsingException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(mappingEnabled), MergeReason.MAPPING_UPDATE)); - assertThat(exc.getMessage(), containsString("[_all] is disabled in this version.")); - } - { - IndexService indexService = createIndex("test"); - String mappingEnabled = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("_all") - .field("enabled", true) - .endObject().endObject() - ); - MapperParsingException exc = expectThrows(MapperParsingException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(mappingEnabled), MergeReason.MAPPING_UPDATE)); - assertThat(exc.getMessage(), containsString("unsupported parameters: [_all")); - - String mappingDisabled = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("_all") - .field("enabled", false) - .endObject().endObject() - ); - exc = expectThrows(MapperParsingException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(mappingDisabled), MergeReason.MAPPING_UPDATE)); - assertThat(exc.getMessage(), containsString("unsupported parameters: [_all")); - - String mappingAll = Strings.toString(XContentFactory.jsonBuilder().startObject() - .startObject("_all").endObject().endObject() - ); - exc = expectThrows(MapperParsingException.class, - () -> indexService.mapperService().merge("_doc", new CompressedXContent(mappingAll), MergeReason.MAPPING_UPDATE)); - assertThat(exc.getMessage(), containsString("unsupported parameters: [_all")); - - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().endObject()); - indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); - assertEquals("{\"_doc\":{}}", indexService.mapperService().documentMapper("_doc").mapping().toString()); - } - } - public void testUpdateDefaultSearchAnalyzer() throws Exception { IndexService indexService = createIndex("test", Settings.builder() .put("index.analysis.analyzer.default_search.type", "custom") diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java index a910c2c86bab8..5604f4240ce53 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java @@ -40,7 +40,7 @@ public void testParseUnknownParam() throws Exception { templateDef.put("random_param", "random_value"); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1)); + () -> DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion())); assertEquals("Illegal dynamic template parameter: [random_param]", e.getMessage()); } @@ -50,7 +50,7 @@ public void testParseUnknownMatchType() { templateDef2.put("mapping", Collections.singletonMap("store", true)); // if a wrong match type is specified, we ignore the template IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> DynamicTemplate.parse("my_template", templateDef2, Version.V_6_0_0_alpha1)); + () -> DynamicTemplate.parse("my_template", templateDef2, Version.CURRENT.minimumIndexCompatibilityVersion())); assertEquals("No field type matched on [text], possible values are [object, string, long, double, boolean, date, binary]", e.getMessage()); } @@ -63,7 +63,7 @@ public void testParseInvalidRegex() { templateDef.put("match_pattern", "regex"); templateDef.put("mapping", Collections.singletonMap("store", true)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> DynamicTemplate.parse("my_template", templateDef, Version.V_6_3_0)); + () -> DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion())); assertEquals("Pattern [*a] of type [regex] is invalid. 
Cannot create dynamic template [my_template].", e.getMessage()); } } @@ -72,7 +72,7 @@ public void testMatchAllTemplate() { Map templateDef = new HashMap<>(); templateDef.put("match_mapping_type", "*"); templateDef.put("mapping", Collections.singletonMap("store", true)); - DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); + DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion()); assertTrue(template.match("a.b", "b", randomFrom(XContentFieldType.values()))); } @@ -80,7 +80,7 @@ public void testMatchTypeTemplate() { Map templateDef = new HashMap<>(); templateDef.put("match_mapping_type", "string"); templateDef.put("mapping", Collections.singletonMap("store", true)); - DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); + DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion()); assertTrue(template.match("a.b", "b", XContentFieldType.STRING)); assertFalse(template.match("a.b", "b", XContentFieldType.BOOLEAN)); } @@ -90,7 +90,7 @@ public void testSerialization() throws Exception { Map templateDef = new HashMap<>(); templateDef.put("match_mapping_type", "string"); templateDef.put("mapping", Collections.singletonMap("store", true)); - DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); + DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion()); XContentBuilder builder = JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", Strings.toString(builder)); @@ -100,7 +100,7 @@ public void testSerialization() throws Exception { templateDef.put("match", "*name"); templateDef.put("unmatch", "first_name"); templateDef.put("mapping", Collections.singletonMap("store", true)); - template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); + template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion()); builder = JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals("{\"match\":\"*name\",\"unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", Strings.toString(builder)); @@ -110,7 +110,7 @@ public void testSerialization() throws Exception { templateDef.put("path_match", "*name"); templateDef.put("path_unmatch", "first_name"); templateDef.put("mapping", Collections.singletonMap("store", true)); - template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); + template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion()); builder = JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals("{\"path_match\":\"*name\",\"path_unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", @@ -121,7 +121,7 @@ public void testSerialization() throws Exception { templateDef.put("match", "^a$"); templateDef.put("match_pattern", "regex"); templateDef.put("mapping", Collections.singletonMap("store", true)); - template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); + template = DynamicTemplate.parse("my_template", templateDef, Version.CURRENT.minimumIndexCompatibilityVersion()); builder = 
JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals("{\"match\":\"^a$\",\"match_pattern\":\"regex\",\"mapping\":{\"store\":true}}", Strings.toString(builder)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index dd7cb17ef127c..1bdf40bcc6708 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -401,7 +401,8 @@ public void testUpdateNormalizer() throws IOException { () -> indexService.mapperService().merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE)); assertEquals( - "Mapper for [field] conflicts with existing mapping:\n[mapper [field] has different [normalizer]]", + "Mapper for [field] conflicts with existing mapping:\n" + + "[mapper [field] has different [analyzer], mapper [field] has different [normalizer]]", e.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index d3f41589fb1fd..edca517830833 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -19,10 +19,7 @@ package org.elasticsearch.index.mapper; -import java.util.HashSet; import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; @@ -35,12 +32,12 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.io.UncheckedIOException; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -698,55 +695,4 @@ protected boolean forbidPrivateIndexSettings() { */ return false; } - - public void testReorderParentBWC() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("nested1").field("type", "nested").endObject() - .endObject().endObject().endObject()); - - Version bwcVersion = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_4_0); - for (Version version : new Version[] {Version.V_6_5_0, bwcVersion}) { - DocumentMapper docMapper = createIndex("test-" + version, - Settings.builder().put(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey(), version).build()) - .mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - assertThat(docMapper.hasNestedObjects(), equalTo(true)); - ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1"); - assertThat(nested1Mapper.nested().isNested(), equalTo(true)); - - ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", - BytesReference.bytes(XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .startArray("nested1") - .startObject() - 
.field("field1", "1") - .field("field2", "2") - .endObject() - .startObject() - .field("field1", "3") - .field("field2", "4") - .endObject() - .endArray() - .endObject()), - XContentType.JSON)); - - assertThat(doc.docs().size(), equalTo(3)); - if (version.onOrAfter(Version.V_6_5_0)) { - assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString())); - assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1")); - assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("2")); - assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("3")); - assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("4")); - assertThat(doc.docs().get(2).get("field"), equalTo("value")); - } else { - assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString())); - assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("3")); - assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("4")); - assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("1")); - assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("2")); - assertThat(doc.docs().get(2).get("field"), equalTo("value")); - } - } - } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldTypeTests.java index 4e6f504e99263..79a93c04faa1f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldTypeTests.java @@ -67,7 +67,7 @@ public void testTermsQuery() throws Exception { Mockito.when(mapperService.hasNested()).thenReturn(true); query = ft.termQuery("my_type", context); - assertEquals(Queries.newNonNestedFilter(context.indexVersionCreated()), query); + assertEquals(Queries.newNonNestedFilter(), query); mapper = Mockito.mock(DocumentMapper.class); Mockito.when(mapper.type()).thenReturn("other_type"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java index 7e216c37686ee..bc59c59aa54ab 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java @@ -40,6 +40,9 @@ import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.index.analysis.AnalysisRegistry.DEFAULT_ANALYZER_NAME; +import static org.elasticsearch.index.analysis.AnalysisRegistry.DEFAULT_SEARCH_ANALYZER_NAME; +import static org.elasticsearch.index.analysis.AnalysisRegistry.DEFAULT_SEARCH_QUOTED_ANALYZER_NAME; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -57,22 +60,20 @@ public void testParseTextFieldCheckAnalyzerAnalysisMode() { Mapper.TypeParser.ParserContext parserContext = mock(Mapper.TypeParser.ParserContext.class); // check AnalysisMode.ALL works - Map analyzers = new HashMap<>(); + Map analyzers = defaultAnalyzers(); analyzers.put("my_analyzer", new NamedAnalyzer("my_named_analyzer", AnalyzerScope.INDEX, createAnalyzerWithMode("my_analyzer", AnalysisMode.ALL))); - IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, new NamedAnalyzer("default", AnalyzerScope.INDEX, null), null, - null, analyzers, Collections.emptyMap(), Collections.emptyMap()); + IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, analyzers, Collections.emptyMap(), Collections.emptyMap()); 
when(parserContext.getIndexAnalyzers()).thenReturn(indexAnalyzers); TypeParsers.parseTextField(builder, "name", new HashMap<>(fieldNode), parserContext); // check that "analyzer" set to something that only supports AnalysisMode.SEARCH_TIME or AnalysisMode.INDEX_TIME is blocked AnalysisMode mode = randomFrom(AnalysisMode.SEARCH_TIME, AnalysisMode.INDEX_TIME); - analyzers = new HashMap<>(); + analyzers = defaultAnalyzers(); analyzers.put("my_analyzer", new NamedAnalyzer("my_named_analyzer", AnalyzerScope.INDEX, createAnalyzerWithMode("my_analyzer", mode))); - indexAnalyzers = new IndexAnalyzers(indexSettings, new NamedAnalyzer("default", AnalyzerScope.INDEX, null), null, null, analyzers, - Collections.emptyMap(), Collections.emptyMap()); + indexAnalyzers = new IndexAnalyzers(indexSettings, analyzers, Collections.emptyMap(), Collections.emptyMap()); when(parserContext.getIndexAnalyzers()).thenReturn(indexAnalyzers); MapperException ex = expectThrows(MapperException.class, () -> TypeParsers.parseTextField(builder, "name", new HashMap<>(fieldNode), parserContext)); @@ -80,6 +81,14 @@ public void testParseTextFieldCheckAnalyzerAnalysisMode() { ex.getMessage()); } + private static Map defaultAnalyzers() { + Map analyzers = new HashMap<>(); + analyzers.put(DEFAULT_ANALYZER_NAME, new NamedAnalyzer("default", AnalyzerScope.INDEX, null)); + analyzers.put(DEFAULT_SEARCH_ANALYZER_NAME, new NamedAnalyzer("default", AnalyzerScope.INDEX, null)); + analyzers.put(DEFAULT_SEARCH_QUOTED_ANALYZER_NAME, new NamedAnalyzer("default", AnalyzerScope.INDEX, null)); + return analyzers; + } + public void testParseTextFieldCheckSearchAnalyzerAnalysisMode() { TextFieldMapper.Builder builder = new TextFieldMapper.Builder("textField"); for (String settingToTest : new String[] { "search_analyzer", "search_quote_analyzer" }) { @@ -92,25 +101,23 @@ public void testParseTextFieldCheckSearchAnalyzerAnalysisMode() { Mapper.TypeParser.ParserContext parserContext = mock(Mapper.TypeParser.ParserContext.class); // check AnalysisMode.ALL and AnalysisMode.SEARCH_TIME works - Map analyzers = new HashMap<>(); + Map analyzers = defaultAnalyzers(); AnalysisMode mode = randomFrom(AnalysisMode.ALL, AnalysisMode.SEARCH_TIME); analyzers.put("my_analyzer", new NamedAnalyzer("my_named_analyzer", AnalyzerScope.INDEX, createAnalyzerWithMode("my_analyzer", mode))); analyzers.put("standard", new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer())); - IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, new NamedAnalyzer("default", AnalyzerScope.INDEX, null), null, - null, analyzers, Collections.emptyMap(), Collections.emptyMap()); + IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, analyzers, Collections.emptyMap(), Collections.emptyMap()); when(parserContext.getIndexAnalyzers()).thenReturn(indexAnalyzers); TypeParsers.parseTextField(builder, "name", new HashMap<>(fieldNode), parserContext); // check that "analyzer" set to AnalysisMode.INDEX_TIME is blocked mode = AnalysisMode.INDEX_TIME; - analyzers = new HashMap<>(); + analyzers = defaultAnalyzers(); analyzers.put("my_analyzer", new NamedAnalyzer("my_named_analyzer", AnalyzerScope.INDEX, createAnalyzerWithMode("my_analyzer", mode))); analyzers.put("standard", new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer())); - indexAnalyzers = new IndexAnalyzers(indexSettings, new NamedAnalyzer("default", AnalyzerScope.INDEX, null), null, null, - analyzers, Collections.emptyMap(), Collections.emptyMap()); + indexAnalyzers = new 
IndexAnalyzers(indexSettings, analyzers, Collections.emptyMap(), Collections.emptyMap()); when(parserContext.getIndexAnalyzers()).thenReturn(indexAnalyzers); MapperException ex = expectThrows(MapperException.class, () -> TypeParsers.parseTextField(builder, "name", new HashMap<>(fieldNode), parserContext)); @@ -127,11 +134,10 @@ public void testParseTextFieldCheckAnalyzerWithSearchAnalyzerAnalysisMode() { // check that "analyzer" set to AnalysisMode.INDEX_TIME is blocked if there is no search analyzer AnalysisMode mode = AnalysisMode.INDEX_TIME; - Map analyzers = new HashMap<>(); + Map analyzers = defaultAnalyzers(); analyzers.put("my_analyzer", new NamedAnalyzer("my_named_analyzer", AnalyzerScope.INDEX, createAnalyzerWithMode("my_analyzer", mode))); - IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, new NamedAnalyzer("default", AnalyzerScope.INDEX, null), null, - null, analyzers, Collections.emptyMap(), Collections.emptyMap()); + IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, analyzers, Collections.emptyMap(), Collections.emptyMap()); when(parserContext.getIndexAnalyzers()).thenReturn(indexAnalyzers); MapperException ex = expectThrows(MapperException.class, () -> TypeParsers.parseTextField(builder, "name", new HashMap<>(fieldNode), parserContext)); @@ -140,14 +146,13 @@ public void testParseTextFieldCheckAnalyzerWithSearchAnalyzerAnalysisMode() { // check AnalysisMode.INDEX_TIME is okay if search analyzer is also set fieldNode.put("search_analyzer", "standard"); - analyzers = new HashMap<>(); + analyzers = defaultAnalyzers(); mode = randomFrom(AnalysisMode.ALL, AnalysisMode.INDEX_TIME); analyzers.put("my_analyzer", new NamedAnalyzer("my_named_analyzer", AnalyzerScope.INDEX, createAnalyzerWithMode("my_analyzer", mode))); analyzers.put("standard", new NamedAnalyzer("standard", AnalyzerScope.INDEX, new StandardAnalyzer())); - indexAnalyzers = new IndexAnalyzers(indexSettings, new NamedAnalyzer("default", AnalyzerScope.INDEX, null), null, null, analyzers, - Collections.emptyMap(), Collections.emptyMap()); + indexAnalyzers = new IndexAnalyzers(indexSettings, analyzers, Collections.emptyMap(), Collections.emptyMap()); when(parserContext.getIndexAnalyzers()).thenReturn(indexAnalyzers); TypeParsers.parseTextField(builder, "name", new HashMap<>(fieldNode), parserContext); } diff --git a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java index a5329856630d5..47bd8d8a34c14 100644 --- a/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.NormsFieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.elasticsearch.Version; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; @@ -65,26 +64,7 @@ protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, Collection fields = context.getQueryShardContext().simpleMatchToIndexNames(fieldPattern); Collection mappedFields = fields.stream().filter((field) -> context.getQueryShardContext().getObjectMapper(field) != null || context.getQueryShardContext().getMapperService().fullName(field) != null).collect(Collectors.toList()); - if (context.mapperService().getIndexSettings().getIndexVersionCreated().before(Version.V_6_1_0)) { - 
if (fields.size() == 1) { - assertThat(query, instanceOf(ConstantScoreQuery.class)); - ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; - String field = expectedFieldName(fields.iterator().next()); - assertThat(constantScoreQuery.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) constantScoreQuery.getQuery(); - assertEquals(field, termQuery.getTerm().text()); - } else { - assertThat(query, instanceOf(ConstantScoreQuery.class)); - ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query; - assertThat(constantScoreQuery.getQuery(), instanceOf(BooleanQuery.class)); - BooleanQuery booleanQuery = (BooleanQuery) constantScoreQuery.getQuery(); - assertThat(booleanQuery.clauses().size(), equalTo(mappedFields.size())); - for (int i = 0; i < mappedFields.size(); i++) { - BooleanClause booleanClause = booleanQuery.clauses().get(i); - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - } - } - } else if (fields.size() == 1 && mappedFields.size() == 0) { + if (fields.size() == 1 && mappedFields.size() == 0) { assertThat(query, instanceOf(MatchNoDocsQuery.class)); MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query; assertThat(matchNoDocsQuery.toString(null), diff --git a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index 54d478a7f6aec..db32c251fd3f4 100644 --- a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -146,7 +146,7 @@ public static InnerHitBuilder randomNestedInnerHits() { } public static InnerHitBuilder randomInnerHits() { InnerHitBuilder innerHits = new InnerHitBuilder(); - innerHits.setName(randomAlphaOfLengthBetween(1, 16)); + innerHits.setName(randomAlphaOfLengthBetween(5, 16)); innerHits.setFrom(randomIntBetween(0, 32)); innerHits.setSize(randomIntBetween(0, 32)); innerHits.setExplain(randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/index/query/LegacyGeoShapeFieldQueryTests.java b/server/src/test/java/org/elasticsearch/index/query/LegacyGeoShapeFieldQueryTests.java deleted file mode 100644 index dc2a4a0e3fffe..0000000000000 --- a/server/src/test/java/org/elasticsearch/index/query/LegacyGeoShapeFieldQueryTests.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.query; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.geo.SpatialStrategy; -import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.geo.RandomShapeGenerator; -import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; - -import java.io.IOException; - -public class LegacyGeoShapeFieldQueryTests extends GeoShapeQueryBuilderTests { - - @Override - protected String fieldName() { - return GEO_SHAPE_FIELD_NAME; - } - - @Override - protected Settings createTestIndexSettings() { - // force the legacy shape impl - Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_5_0); - return Settings.builder() - .put(super.createTestIndexSettings()) - .put(IndexMetaData.SETTING_VERSION_CREATED, version) - .build(); - } - - @Override - protected GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) { - ShapeType shapeType = ShapeType.randomType(random()); - ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType); - GeoShapeQueryBuilder builder; - clearShapeFields(); - if (indexedShape == false) { - builder = new GeoShapeQueryBuilder(fieldName(), shape); - } else { - indexedShapeToReturn = shape; - indexedShapeId = randomAlphaOfLengthBetween(3, 20); - builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId); - if (randomBoolean()) { - indexedShapeIndex = randomAlphaOfLengthBetween(3, 20); - builder.indexedShapeIndex(indexedShapeIndex); - } - if (randomBoolean()) { - indexedShapePath = randomAlphaOfLengthBetween(3, 20); - builder.indexedShapePath(indexedShapePath); - } - if (randomBoolean()) { - indexedShapeRouting = randomAlphaOfLengthBetween(3, 20); - builder.indexedShapeRouting(indexedShapeRouting); - } - } - if (randomBoolean()) { - SpatialStrategy strategy = randomFrom(SpatialStrategy.values()); - // ShapeType.MULTILINESTRING + SpatialStrategy.TERM can lead to large queries and will slow down tests, so - // we try to avoid that combination - while (shapeType == ShapeType.MULTILINESTRING && strategy == SpatialStrategy.TERM) { - strategy = randomFrom(SpatialStrategy.values()); - } - builder.strategy(strategy); - if (strategy != SpatialStrategy.TERM) { - builder.relation(randomFrom(ShapeRelation.values())); - } - } - - if (randomBoolean()) { - builder.ignoreUnmapped(randomBoolean()); - } - return builder; - } - - public void testInvalidRelation() throws IOException { - ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null); - GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); - builder.strategy(SpatialStrategy.TERM); - expectThrows(IllegalArgumentException.class, () -> builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN))); - GeoShapeQueryBuilder builder2 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); - builder2.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)); - expectThrows(IllegalArgumentException.class, () -> builder2.strategy(SpatialStrategy.TERM)); - GeoShapeQueryBuilder builder3 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); - builder3.strategy(SpatialStrategy.TERM); - expectThrows(IllegalArgumentException.class, () -> builder3.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN))); - 
} -} diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 5ace39c0890df..001df6deb5647 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -50,7 +50,6 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; @@ -1033,8 +1032,7 @@ public void testExistsFieldQuery() throws Exception { QueryShardContext context = createShardContext(); QueryStringQueryBuilder queryBuilder = new QueryStringQueryBuilder(STRING_FIELD_NAME + ":*"); Query query = queryBuilder.toQuery(context); - if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) - && (context.fieldMapper(STRING_FIELD_NAME).omitNorms() == false)) { + if (context.fieldMapper(STRING_FIELD_NAME).omitNorms() == false) { assertThat(query, equalTo(new ConstantScoreQuery(new NormsFieldExistsQuery(STRING_FIELD_NAME)))); } else { assertThat(query, equalTo(new ConstantScoreQuery(new TermQuery(new Term("_field_names", STRING_FIELD_NAME))))); @@ -1044,8 +1042,7 @@ public void testExistsFieldQuery() throws Exception { String value = (quoted ? "\"" : "") + STRING_FIELD_NAME + (quoted ? "\"" : ""); queryBuilder = new QueryStringQueryBuilder("_exists_:" + value); query = queryBuilder.toQuery(context); - if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) - && (context.fieldMapper(STRING_FIELD_NAME).omitNorms() == false)) { + if (context.fieldMapper(STRING_FIELD_NAME).omitNorms() == false) { assertThat(query, equalTo(new ConstantScoreQuery(new NormsFieldExistsQuery(STRING_FIELD_NAME)))); } else { assertThat(query, equalTo(new ConstantScoreQuery(new TermQuery(new Term("_field_names", STRING_FIELD_NAME))))); diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index 30780d5c49018..d270a8c7113b5 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -32,7 +32,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.lucene.BytesRefs; @@ -139,11 +138,9 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, String expectedFieldName = expectedFieldName(queryBuilder.fieldName()); if (queryBuilder.from() == null && queryBuilder.to() == null) { final Query expectedQuery; - if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) - && context.mapperService().fullName(queryBuilder.fieldName()).hasDocValues()) { + if (context.mapperService().fullName(queryBuilder.fieldName()).hasDocValues()) { expectedQuery = new ConstantScoreQuery(new 
DocValuesFieldExistsQuery(expectedFieldName)); - } else if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) && - context.mapperService().fullName(queryBuilder.fieldName()).omitNorms() == false) { + } else if (context.mapperService().fullName(queryBuilder.fieldName()).omitNorms() == false) { expectedQuery = new ConstantScoreQuery(new NormsFieldExistsQuery(expectedFieldName)); } else { expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, expectedFieldName))); @@ -425,8 +422,7 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC // Range query with open bounds rewrite to an exists query Query luceneQuery = rewrittenRange.toQuery(queryShardContext); final Query expectedQuery; - if (queryShardContext.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0) - && queryShardContext.fieldMapper(query.fieldName()).hasDocValues()) { + if (queryShardContext.fieldMapper(query.fieldName()).hasDocValues()) { expectedQuery = new ConstantScoreQuery(new DocValuesFieldExistsQuery(query.fieldName())); } else { expectedQuery = new ConstantScoreQuery(new TermQuery(new Term(FieldNamesFieldMapper.NAME, query.fieldName()))); diff --git a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index daed696f02fd9..0adac9db8287e 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -39,7 +39,6 @@ import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.TestUtil; -import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.search.SimpleQueryStringQueryParser; @@ -239,9 +238,6 @@ public void testFieldsCannotBeSetToNull() { } public void testDefaultFieldParsing() throws IOException { - assumeTrue("5.x behaves differently, so skip on non-6.x indices", - indexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_0_0_alpha1)); - String query = randomAlphaOfLengthBetween(1, 10).toLowerCase(Locale.ROOT); String contentString = "{\n" + " \"simple_query_string\" : {\n" + diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java index 0dccf5937dca7..dd44a386329f9 100644 --- a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java @@ -44,7 +44,6 @@ import org.apache.lucene.search.join.ToParentBlockJoinQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.TestUtil; -import org.elasticsearch.Version; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; @@ -799,7 +798,7 @@ private static TopFieldDocs search(QueryBuilder queryBuilder, FieldSortBuilder s IndexSearcher searcher) throws IOException { Query query = new BooleanQuery.Builder() .add(queryBuilder.toQuery(queryShardContext), Occur.MUST) - .add(Queries.newNonNestedFilter(Version.CURRENT), Occur.FILTER) + 
.add(Queries.newNonNestedFilter(), Occur.FILTER) .build(); Sort sort = new Sort(sortBuilder.build(queryShardContext).field); return searcher.search(query, 10, sort); diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerRetentionLeaseTests.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerRetentionLeaseTests.java index 178df2eac899e..2334cb4330887 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerRetentionLeaseTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerRetentionLeaseTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.gateway.WriteStateException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; @@ -70,8 +71,7 @@ public void testAddOrRenewRetentionLease() { replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), - routingTable(Collections.emptySet(), allocationId), - Collections.emptySet()); + routingTable(Collections.emptySet(), allocationId)); replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); final int length = randomIntBetween(0, 8); final long[] minimumRetainingSequenceNumbers = new long[length]; @@ -112,8 +112,7 @@ public void testAddDuplicateRetentionLease() { replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), - routingTable(Collections.emptySet(), allocationId), - Collections.emptySet()); + routingTable(Collections.emptySet(), allocationId)); replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); final String id = randomAlphaOfLength(8); final long retainingSequenceNumber = randomNonNegativeLong(); @@ -141,8 +140,7 @@ public void testRenewNotFoundRetentionLease() { replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), - routingTable(Collections.emptySet(), allocationId), - Collections.emptySet()); + routingTable(Collections.emptySet(), allocationId)); replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); final String id = randomAlphaOfLength(8); final RetentionLeaseNotFoundException e = expectThrows( @@ -178,8 +176,7 @@ public void testAddRetentionLeaseCausesRetentionLeaseSync() { replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), - routingTable(Collections.emptySet(), allocationId), - Collections.emptySet()); + routingTable(Collections.emptySet(), allocationId)); replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); final int length = randomIntBetween(0, 8); @@ -213,8 +210,7 @@ public void testRemoveRetentionLease() { replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), - routingTable(Collections.emptySet(), allocationId), - Collections.emptySet()); + routingTable(Collections.emptySet(), allocationId)); replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); final int length = randomIntBetween(0, 8); final long[] minimumRetainingSequenceNumbers = new long[length]; @@ -264,8 +260,7 @@ public void testRemoveNotFound() { replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), - 
routingTable(Collections.emptySet(), allocationId), - Collections.emptySet()); + routingTable(Collections.emptySet(), allocationId)); replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); final String id = randomAlphaOfLength(8); final RetentionLeaseNotFoundException e = expectThrows( @@ -301,8 +296,7 @@ public void testRemoveRetentionLeaseCausesRetentionLeaseSync() { replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), - routingTable(Collections.emptySet(), allocationId), - Collections.emptySet()); + routingTable(Collections.emptySet(), allocationId)); replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); final int length = randomIntBetween(0, 8); @@ -353,8 +347,7 @@ private void runExpirationTest(final boolean primaryMode) { replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), - routingTable(Collections.emptySet(), allocationId), - Collections.emptySet()); + routingTable(Collections.emptySet(), allocationId)); if (primaryMode) { replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); } @@ -426,8 +419,7 @@ public void testReplicaIgnoresOlderRetentionLeasesVersion() { replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), - routingTable(Collections.emptySet(), allocationId), - Collections.emptySet()); + routingTable(Collections.emptySet(), allocationId)); final int length = randomIntBetween(0, 8); final List retentionLeasesCollection = new ArrayList<>(length); long primaryTerm = 1; @@ -480,8 +472,7 @@ public void testLoadAndPersistRetentionLeases() throws IOException { replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), - routingTable(Collections.emptySet(), allocationId), - Collections.emptySet()); + routingTable(Collections.emptySet(), allocationId)); replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); final int length = randomIntBetween(0, 8); for (int i = 0; i < length; i++) { @@ -499,6 +490,48 @@ public void testLoadAndPersistRetentionLeases() throws IOException { assertThat(replicationTracker.loadRetentionLeases(path), equalTo(replicationTracker.getRetentionLeases())); } + public void testUnnecessaryPersistenceOfRetentionLeases() throws IOException { + final AllocationId allocationId = AllocationId.newInitializing(); + long primaryTerm = randomLongBetween(1, Long.MAX_VALUE); + final ReplicationTracker replicationTracker = new ReplicationTracker( + new ShardId("test", "_na", 0), + allocationId.getId(), + IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), + primaryTerm, + UNASSIGNED_SEQ_NO, + value -> {}, + () -> 0L, + (leases, listener) -> {}); + replicationTracker.updateFromMaster( + randomNonNegativeLong(), + Collections.singleton(allocationId.getId()), + routingTable(Collections.emptySet(), allocationId)); + replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); + final int length = randomIntBetween(0, 8); + for (int i = 0; i < length; i++) { + if (rarely() && primaryTerm < Long.MAX_VALUE) { + primaryTerm = randomLongBetween(primaryTerm + 1, Long.MAX_VALUE); + replicationTracker.setOperationPrimaryTerm(primaryTerm); + } + final long retainingSequenceNumber = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, Long.MAX_VALUE); + replicationTracker.addRetentionLease( + Integer.toString(i), retainingSequenceNumber, "test-" + i, ActionListener.wrap(() -> {})); + } + 
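+ // persist once to capture the on-disk generation and contents, then persist again with no changes; + // the assertions below verify the redundant persistence left both the generation and the leases untouched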
+ final Path path = createTempDir(); + replicationTracker.persistRetentionLeases(path); + + final Tuple retentionLeasesWithGeneration = + RetentionLeases.FORMAT.loadLatestStateWithGeneration(logger, NamedXContentRegistry.EMPTY, path); + + replicationTracker.persistRetentionLeases(path); + final Tuple retentionLeasesWithGenerationAfterUnnecessaryPersistence = + RetentionLeases.FORMAT.loadLatestStateWithGeneration(logger, NamedXContentRegistry.EMPTY, path); + + assertThat(retentionLeasesWithGenerationAfterUnnecessaryPersistence.v1(), equalTo(retentionLeasesWithGeneration.v1())); + assertThat(retentionLeasesWithGenerationAfterUnnecessaryPersistence.v2(), equalTo(retentionLeasesWithGeneration.v2())); + } + /** * Test that we correctly synchronize writing the retention lease state file in {@link ReplicationTracker#persistRetentionLeases(Path)}. * This test can fail without the synchronization block in that method. @@ -520,8 +553,7 @@ public void testPersistRetentionLeasesUnderConcurrency() throws IOException { replicationTracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(allocationId.getId()), - routingTable(Collections.emptySet(), allocationId), - Collections.emptySet()); + routingTable(Collections.emptySet(), allocationId)); replicationTracker.activatePrimaryMode(SequenceNumbers.NO_OPS_PERFORMED); final int length = randomIntBetween(0, 8); for (int i = 0; i < length; i++) { diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java index 037d2130b5c7b..05ca0a5ea3006 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java @@ -64,7 +64,7 @@ import static org.hamcrest.Matchers.not; public class ReplicationTrackerTests extends ReplicationTrackerTestCase { - + public void testEmptyShards() { final ReplicationTracker tracker = newTracker(AllocationId.newInitializing()); assertThat(tracker.getGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO)); @@ -86,7 +86,7 @@ private void updateLocalCheckpoint(final ReplicationTracker tracker, final Strin tracker.updateLocalCheckpoint(allocationId, localCheckpoint); assertThat(updatedGlobalCheckpoint.get(), equalTo(tracker.getGlobalCheckpoint())); } - + public void testGlobalCheckpointUpdate() { final long initialClusterStateVersion = randomNonNegativeLong(); Map allocations = new HashMap<>(); @@ -120,7 +120,7 @@ public void testGlobalCheckpointUpdate() { logger.info(" - [{}], local checkpoint [{}], [{}]", aId, allocations.get(aId), type); }); - tracker.updateFromMaster(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId), emptySet()); + tracker.updateFromMaster(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId)); tracker.activatePrimaryMode(NO_OPS_PERFORMED); assertThat(tracker.getReplicationGroup().getReplicationTargets().size(), equalTo(1)); initializing.forEach(aId -> markAsTrackingAndInSyncQuietly(tracker, aId.getId(), NO_OPS_PERFORMED)); @@ -147,7 +147,7 @@ public void testGlobalCheckpointUpdate() { Set newInitializing = new HashSet<>(initializing); newInitializing.add(extraId); - tracker.updateFromMaster(initialClusterStateVersion + 1, ids(active), routingTable(newInitializing, primaryId), emptySet()); + tracker.updateFromMaster(initialClusterStateVersion + 1, ids(active), routingTable(newInitializing, primaryId)); 
tracker.initiateTracking(extraId.getId()); @@ -187,7 +187,7 @@ public void testMarkAllocationIdAsInSync() throws BrokenBarrierException, Interr final AllocationId primaryId = active.iterator().next(); final AllocationId replicaId = initializing.iterator().next(); final ReplicationTracker tracker = newTracker(primaryId); - tracker.updateFromMaster(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId), emptySet()); + tracker.updateFromMaster(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId)); final long localCheckpoint = randomLongBetween(0, Long.MAX_VALUE - 1); tracker.activatePrimaryMode(localCheckpoint); tracker.initiateTracking(replicaId.getId()); @@ -229,7 +229,7 @@ public void testMissingActiveIdsPreventAdvance() { assigned.putAll(initializing); AllocationId primaryId = active.keySet().iterator().next(); final ReplicationTracker tracker = newTracker(primaryId); - tracker.updateFromMaster(randomNonNegativeLong(), ids(active.keySet()), routingTable(initializing.keySet(), primaryId), emptySet()); + tracker.updateFromMaster(randomNonNegativeLong(), ids(active.keySet()), routingTable(initializing.keySet(), primaryId)); tracker.activatePrimaryMode(NO_OPS_PERFORMED); randomSubsetOf(initializing.keySet()).forEach(k -> markAsTrackingAndInSyncQuietly(tracker, k.getId(), NO_OPS_PERFORMED)); final AllocationId missingActiveID = randomFrom(active.keySet()); @@ -256,7 +256,7 @@ public void testMissingInSyncIdsPreventAdvance() { AllocationId primaryId = active.keySet().iterator().next(); final ReplicationTracker tracker = newTracker(primaryId); - tracker.updateFromMaster(randomNonNegativeLong(), ids(active.keySet()), routingTable(initializing.keySet(), primaryId), emptySet()); + tracker.updateFromMaster(randomNonNegativeLong(), ids(active.keySet()), routingTable(initializing.keySet(), primaryId)); tracker.activatePrimaryMode(NO_OPS_PERFORMED); randomSubsetOf(randomIntBetween(1, initializing.size() - 1), initializing.keySet()).forEach(aId -> markAsTrackingAndInSyncQuietly(tracker, aId.getId(), NO_OPS_PERFORMED)); @@ -278,7 +278,7 @@ public void testInSyncIdsAreIgnoredIfNotValidatedByMaster() { final Map nonApproved = randomAllocationsWithLocalCheckpoints(1, 5); final AllocationId primaryId = active.keySet().iterator().next(); final ReplicationTracker tracker = newTracker(primaryId); - tracker.updateFromMaster(randomNonNegativeLong(), ids(active.keySet()), routingTable(initializing.keySet(), primaryId), emptySet()); + tracker.updateFromMaster(randomNonNegativeLong(), ids(active.keySet()), routingTable(initializing.keySet(), primaryId)); tracker.activatePrimaryMode(NO_OPS_PERFORMED); initializing.keySet().forEach(k -> markAsTrackingAndInSyncQuietly(tracker, k.getId(), NO_OPS_PERFORMED)); nonApproved.keySet().forEach(k -> @@ -313,7 +313,7 @@ public void testInSyncIdsAreRemovedIfNotValidatedByMaster() { allocations.putAll(initializingToBeRemoved); } final ReplicationTracker tracker = newTracker(primaryId); - tracker.updateFromMaster(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId), emptySet()); + tracker.updateFromMaster(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId)); tracker.activatePrimaryMode(NO_OPS_PERFORMED); if (randomBoolean()) { initializingToStay.keySet().forEach(k -> markAsTrackingAndInSyncQuietly(tracker, k.getId(), NO_OPS_PERFORMED)); @@ -329,16 +329,14 @@ public void testInSyncIdsAreRemovedIfNotValidatedByMaster() { tracker.updateFromMaster( initialClusterStateVersion + 1, 
ids(activeToStay.keySet()), - routingTable(initializingToStay.keySet(), primaryId), - emptySet()); + routingTable(initializingToStay.keySet(), primaryId)); allocations.forEach((aid, ckp) -> updateLocalCheckpoint(tracker, aid.getId(), ckp + 10L)); } else { allocations.forEach((aid, ckp) -> updateLocalCheckpoint(tracker, aid.getId(), ckp + 10L)); tracker.updateFromMaster( initialClusterStateVersion + 2, ids(activeToStay.keySet()), - routingTable(initializingToStay.keySet(), primaryId), - emptySet()); + routingTable(initializingToStay.keySet(), primaryId)); } final long checkpoint = Stream.concat(activeToStay.values().stream(), initializingToStay.values().stream()) @@ -357,7 +355,7 @@ public void testWaitForAllocationIdToBeInSync() throws Exception { final ReplicationTracker tracker = newTracker(inSyncAllocationId); final long clusterStateVersion = randomNonNegativeLong(); tracker.updateFromMaster(clusterStateVersion, Collections.singleton(inSyncAllocationId.getId()), - routingTable(Collections.singleton(trackingAllocationId), inSyncAllocationId), emptySet()); + routingTable(Collections.singleton(trackingAllocationId), inSyncAllocationId)); tracker.activatePrimaryMode(globalCheckpoint); final Thread thread = new Thread(() -> { try { @@ -397,7 +395,7 @@ public void testWaitForAllocationIdToBeInSync() throws Exception { } else { // master changes its mind and cancels the allocation tracker.updateFromMaster(clusterStateVersion + 1, Collections.singleton(inSyncAllocationId.getId()), - routingTable(emptySet(), inSyncAllocationId), emptySet()); + routingTable(emptySet(), inSyncAllocationId)); barrier.await(); assertTrue(complete.get()); assertNull(tracker.getTrackedLocalCheckpointForShard(trackingAllocationId.getId())); @@ -405,7 +403,7 @@ public void testWaitForAllocationIdToBeInSync() throws Exception { assertFalse(tracker.pendingInSync.contains(trackingAllocationId.getId())); thread.join(); } - + private AtomicLong updatedGlobalCheckpoint = new AtomicLong(UNASSIGNED_SEQ_NO); private ReplicationTracker newTracker(final AllocationId allocationId) { @@ -421,7 +419,7 @@ public void testWaitForAllocationIdToBeInSyncCanBeInterrupted() throws BrokenBar final AllocationId trackingAllocationId = AllocationId.newInitializing(); final ReplicationTracker tracker = newTracker(inSyncAllocationId); tracker.updateFromMaster(randomNonNegativeLong(), Collections.singleton(inSyncAllocationId.getId()), - routingTable(Collections.singleton(trackingAllocationId), inSyncAllocationId), emptySet()); + routingTable(Collections.singleton(trackingAllocationId), inSyncAllocationId)); tracker.activatePrimaryMode(globalCheckpoint); final Thread thread = new Thread(() -> { try { @@ -470,7 +468,7 @@ public void testUpdateAllocationIdsFromMaster() throws Exception { AllocationId primaryId = activeAllocationIds.iterator().next(); IndexShardRoutingTable routingTable = routingTable(initializingIds, primaryId); final ReplicationTracker tracker = newTracker(primaryId); - tracker.updateFromMaster(initialClusterStateVersion, ids(activeAllocationIds), routingTable, emptySet()); + tracker.updateFromMaster(initialClusterStateVersion, ids(activeAllocationIds), routingTable); tracker.activatePrimaryMode(NO_OPS_PERFORMED); assertThat(tracker.getReplicationGroup().getInSyncAllocationIds(), equalTo(ids(activeAllocationIds))); assertThat(tracker.getReplicationGroup().getRoutingTable(), equalTo(routingTable)); @@ -500,7 +498,7 @@ public void testUpdateAllocationIdsFromMaster() throws Exception { final Set newInitializingAllocationIds = 
initializingIds.stream().filter(a -> !removingInitializingAllocationIds.contains(a)).collect(Collectors.toSet()); routingTable = routingTable(newInitializingAllocationIds, primaryId); - tracker.updateFromMaster(initialClusterStateVersion + 1, ids(newActiveAllocationIds), routingTable, emptySet()); + tracker.updateFromMaster(initialClusterStateVersion + 1, ids(newActiveAllocationIds), routingTable); assertTrue(newActiveAllocationIds.stream().allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).inSync)); assertTrue(removingActiveAllocationIds.stream().allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()) == null)); assertTrue(newInitializingAllocationIds.stream().noneMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).inSync)); @@ -517,8 +515,7 @@ public void testUpdateAllocationIdsFromMaster() throws Exception { tracker.updateFromMaster( initialClusterStateVersion + 2, ids(newActiveAllocationIds), - routingTable(newInitializingAllocationIds, primaryId), - emptySet()); + routingTable(newInitializingAllocationIds, primaryId)); assertTrue(newActiveAllocationIds.stream().allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).inSync)); assertTrue( newActiveAllocationIds @@ -550,10 +547,10 @@ public void testUpdateAllocationIdsFromMaster() throws Exception { .entrySet() .stream() .allMatch(e -> tracker.getTrackedLocalCheckpointForShard(e.getKey().getId()).getLocalCheckpoint() == e.getValue())); - final long minimumActiveLocalCheckpoint = (long) activeLocalCheckpoints.values().stream().min(Integer::compareTo).get(); + final long minimumActiveLocalCheckpoint = activeLocalCheckpoints.values().stream().min(Integer::compareTo).get(); assertThat(tracker.getGlobalCheckpoint(), equalTo(minimumActiveLocalCheckpoint)); assertThat(updatedGlobalCheckpoint.get(), equalTo(minimumActiveLocalCheckpoint)); - final long minimumInitailizingLocalCheckpoint = (long) initializingLocalCheckpoints.values().stream().min(Integer::compareTo).get(); + final long minimumInitailizingLocalCheckpoint = initializingLocalCheckpoints.values().stream().min(Integer::compareTo).get(); // now we are going to add a new allocation ID and bring it in sync which should move it to the in-sync allocation IDs final long localCheckpoint = @@ -565,8 +562,7 @@ public void testUpdateAllocationIdsFromMaster() throws Exception { tracker.updateFromMaster( initialClusterStateVersion + 3, ids(newActiveAllocationIds), - routingTable(newInitializingAllocationIds, primaryId), - emptySet()); + routingTable(newInitializingAllocationIds, primaryId)); final CyclicBarrier barrier = new CyclicBarrier(2); final Thread thread = new Thread(() -> { try { @@ -604,8 +600,7 @@ public void testUpdateAllocationIdsFromMaster() throws Exception { tracker.updateFromMaster( initialClusterStateVersion + 4, ids(newActiveAllocationIds), - routingTable(newInitializingAllocationIds, primaryId), - emptySet()); + routingTable(newInitializingAllocationIds, primaryId)); assertTrue(tracker.getTrackedLocalCheckpointForShard(newSyncingAllocationId.getId()).inSync); assertFalse(tracker.pendingInSync.contains(newSyncingAllocationId.getId())); } @@ -633,8 +628,7 @@ public void testRaceUpdatingGlobalCheckpoint() throws InterruptedException, Brok tracker.updateFromMaster( randomNonNegativeLong(), Collections.singleton(active.getId()), - routingTable(Collections.singleton(initializing), active), - emptySet()); + routingTable(Collections.singleton(initializing), active)); tracker.activatePrimaryMode(activeLocalCheckpoint); final int 
nextActiveLocalCheckpoint = randomIntBetween(activeLocalCheckpoint + 1, Integer.MAX_VALUE); final Thread activeThread = new Thread(() -> { @@ -835,7 +829,7 @@ public void testIllegalStateExceptionIfUnknownAllocationId() { final AllocationId initializing = AllocationId.newInitializing(); final ReplicationTracker tracker = newTracker(active); tracker.updateFromMaster(randomNonNegativeLong(), Collections.singleton(active.getId()), - routingTable(Collections.singleton(initializing), active), emptySet()); + routingTable(Collections.singleton(initializing), active)); tracker.activatePrimaryMode(NO_OPS_PERFORMED); expectThrows(IllegalStateException.class, () -> tracker.initiateTracking(randomAlphaOfLength(10))); @@ -863,7 +857,7 @@ public Set initializingIds() { } public void apply(ReplicationTracker gcp) { - gcp.updateFromMaster(version, ids(inSyncIds), routingTable, Collections.emptySet()); + gcp.updateFromMaster(version, ids(inSyncIds), routingTable); } } diff --git a/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java index 92d31e305adc7..cb40a0726d42f 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java @@ -356,7 +356,7 @@ public void testRetentionLeasesBackgroundSyncWithSoftDeletesDisabled() throws Ex assertFalse("retention leases background sync must be a noop if soft deletes is disabled", backgroundSyncRequestSent.get()); } - @TestLogging(value = "org.elasticsearch.indices.recovery:trace") + @TestLogging(value = "org.elasticsearch.index:debug,org.elasticsearch.indices.recovery:trace") public void testRetentionLeasesSyncOnRecovery() throws Exception { final int numberOfReplicas = 2 - scaledRandomIntBetween(0, 2); internalCluster().ensureAtLeastNumDataNodes(1 + numberOfReplicas); diff --git a/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeasesTests.java b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeasesTests.java index 28444c7825e4d..c63b2ebb6645b 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeasesTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeasesTests.java @@ -60,7 +60,9 @@ public void testSupersedesByPrimaryTerm() { final long higherPrimaryTerm = randomLongBetween(lowerPrimaryTerm + 1, Long.MAX_VALUE); final RetentionLeases right = new RetentionLeases(higherPrimaryTerm, randomLongBetween(1, Long.MAX_VALUE), Collections.emptyList()); assertTrue(right.supersedes(left)); + assertTrue(right.supersedes(left.primaryTerm(), left.version())); assertFalse(left.supersedes(right)); + assertFalse(left.supersedes(right.primaryTerm(), right.version())); } public void testSupersedesByVersion() { @@ -70,7 +72,9 @@ public void testSupersedesByVersion() { final RetentionLeases left = new RetentionLeases(primaryTerm, lowerVersion, Collections.emptyList()); final RetentionLeases right = new RetentionLeases(primaryTerm, higherVersion, Collections.emptyList()); assertTrue(right.supersedes(left)); + assertTrue(right.supersedes(left.primaryTerm(), left.version())); assertFalse(left.supersedes(right)); + assertFalse(left.supersedes(right.primaryTerm(), right.version())); } public void testRetentionLeasesRejectsDuplicates() { diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index b34f364bbed2c..0be7b4433fac3 
100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -603,7 +603,7 @@ public void testPrimaryPromotionRollsGeneration() throws Exception { replicaRouting.allocationId()); indexShard.updateShardState(primaryRouting, newPrimaryTerm, (shard, listener) -> {}, 0L, Collections.singleton(primaryRouting.allocationId().getId()), - new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(), Collections.emptySet()); + new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build()); /* * This operation completing means that the delay operation executed as part of increasing the primary term has completed and the @@ -654,8 +654,7 @@ public void testOperationPermitsOnPrimaryShards() throws Exception { latch.countDown(); }, 0L, Collections.singleton(indexShard.routingEntry().allocationId().getId()), - new IndexShardRoutingTable.Builder(indexShard.shardId()).addShard(primaryRouting).build(), - Collections.emptySet()); + new IndexShardRoutingTable.Builder(indexShard.shardId()).addShard(primaryRouting).build()); latch.await(); assertThat(indexShard.getActiveOperationsCount(), isOneOf(0, IndexShard.OPERATIONS_BLOCKED)); if (randomBoolean()) { @@ -1140,8 +1139,7 @@ public void onFailure(Exception e) { (s, r) -> resyncLatch.countDown(), 1L, Collections.singleton(newRouting.allocationId().getId()), - new IndexShardRoutingTable.Builder(newRouting.shardId()).addShard(newRouting).build(), - Collections.emptySet()); + new IndexShardRoutingTable.Builder(newRouting.shardId()).addShard(newRouting).build()); resyncLatch.await(); assertThat(indexShard.getLocalCheckpoint(), equalTo(maxSeqNo)); assertThat(indexShard.seqNoStats().getMaxSeqNo(), equalTo(maxSeqNo)); diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java index c7d59fdb7c25e..e0825445bb8c2 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java @@ -99,7 +99,7 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { String allocationId = shard.routingEntry().allocationId().getId(); shard.updateShardState(shard.routingEntry(), shard.getPendingPrimaryTerm(), null, 1000L, Collections.singleton(allocationId), - new IndexShardRoutingTable.Builder(shard.shardId()).addShard(shard.routingEntry()).build(), Collections.emptySet()); + new IndexShardRoutingTable.Builder(shard.shardId()).addShard(shard.routingEntry()).build()); shard.updateLocalCheckpointForShard(allocationId, globalCheckPoint); assertEquals(globalCheckPoint, shard.getGlobalCheckpoint()); @@ -159,7 +159,7 @@ public void testSyncerOnClosingShard() throws Exception { String allocationId = shard.routingEntry().allocationId().getId(); shard.updateShardState(shard.routingEntry(), shard.getPendingPrimaryTerm(), null, 1000L, Collections.singleton(allocationId), - new IndexShardRoutingTable.Builder(shard.shardId()).addShard(shard.routingEntry()).build(), Collections.emptySet()); + new IndexShardRoutingTable.Builder(shard.shardId()).addShard(shard.routingEntry()).build()); CountDownLatch syncCalledLatch = new CountDownLatch(1); PlainActionFuture fut = new PlainActionFuture() { diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java 
b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index f31ac0627138e..6254449df05a6 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -98,19 +98,8 @@ public Map getMetadataMappers() { public void testBuiltinMappers() { IndicesModule module = new IndicesModule(Collections.emptyList()); { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_7_0_0.minimumCompatibilityVersion()); - assertFalse(module.getMapperRegistry().getMapperParsers().isEmpty()); - assertFalse(module.getMapperRegistry().getMetadataMapperParsers(version).isEmpty()); - Map metadataMapperParsers = - module.getMapperRegistry().getMetadataMapperParsers(version); - assertEquals(EXPECTED_METADATA_FIELDS_6x.length, metadataMapperParsers.size()); - int i = 0; - for (String field : metadataMapperParsers.keySet()) { - assertEquals(EXPECTED_METADATA_FIELDS_6x[i++], field); - } - } - { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), + Version.CURRENT.minimumIndexCompatibilityVersion(), Version.CURRENT); assertFalse(module.getMapperRegistry().getMapperParsers().isEmpty()); assertFalse(module.getMapperRegistry().getMetadataMapperParsers(version).isEmpty()); Map metadataMapperParsers = @@ -127,15 +116,12 @@ public void testBuiltinWithPlugins() { IndicesModule noPluginsModule = new IndicesModule(Collections.emptyList()); IndicesModule module = new IndicesModule(fakePlugins); MapperRegistry registry = module.getMapperRegistry(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_7_0_0.minimumCompatibilityVersion()); assertThat(registry.getMapperParsers().size(), greaterThan(noPluginsModule.getMapperRegistry().getMapperParsers().size())); - assertThat(registry.getMetadataMapperParsers(version).size(), - greaterThan(noPluginsModule.getMapperRegistry().getMetadataMapperParsers(version).size())); - Map metadataMapperParsers = module.getMapperRegistry().getMetadataMapperParsers(version); + assertThat(registry.getMetadataMapperParsers(Version.CURRENT).size(), + greaterThan(noPluginsModule.getMapperRegistry().getMetadataMapperParsers(Version.CURRENT).size())); + Map metadataMapperParsers = + module.getMapperRegistry().getMetadataMapperParsers(Version.CURRENT); Iterator iterator = metadataMapperParsers.keySet().iterator(); - if (version.before(Version.V_7_0_0)) { - assertEquals(AllFieldMapper.NAME, iterator.next()); - } assertEquals(IgnoredFieldMapper.NAME, iterator.next()); String last = null; while(iterator.hasNext()) { diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 1ccf858ed1590..4a77160ce36d0 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.cluster.ClusterName; @@ -385,18 +386,18 @@ public void 
testDanglingIndicesWithAliasConflict() throws Exception { .numberOfShards(1) .numberOfReplicas(0) .build(); - DanglingListener listener = new DanglingListener(); - dangling.allocateDangled(Arrays.asList(indexMetaData), listener); - listener.latch.await(); + CountDownLatch latch = new CountDownLatch(1); + dangling.allocateDangled(Arrays.asList(indexMetaData), ActionListener.wrap(latch::countDown)); + latch.await(); assertThat(clusterService.state(), equalTo(originalState)); // remove the alias client().admin().indices().prepareAliases().removeAlias(indexName, alias).get(); // now try importing a dangling index with the same name as the alias, it should succeed. - listener = new DanglingListener(); - dangling.allocateDangled(Arrays.asList(indexMetaData), listener); - listener.latch.await(); + latch = new CountDownLatch(1); + dangling.allocateDangled(Arrays.asList(indexMetaData), ActionListener.wrap(latch::countDown)); + latch.await(); assertThat(clusterService.state(), not(originalState)); assertNotNull(clusterService.state().getMetaData().index(alias)); } @@ -431,20 +432,6 @@ public void testIndexAndTombstoneWithSameNameOnStartup() throws Exception { indicesService.verifyIndexIsDeleted(tombstonedIndex, clusterState); } - private static class DanglingListener implements LocalAllocateDangledIndices.Listener { - final CountDownLatch latch = new CountDownLatch(1); - - @Override - public void onResponse(LocalAllocateDangledIndices.AllocateDangledResponse response) { - latch.countDown(); - } - - @Override - public void onFailure(Throwable e) { - latch.countDown(); - } - } - /** * Tests that teh {@link MapperService} created by {@link IndicesService#createIndexMapperService(IndexMetaData)} contains * custom types and similarities registered by plugins diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java index e8b52fb3bbd2c..80cf443e5007e 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java @@ -355,8 +355,7 @@ public void updateShardState(ShardRouting shardRouting, BiConsumer> primaryReplicaSyncer, long applyingClusterStateVersion, Set inSyncAllocationIds, - IndexShardRoutingTable routingTable, - Set pre60AllocationIds) throws IOException { + IndexShardRoutingTable routingTable) throws IOException { failRandomly(); assertThat(this.shardId(), equalTo(shardRouting.shardId())); assertTrue("current: " + this.shardRouting + ", got: " + shardRouting, this.shardRouting.isSameAllocation(shardRouting)); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java index bb1aac89f3e8f..e77bf5f8d4ae5 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java @@ -75,11 +75,7 @@ public void testSerialization() throws Exception { assertThat(outRequest.metadataSnapshot().asMap(), equalTo(inRequest.metadataSnapshot().asMap())); assertThat(outRequest.isPrimaryRelocation(), equalTo(inRequest.isPrimaryRelocation())); assertThat(outRequest.recoveryId(), equalTo(inRequest.recoveryId())); - if 
(targetNodeVersion.onOrAfter(Version.V_6_0_0_alpha1)) { - assertThat(outRequest.startingSeqNo(), equalTo(inRequest.startingSeqNo())); - } else { - assertThat(SequenceNumbers.UNASSIGNED_SEQ_NO, equalTo(inRequest.startingSeqNo())); - } + assertThat(outRequest.startingSeqNo(), equalTo(inRequest.startingSeqNo())); } } diff --git a/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 5fb67a64d9db5..59e7c21a3e6e8 100644 --- a/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -62,7 +62,6 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; import java.util.ArrayList; @@ -1008,7 +1007,6 @@ private void assertCumulativeQueryCacheStats(IndicesStatsResponse response) { assertEquals(total, shardTotal); } - @TestLogging("_root:DEBUG") // this fails at a very low rate on CI: https://github.com/elastic/elasticsearch/issues/32506 public void testFilterCacheStats() throws Exception { Settings settings = Settings.builder().put(indexSettings()).put("number_of_replicas", 0).build(); assertAcked(prepareCreate("index").setSettings(settings).get()); @@ -1033,7 +1031,6 @@ public void testFilterCacheStats() throws Exception { IndicesStatsResponse stats = client().admin().indices().prepareStats("index").setQueryCache(true).get(); assertCumulativeQueryCacheStats(stats); assertThat(stats.getTotal().queryCache.getHitCount(), equalTo(0L)); - assertThat(stats.getTotal().queryCache.getEvictions(), equalTo(0L)); assertThat(stats.getTotal().queryCache.getMissCount(), greaterThan(0L)); assertThat(stats.getTotal().queryCache.getCacheSize(), greaterThan(0L)); }); @@ -1044,7 +1041,6 @@ public void testFilterCacheStats() throws Exception { IndicesStatsResponse stats = client().admin().indices().prepareStats("index").setQueryCache(true).get(); assertCumulativeQueryCacheStats(stats); assertThat(stats.getTotal().queryCache.getHitCount(), greaterThan(0L)); - assertThat(stats.getTotal().queryCache.getEvictions(), equalTo(0L)); assertThat(stats.getTotal().queryCache.getMissCount(), greaterThan(0L)); assertThat(stats.getTotal().queryCache.getCacheSize(), greaterThan(0L)); }); diff --git a/server/src/test/java/org/elasticsearch/indices/stats/LegacyIndexStatsIT.java b/server/src/test/java/org/elasticsearch/indices/stats/LegacyIndexStatsIT.java deleted file mode 100644 index c8ae3edb886ec..0000000000000 --- a/server/src/test/java/org/elasticsearch/indices/stats/LegacyIndexStatsIT.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.indices.stats; - -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.search.sort.SortOrder; -import org.elasticsearch.test.ESIntegTestCase; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; - -public class LegacyIndexStatsIT extends ESIntegTestCase { - - @Override - protected boolean forbidPrivateIndexSettings() { - return false; - } - - public void testFieldDataFieldsParam() { - assertAcked(client() - .admin() - .indices() - .prepareCreate("test1") - .setSettings(Settings.builder().put(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey(), Version.V_6_0_0)) - .addMapping("_doc", "bar", "type=text,fielddata=true", "baz", "type=text,fielddata=true") - .get()); - - ensureGreen(); - - client().prepareIndex("test1", "_doc", Integer.toString(1)).setSource("{\"bar\":\"bar\",\"baz\":\"baz\"}", XContentType.JSON).get(); - client().prepareIndex("test1", "_doc", Integer.toString(2)).setSource("{\"bar\":\"bar\",\"baz\":\"baz\"}", XContentType.JSON).get(); - refresh(); - - client().prepareSearch("_all").addSort("bar", SortOrder.ASC).addSort("baz", SortOrder.ASC).execute().actionGet(); - - final IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats(); - - { - final IndicesStatsResponse stats = builder.execute().actionGet(); - assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L)); - assertThat(stats.getTotal().fieldData.getFields(), is(nullValue())); - } - - { - final IndicesStatsResponse stats = builder.setFieldDataFields("bar").execute().actionGet(); - assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L)); - assertThat(stats.getTotal().fieldData.getFields().containsField("bar"), is(true)); - assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L)); - assertThat(stats.getTotal().fieldData.getFields().containsField("baz"), is(false)); - } - - { - final IndicesStatsResponse stats = builder.setFieldDataFields("bar", "baz").execute().actionGet(); - assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L)); - assertThat(stats.getTotal().fieldData.getFields().containsField("bar"), is(true)); - assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L)); - assertThat(stats.getTotal().fieldData.getFields().containsField("baz"), is(true)); - assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0L)); - } - - { - final IndicesStatsResponse stats = builder.setFieldDataFields("*").execute().actionGet(); - assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L)); - assertThat(stats.getTotal().fieldData.getFields().containsField("bar"), is(true)); - assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L)); - assertThat(stats.getTotal().fieldData.getFields().containsField("baz"), is(true)); - assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0L)); - } - - { - final 
IndicesStatsResponse stats = builder.setFieldDataFields("*r").execute().actionGet(); - assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L)); - assertThat(stats.getTotal().fieldData.getFields().containsField("bar"), is(true)); - assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L)); - assertThat(stats.getTotal().fieldData.getFields().containsField("baz"), is(false)); - } - - } - -} diff --git a/server/src/test/java/org/elasticsearch/node/NodeTests.java b/server/src/test/java/org/elasticsearch/node/NodeTests.java index c083928436446..6f0419421b868 100644 --- a/server/src/test/java/org/elasticsearch/node/NodeTests.java +++ b/server/src/test/java/org/elasticsearch/node/NodeTests.java @@ -174,7 +174,6 @@ public void testAwaitCloseTimeoutsOnNonInterruptibleTask() throws Exception { shouldRun.set(false); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/41448") public void testCloseOnInterruptibleTask() throws Exception { Node node = new MockNode(baseSettings().build(), basePlugins()); node.start(); diff --git a/server/src/test/java/org/elasticsearch/node/ResponseCollectorServiceTests.java b/server/src/test/java/org/elasticsearch/node/ResponseCollectorServiceTests.java index 5fedfa7869e8b..7ac254f9948f8 100644 --- a/server/src/test/java/org/elasticsearch/node/ResponseCollectorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/node/ResponseCollectorServiceTests.java @@ -77,9 +77,10 @@ public void testNodeStats() throws Exception { public void testConcurrentAddingAndRemoving() throws Exception { String[] nodes = new String[] {"a", "b", "c", "d"}; - final CountDownLatch latch = new CountDownLatch(1); + final CountDownLatch latch = new CountDownLatch(5); Runnable f = () -> { + latch.countDown(); try { latch.await(); } catch (InterruptedException e) { diff --git a/server/src/test/java/org/elasticsearch/persistent/decider/EnableAssignmentDeciderIT.java b/server/src/test/java/org/elasticsearch/persistent/decider/EnableAssignmentDeciderIT.java index aeb4d9b3a9bfb..2ea6567c9f8d0 100644 --- a/server/src/test/java/org/elasticsearch/persistent/decider/EnableAssignmentDeciderIT.java +++ b/server/src/test/java/org/elasticsearch/persistent/decider/EnableAssignmentDeciderIT.java @@ -23,9 +23,7 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.persistent.TestPersistentTasksPlugin; import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestParams; @@ -72,17 +70,7 @@ public void testEnableAssignmentAfterRestart() throws Exception { for (int i = 0; i < numberOfTasks; i++) { PersistentTasksService service = internalCluster().getInstance(PersistentTasksService.class); service.sendStartRequest("task_" + i, TestPersistentTasksExecutor.NAME, new TestParams(randomAlphaOfLength(10)), - new ActionListener>() { - @Override - public void onResponse(PersistentTask task) { - latch.countDown(); - } - - @Override - public void onFailure(Exception e) { - latch.countDown(); - } - }); + ActionListener.wrap(latch::countDown)); } latch.await(); diff --git 
a/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoActionTests.java new file mode 100644 index 0000000000000..d757ee095cdc1 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoActionTests.java @@ -0,0 +1,143 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest.action.admin.cluster; + +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.rest.action.admin.cluster.RestNodesInfoAction.ALLOWED_METRICS; + +public class RestNodesInfoActionTests extends ESTestCase { + + public void testDuplicatedFiltersAreNotRemoved() { + Map params = new HashMap<>(); + params.put("nodeId", "_all,master:false,_all"); + + RestRequest restRequest = buildRestRequest(params); + NodesInfoRequest actual = RestNodesInfoAction.prepareRequest(restRequest); + assertArrayEquals(new String[] { "_all", "master:false", "_all" }, actual.nodesIds()); + } + + public void testOnlyMetrics() { + Map params = new HashMap<>(); + int metricsCount = randomIntBetween(1, ALLOWED_METRICS.size()); + List metrics = new ArrayList<>(); + + for(int i = 0; i < metricsCount; i++) { + metrics.add(randomFrom(ALLOWED_METRICS)); + } + params.put("nodeId", String.join(",", metrics)); + + RestRequest restRequest = buildRestRequest(params); + NodesInfoRequest actual = RestNodesInfoAction.prepareRequest(restRequest); + assertArrayEquals(new String[] { "_all" }, actual.nodesIds()); + assertMetrics(metrics, actual); + } + + public void testAllMetricsSelectedWhenNodeAndMetricSpecified() { + Map params = new HashMap<>(); + String nodeId = randomValueOtherThanMany(ALLOWED_METRICS::contains, () -> randomAlphaOfLength(23)); + String metric = randomFrom(ALLOWED_METRICS); + + params.put("nodeId", nodeId + "," + metric); + RestRequest restRequest = buildRestRequest(params); + + NodesInfoRequest actual = RestNodesInfoAction.prepareRequest(restRequest); + assertArrayEquals(new String[] { nodeId, metric }, actual.nodesIds()); + assertAllMetricsTrue(actual); + } + + public void testSeparateNodeIdsAndMetrics() { + Map params = new HashMap<>(); + List nodeIds = new ArrayList<>(5); + List metrics = new ArrayList<>(5); + + for(int i = 0; i < 5; i++) { + nodeIds.add(randomValueOtherThanMany(ALLOWED_METRICS::contains, () -> randomAlphaOfLength(23))); + metrics.add(randomFrom(ALLOWED_METRICS)); + } + + params.put("nodeId", String.join(",", 
nodeIds)); + params.put("metrics", String.join(",", metrics)); + RestRequest restRequest = buildRestRequest(params); + + NodesInfoRequest actual = RestNodesInfoAction.prepareRequest(restRequest); + assertArrayEquals(nodeIds.toArray(), actual.nodesIds()); + assertMetrics(metrics, actual); + } + + public void testExplicitAllMetrics() { + Map params = new HashMap<>(); + List nodeIds = new ArrayList<>(5); + + for(int i = 0; i < 5; i++) { + nodeIds.add(randomValueOtherThanMany(ALLOWED_METRICS::contains, () -> randomAlphaOfLength(23))); + } + + params.put("nodeId", String.join(",", nodeIds)); + params.put("metrics", "_all"); + RestRequest restRequest = buildRestRequest(params); + + NodesInfoRequest actual = RestNodesInfoAction.prepareRequest(restRequest); + assertArrayEquals(nodeIds.toArray(), actual.nodesIds()); + assertAllMetricsTrue(actual); + } + + private FakeRestRequest buildRestRequest(Map params) { + return new FakeRestRequest.Builder(xContentRegistry()) + .withMethod(RestRequest.Method.GET) + .withPath("/_nodes") + .withParams(params) + .build(); + } + + private void assertMetrics(List metrics, NodesInfoRequest nodesInfoRequest) { + assertTrue((metrics.contains("http") && nodesInfoRequest.http()) || metrics.contains("http") == false); + assertTrue((metrics.contains("ingest") && nodesInfoRequest.ingest()) || metrics.contains("ingest") == false); + assertTrue((metrics.contains("indices") && nodesInfoRequest.indices()) || metrics.contains("indices") == false); + assertTrue((metrics.contains("jvm") && nodesInfoRequest.jvm()) || metrics.contains("jvm") == false); + assertTrue((metrics.contains("os") && nodesInfoRequest.os()) || metrics.contains("os") == false); + assertTrue((metrics.contains("plugins") && nodesInfoRequest.plugins()) || metrics.contains("plugins") == false); + assertTrue((metrics.contains("process") && nodesInfoRequest.process()) || metrics.contains("process") == false); + assertTrue((metrics.contains("settings") && nodesInfoRequest.settings()) || metrics.contains("settings") == false); + assertTrue((metrics.contains("thread_pool") && nodesInfoRequest.threadPool()) || metrics.contains("thread_pool") == false); + assertTrue((metrics.contains("transport") && nodesInfoRequest.transport()) || metrics.contains("transport") == false); + } + + private void assertAllMetricsTrue(NodesInfoRequest nodesInfoRequest) { + assertTrue(nodesInfoRequest.http()); + assertTrue(nodesInfoRequest.ingest()); + assertTrue(nodesInfoRequest.indices()); + assertTrue(nodesInfoRequest.jvm()); + assertTrue(nodesInfoRequest.os()); + assertTrue(nodesInfoRequest.plugins()); + assertTrue(nodesInfoRequest.process()); + assertTrue(nodesInfoRequest.settings()); + assertTrue(nodesInfoRequest.threadPool()); + assertTrue(nodesInfoRequest.transport()); + } +} diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java deleted file mode 100644 index dd98089246b51..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsActionTests.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.rest.action.document; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestRequest.Method; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.junit.Before; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -public class RestMultiTermVectorsActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - new RestMultiTermVectorsAction(Settings.EMPTY, controller()); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(Method.POST) - .withPath("/some_index/some_type/_mtermvectors") - .build(); - - dispatchRequest(request); - assertWarnings(RestMultiTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new HashMap<>(); - params.put("type", "some_type"); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(Method.GET) - .withPath("/some_index/_mtermvectors") - .withParams(params) - .build(); - - dispatchRequest(request); - assertWarnings(RestMultiTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() throws IOException { - XContentBuilder content = XContentFactory.jsonBuilder().startObject() - .startArray("docs") - .startObject() - .field("_type", "some_type") - .field("_id", 1) - .endObject() - .endArray() - .endObject(); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(Method.GET) - .withPath("/some_index/_mtermvectors") - .withContent(BytesReference.bytes(content), XContentType.JSON) - .build(); - - dispatchRequest(request); - assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java deleted file mode 100644 index d93f7749f63e3..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestTermVectorsActionTests.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.rest.action.document; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestRequest.Method; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.junit.Before; - -import java.io.IOException; - -public class RestTermVectorsActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - new RestTermVectorsAction(Settings.EMPTY, controller()); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(Method.POST) - .withPath("/some_index/some_type/some_id/_termvectors") - .build(); - - dispatchRequest(request); - assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() throws IOException { - XContentBuilder content = XContentFactory.jsonBuilder().startObject() - .field("_type", "some_type") - .field("_id", 1) - .endObject(); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(Method.GET) - .withPath("/some_index/_termvectors/some_id") - .withContent(BytesReference.bytes(content), XContentType.JSON) - .build(); - - dispatchRequest(request); - assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java deleted file mode 100644 index 3b80d2002c5c5..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestMultiSearchActionTests.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.rest.action.search; - -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.junit.Before; - -import java.nio.charset.StandardCharsets; - -public class RestMultiSearchActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - new RestMultiSearchAction(Settings.EMPTY, controller()); - } - - public void testTypeInPath() { - String content = "{ \"index\": \"some_index\" } \n {} \n"; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_msearch") - .withContent(bytesContent, XContentType.JSON) - .build(); - - dispatchRequest(request); - assertWarnings(RestMultiSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeInBody() { - String content = "{ \"index\": \"some_index\", \"type\": \"some_type\" } \n {} \n"; - BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8)); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.POST) - .withPath("/some_index/_msearch") - .withContent(bytesContent, XContentType.JSON) - .build(); - - dispatchRequest(request); - assertWarnings(RestMultiSearchAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java deleted file mode 100644 index 522d04b37c663..0000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.rest.action.search; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.junit.Before; - -import java.util.HashMap; -import java.util.Map; - -public class RestSearchActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - new RestSearchAction(Settings.EMPTY, controller()); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_search") - .build(); - - dispatchRequest(request); - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new HashMap<>(); - params.put("type", "some_type"); - - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/_search") - .withParams(params) - .build(); - - dispatchRequest(request); - assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java index aa244ff7a320b..e209a65cf3629 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java @@ -132,7 +132,7 @@ private void writeToAndReadFrom(InternalAggregations aggregations, int iteration aggregations.writeTo(out); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytesRef().bytes), registry)) { in.setVersion(version); - InternalAggregations deserialized = InternalAggregations.readAggregations(in); + InternalAggregations deserialized = new InternalAggregations(in); assertEquals(aggregations.aggregations, deserialized.aggregations); if (aggregations.getTopLevelPipelineAggregators() == null) { assertEquals(0, deserialized.getTopLevelPipelineAggregators().size()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java index 4c7fdccb64b00..aa1ff6f55af82 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java @@ -92,7 +92,9 @@ public void testFiltersSortedByKey() { public void testOtherBucket() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); - builder.startArray("filters").endArray(); + builder.startArray("filters") + .startObject().startObject("term").field("field", "foo").endObject().endObject() + .endArray(); builder.endObject(); try (XContentParser parser = createParser(shuffleXContent(builder))) { parser.nextToken(); @@ -102,7 +104,9 @@ public void testOtherBucket() throws IOException { builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); - builder.startArray("filters").endArray(); + builder.startArray("filters") + .startObject().startObject("term").field("field", "foo").endObject().endObject() + .endArray(); builder.field("other_bucket_key", "some_key"); 
builder.endObject(); } @@ -114,7 +118,9 @@ public void testOtherBucket() throws IOException { builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); - builder.startArray("filters").endArray(); + builder.startArray("filters") + .startObject().startObject("term").field("field", "foo").endObject().endObject() + .endArray(); builder.field("other_bucket", false); builder.field("other_bucket_key", "some_key"); builder.endObject(); @@ -192,4 +198,30 @@ public void testRewritePreservesOtherBucket() throws IOException { assertEquals(originalFilters.otherBucket(), rewrittenFilters.otherBucket()); assertEquals(originalFilters.otherBucketKey(), rewrittenFilters.otherBucketKey()); } + + public void testEmptyFilters() throws IOException { + { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.startObject(); + builder.startArray("filters").endArray(); // unkeyed array + builder.endObject(); + XContentParser parser = createParser(shuffleXContent(builder)); + parser.nextToken(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> FiltersAggregationBuilder.parse("agg_name", parser)); + assertThat(e.getMessage(), equalTo("[filters] cannot be empty.")); + } + + { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.startObject(); + builder.startObject("filters").endObject(); // keyed object + builder.endObject(); + XContentParser parser = createParser(shuffleXContent(builder)); + parser.nextToken(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> FiltersAggregationBuilder.parse("agg_name", parser)); + assertThat(e.getMessage(), equalTo("[filters] cannot be empty.")); + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index f671b21eb5e9b..17581b9458413 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -1097,6 +1097,16 @@ public void testLegacyThenNew() throws IOException { assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); } + public void testIllegalInterval() throws IOException { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(), + Collections.emptyList(), + aggregation -> aggregation.dateHistogramInterval(new DateHistogramInterval("foobar")).field(DATE_FIELD), + histogram -> {} + )); + assertThat(e.getMessage(), equalTo("Unable to parse interval [foobar]")); + assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); + } + private void testSearchCase(Query query, List dataset, Consumer configure, Consumer verify) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 96d66c9e0c269..f3c1ea7ca529a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -37,7 +37,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.mapper.IdFieldMapper; @@ -57,15 +56,14 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalMax; +import org.elasticsearch.search.aggregations.metrics.InternalSum; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalSum; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.ArrayList; @@ -356,7 +354,7 @@ public void testResetRootDocId() throws Exception { fieldType.setName(VALUE_FIELD_NAME); BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.add(Queries.newNonNestedFilter(VersionUtils.randomVersion(random())), BooleanClause.Occur.MUST); + bq.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST); bq.add(new TermQuery(new Term(IdFieldMapper.NAME, Uid.encodeId("2"))), BooleanClause.Occur.MUST_NOT); InternalNested nested = search(newSearcher(indexReader, false, true), @@ -638,7 +636,7 @@ public void testPreGetChildLeafCollectors() throws IOException { fieldType2.setHasDocValues(true); Filter filter = search(newSearcher(indexReader, false, true), - Queries.newNonNestedFilter(Version.CURRENT), filterAggregationBuilder, fieldType1, fieldType2); + Queries.newNonNestedFilter(), filterAggregationBuilder, fieldType1, fieldType2); assertEquals("filterAgg", filter.getName()); assertEquals(3L, filter.getDocCount()); diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index a5061c35d7fcf..582e1caa7ce87 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -353,11 +353,23 @@ public void testTerminateAfterEarlyTermination() throws Exception { TestSearchContext context = new TestSearchContext(null, indexShard); context.setTask(new SearchTask(123L, "", "", "", null, Collections.emptyMap())); context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); - context.terminateAfter(1); final IndexReader reader = DirectoryReader.open(dir); IndexSearcher contextSearcher = new IndexSearcher(reader); + context.terminateAfter(numDocs); + { + context.setSize(10); + TotalHitCountCollector collector = new TotalHitCountCollector(); + context.queryCollectors().put(TotalHitCountCollector.class, collector); + QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); + assertFalse(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, 
equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(10)); + assertThat(collector.getTotalHits(), equalTo(numDocs)); + } + + context.terminateAfter(1); { context.setSize(1); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); @@ -419,7 +431,6 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); assertThat(collector.getTotalHits(), equalTo(1)); } - reader.close(); dir.close(); } diff --git a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java index 64712b3e417a0..efbe858e1899a 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java @@ -74,7 +74,7 @@ private static QuerySearchResult createTestInstance() throws Exception { public void testSerialization() throws Exception { QuerySearchResult querySearchResult = createTestInstance(); Version version = VersionUtils.randomVersion(random()); - QuerySearchResult deserialized = copyStreamable(querySearchResult, namedWriteableRegistry, QuerySearchResult::new, version); + QuerySearchResult deserialized = copyWriteable(querySearchResult, namedWriteableRegistry, QuerySearchResult::new, version); assertEquals(querySearchResult.getRequestId(), deserialized.getRequestId()); assertNull(deserialized.getSearchShardTarget()); assertEquals(querySearchResult.topDocs().maxScore, deserialized.topDocs().maxScore, 0f); diff --git a/server/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java index 3e546d1720281..74b0408636f9b 100644 --- a/server/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -250,7 +250,7 @@ public void testSimpleTerminateAfterCount() throws Exception { .setTerminateAfter(2 * max).get(); assertHitCount(searchResponse, max); - assertNull(searchResponse.isTerminatedEarly()); + assertFalse(searchResponse.isTerminatedEarly()); } public void testSimpleIndexSortEarlyTerminate() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java b/server/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java index 08e0c6fdcfaed..cdb2ef3ce2dde 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java +++ b/server/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java @@ -37,11 +37,6 @@ public BlobContainer blobContainer(BlobPath path) { return delegate.blobContainer(path); } - @Override - public void delete(BlobPath path) throws IOException { - delegate.delete(path); - } - @Override public void close() throws IOException { delegate.close(); diff --git a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java index 4519513db2812..17106508ae71a 100644 --- a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java @@ -19,14 +19,26 @@ package org.elasticsearch.transport; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; import 
org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.network.NetworkUtils; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.util.MockPageCacheRecycler; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.hamcrest.Matcher; import java.io.IOException; import java.io.StreamCorruptedException; +import java.net.InetSocketAddress; +import java.util.Collections; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.core.IsInstanceOf.instanceOf; /** Unit tests for {@link TcpTransport} */ @@ -34,50 +46,26 @@ public class TcpTransportTests extends ESTestCase { /** Test ipv4 host with a default port works */ public void testParseV4DefaultPort() throws Exception { - TransportAddress[] addresses = TcpTransport.parse("127.0.0.1", "1234", Integer.MAX_VALUE); + TransportAddress[] addresses = TcpTransport.parse("127.0.0.1", 1234); assertEquals(1, addresses.length); assertEquals("127.0.0.1", addresses[0].getAddress()); assertEquals(1234, addresses[0].getPort()); } - /** Test ipv4 host with a default port range works */ - public void testParseV4DefaultRange() throws Exception { - TransportAddress[] addresses = TcpTransport.parse("127.0.0.1", "1234-1235", Integer.MAX_VALUE); - assertEquals(2, addresses.length); - - assertEquals("127.0.0.1", addresses[0].getAddress()); - assertEquals(1234, addresses[0].getPort()); - - assertEquals("127.0.0.1", addresses[1].getAddress()); - assertEquals(1235, addresses[1].getPort()); - } - /** Test ipv4 host with port works */ public void testParseV4WithPort() throws Exception { - TransportAddress[] addresses = TcpTransport.parse("127.0.0.1:2345", "1234", Integer.MAX_VALUE); + TransportAddress[] addresses = TcpTransport.parse("127.0.0.1:2345", 1234); assertEquals(1, addresses.length); assertEquals("127.0.0.1", addresses[0].getAddress()); assertEquals(2345, addresses[0].getPort()); } - /** Test ipv4 host with port range works */ - public void testParseV4WithPortRange() throws Exception { - TransportAddress[] addresses = TcpTransport.parse("127.0.0.1:2345-2346", "1234", Integer.MAX_VALUE); - assertEquals(2, addresses.length); - - assertEquals("127.0.0.1", addresses[0].getAddress()); - assertEquals(2345, addresses[0].getPort()); - - assertEquals("127.0.0.1", addresses[1].getAddress()); - assertEquals(2346, addresses[1].getPort()); - } - /** Test unbracketed ipv6 hosts in configuration fail. 
Leave no ambiguity */ public void testParseV6UnBracketed() throws Exception { try { - TcpTransport.parse("::1", "1234", Integer.MAX_VALUE); + TcpTransport.parse("::1", 1234); fail("should have gotten exception"); } catch (IllegalArgumentException expected) { assertTrue(expected.getMessage().contains("must be bracketed")); @@ -86,53 +74,117 @@ public void testParseV6UnBracketed() throws Exception { /** Test ipv6 host with a default port works */ public void testParseV6DefaultPort() throws Exception { - TransportAddress[] addresses = TcpTransport.parse("[::1]", "1234", Integer.MAX_VALUE); + TransportAddress[] addresses = TcpTransport.parse("[::1]", 1234); assertEquals(1, addresses.length); assertEquals("::1", addresses[0].getAddress()); assertEquals(1234, addresses[0].getPort()); } - /** Test ipv6 host with a default port range works */ - public void testParseV6DefaultRange() throws Exception { - TransportAddress[] addresses = TcpTransport.parse("[::1]", "1234-1235", Integer.MAX_VALUE); - assertEquals(2, addresses.length); - - assertEquals("::1", addresses[0].getAddress()); - assertEquals(1234, addresses[0].getPort()); - - assertEquals("::1", addresses[1].getAddress()); - assertEquals(1235, addresses[1].getPort()); - } - /** Test ipv6 host with port works */ public void testParseV6WithPort() throws Exception { - TransportAddress[] addresses = TcpTransport.parse("[::1]:2345", "1234", Integer.MAX_VALUE); + TransportAddress[] addresses = TcpTransport.parse("[::1]:2345", 1234); assertEquals(1, addresses.length); assertEquals("::1", addresses[0].getAddress()); assertEquals(2345, addresses[0].getPort()); } - /** Test ipv6 host with port range works */ - public void testParseV6WithPortRange() throws Exception { - TransportAddress[] addresses = TcpTransport.parse("[::1]:2345-2346", "1234", Integer.MAX_VALUE); - assertEquals(2, addresses.length); + public void testRejectsPortRanges() { + expectThrows( + NumberFormatException.class, + () -> TcpTransport.parse("[::1]:100-200", 1000) + ); + } - assertEquals("::1", addresses[0].getAddress()); - assertEquals(2345, addresses[0].getPort()); + public void testDefaultSeedAddressesWithDefaultPort() { + final Matcher> seedAddressMatcher = NetworkUtils.SUPPORTS_V6 ? + containsInAnyOrder( + "[::1]:9300", "[::1]:9301", "[::1]:9302", "[::1]:9303", "[::1]:9304", "[::1]:9305", + "127.0.0.1:9300", "127.0.0.1:9301", "127.0.0.1:9302", "127.0.0.1:9303", "127.0.0.1:9304", "127.0.0.1:9305") : + containsInAnyOrder( + "127.0.0.1:9300", "127.0.0.1:9301", "127.0.0.1:9302", "127.0.0.1:9303", "127.0.0.1:9304", "127.0.0.1:9305"); + testDefaultSeedAddresses(Settings.EMPTY, seedAddressMatcher); + } + + public void testDefaultSeedAddressesWithNonstandardGlobalPortRange() { + final Matcher> seedAddressMatcher = NetworkUtils.SUPPORTS_V6 ? + containsInAnyOrder( + "[::1]:9500", "[::1]:9501", "[::1]:9502", "[::1]:9503", "[::1]:9504", "[::1]:9505", + "127.0.0.1:9500", "127.0.0.1:9501", "127.0.0.1:9502", "127.0.0.1:9503", "127.0.0.1:9504", "127.0.0.1:9505") : + containsInAnyOrder( + "127.0.0.1:9500", "127.0.0.1:9501", "127.0.0.1:9502", "127.0.0.1:9503", "127.0.0.1:9504", "127.0.0.1:9505"); + testDefaultSeedAddresses(Settings.builder().put(TransportSettings.PORT.getKey(), "9500-9600").build(), seedAddressMatcher); + } + + public void testDefaultSeedAddressesWithSmallGlobalPortRange() { + final Matcher> seedAddressMatcher = NetworkUtils.SUPPORTS_V6 ? 
+ containsInAnyOrder("[::1]:9300", "[::1]:9301", "[::1]:9302", "127.0.0.1:9300", "127.0.0.1:9301", "127.0.0.1:9302") : + containsInAnyOrder("127.0.0.1:9300", "127.0.0.1:9301", "127.0.0.1:9302"); + testDefaultSeedAddresses(Settings.builder().put(TransportSettings.PORT.getKey(), "9300-9302").build(), seedAddressMatcher); + } - assertEquals("::1", addresses[1].getAddress()); - assertEquals(2346, addresses[1].getPort()); + public void testDefaultSeedAddressesWithNonstandardProfilePortRange() { + final Matcher> seedAddressMatcher = NetworkUtils.SUPPORTS_V6 ? + containsInAnyOrder("[::1]:9500", "[::1]:9501", "[::1]:9502", "[::1]:9503", "[::1]:9504", "[::1]:9505", + "127.0.0.1:9500", "127.0.0.1:9501", "127.0.0.1:9502", "127.0.0.1:9503", "127.0.0.1:9504", "127.0.0.1:9505") : + containsInAnyOrder("127.0.0.1:9500", "127.0.0.1:9501", "127.0.0.1:9502", "127.0.0.1:9503", "127.0.0.1:9504", "127.0.0.1:9505"); + testDefaultSeedAddresses(Settings.builder() + .put(TransportSettings.PORT_PROFILE.getConcreteSettingForNamespace(TransportSettings.DEFAULT_PROFILE).getKey(), "9500-9600") + .build(), seedAddressMatcher); } - /** Test per-address limit */ - public void testAddressLimit() throws Exception { - TransportAddress[] addresses = TcpTransport.parse("[::1]:100-200", "1000", 3); - assertEquals(3, addresses.length); - assertEquals(100, addresses[0].getPort()); - assertEquals(101, addresses[1].getPort()); - assertEquals(102, addresses[2].getPort()); + public void testDefaultSeedAddressesWithSmallProfilePortRange() { + final Matcher> seedAddressMatcher = NetworkUtils.SUPPORTS_V6 ? + containsInAnyOrder("[::1]:9300", "[::1]:9301", "[::1]:9302", "127.0.0.1:9300", "127.0.0.1:9301", "127.0.0.1:9302") : + containsInAnyOrder("127.0.0.1:9300", "127.0.0.1:9301", "127.0.0.1:9302"); + testDefaultSeedAddresses(Settings.builder() + .put(TransportSettings.PORT_PROFILE.getConcreteSettingForNamespace(TransportSettings.DEFAULT_PROFILE).getKey(), "9300-9302") + .build(), seedAddressMatcher); + } + + public void testDefaultSeedAddressesPrefersProfileSettingToGlobalSetting() { + final Matcher> seedAddressMatcher = NetworkUtils.SUPPORTS_V6 ? + containsInAnyOrder("[::1]:9300", "[::1]:9301", "[::1]:9302", "127.0.0.1:9300", "127.0.0.1:9301", "127.0.0.1:9302") : + containsInAnyOrder("127.0.0.1:9300", "127.0.0.1:9301", "127.0.0.1:9302"); + testDefaultSeedAddresses(Settings.builder() + .put(TransportSettings.PORT_PROFILE.getConcreteSettingForNamespace(TransportSettings.DEFAULT_PROFILE).getKey(), "9300-9302") + .put(TransportSettings.PORT.getKey(), "9500-9600") + .build(), seedAddressMatcher); + } + + public void testDefaultSeedAddressesWithNonstandardSinglePort() { + testDefaultSeedAddresses(Settings.builder().put(TransportSettings.PORT.getKey(), "9500").build(), + NetworkUtils.SUPPORTS_V6 ? 
containsInAnyOrder("[::1]:9500", "127.0.0.1:9500") : containsInAnyOrder("127.0.0.1:9500")); + } + + private void testDefaultSeedAddresses(final Settings settings, Matcher> seedAddressesMatcher) { + final TestThreadPool testThreadPool = new TestThreadPool("test"); + try { + final TcpTransport tcpTransport = new TcpTransport(settings, Version.CURRENT, testThreadPool, + new MockPageCacheRecycler(settings), + new NoneCircuitBreakerService(), writableRegistry(), new NetworkService(Collections.emptyList())) { + + @Override + protected TcpServerChannel bind(String name, InetSocketAddress address) { + throw new UnsupportedOperationException(); + } + + @Override + protected TcpChannel initiateChannel(DiscoveryNode node) { + throw new UnsupportedOperationException(); + } + + @Override + protected void stopInternal() { + throw new UnsupportedOperationException(); + } + }; + + assertThat(tcpTransport.getDefaultSeedAddresses(), seedAddressesMatcher); + } finally { + testThreadPool.shutdown(); + } } public void testDecodeWithIncompleteHeader() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java index ac58b0e25b91e..dd46059aa2abf 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java @@ -66,7 +66,7 @@ public void testLoggingHandler() throws IOException { ", action: cluster:monitor/stats]" + " WRITE: \\d+B"; final MockLogAppender.LoggingExpectation writeExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, writePattern); final String readPattern = @@ -78,7 +78,7 @@ public void testLoggingHandler() throws IOException { " READ: \\d+B"; final MockLogAppender.LoggingExpectation readExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "cluster monitor request", TransportLogger.class.getCanonicalName(), Level.TRACE, readPattern); appender.addExpectation(writeExpectation); diff --git a/server/src/test/resources/org/elasticsearch/env/testReadsFormatWithoutVersion.binary b/server/src/test/resources/org/elasticsearch/env/testReadsFormatWithoutVersion.binary new file mode 100644 index 0000000000000..3a8bb297e7449 Binary files /dev/null and b/server/src/test/resources/org/elasticsearch/env/testReadsFormatWithoutVersion.binary differ diff --git a/settings.gradle b/settings.gradle index 4e6c59116e9ab..e0650f618ddf4 100644 --- a/settings.gradle +++ b/settings.gradle @@ -112,6 +112,8 @@ if (isEclipse) { projects << 'libs:grok-tests' projects << 'libs:geo-tests' projects << 'libs:ssl-config-tests' + projects << 'client:rest-high-level-tests' + projects << 'x-pack:plugin:core-tests' } include projects.toArray(new String[0]) @@ -155,6 +157,15 @@ if (isEclipse) { project(":libs:ssl-config").buildFileName = 'eclipse-build.gradle' project(":libs:ssl-config-tests").projectDir = new File(rootProject.projectDir, 'libs/ssl-config/src/test') project(":libs:ssl-config-tests").buildFileName = 'eclipse-build.gradle' + project(":client:rest-high-level").projectDir = new File(rootProject.projectDir, 'client/rest-high-level/src/main') + project(":client:rest-high-level").buildFileName = 'eclipse-build.gradle' + project(":client:rest-high-level-tests").projectDir = new 
File(rootProject.projectDir, 'client/rest-high-level/src/test') + project(":client:rest-high-level-tests").buildFileName = 'eclipse-build.gradle' + project(":x-pack:plugin:core").projectDir = new File(rootProject.projectDir, 'x-pack/plugin/core/src/main') + project(":x-pack:plugin:core").buildFileName = 'eclipse-build.gradle' + project(":x-pack:plugin:core-tests").projectDir = new File(rootProject.projectDir, 'x-pack/plugin/core/src/test') + project(":x-pack:plugin:core-tests").buildFileName = 'eclipse-build.gradle' + } // look for extra plugins for elasticsearch diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java index 7090419e9f23d..ced2a7bff78d7 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -60,9 +61,9 @@ long appliedOperations() { } public TranslogHandler(NamedXContentRegistry xContentRegistry, IndexSettings indexSettings) { - NamedAnalyzer defaultAnalyzer = new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer()); - IndexAnalyzers indexAnalyzers = - new IndexAnalyzers(indexSettings, defaultAnalyzer, defaultAnalyzer, defaultAnalyzer, emptyMap(), emptyMap(), emptyMap()); + Map analyzers = new HashMap<>(); + analyzers.put(AnalysisRegistry.DEFAULT_ANALYZER_NAME, new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer())); + IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, analyzers, emptyMap(), emptyMap()); SimilarityService similarityService = new SimilarityService(indexSettings, null, emptyMap()); MapperRegistry mapperRegistry = new IndicesModule(emptyList()).getMapperRegistry(); mapperService = new MapperService(indexSettings, indexAnalyzers, xContentRegistry, similarityService, mapperRegistry, diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 0063f2a6d9b0c..b11a0f84fb84a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -293,7 +293,7 @@ public void startPrimary() throws IOException { ShardRouting startedRoutingEntry = ShardRoutingHelper.moveToStarted(primary.routingEntry()); IndexShardRoutingTable routingTable = routingTable(shr -> shr == primary.routingEntry() ? 
startedRoutingEntry : shr); primary.updateShardState(startedRoutingEntry, primary.getPendingPrimaryTerm(), null, - currentClusterStateVersion.incrementAndGet(), activeIds, routingTable, Collections.emptySet()); + currentClusterStateVersion.incrementAndGet(), activeIds, routingTable); for (final IndexShard replica : replicas) { recoverReplica(replica); } @@ -385,7 +385,7 @@ public synchronized void promoteReplicaToPrimary(IndexShard replica, IndexShardRoutingTable routingTable = routingTable(shr -> shr == replica.routingEntry() ? primaryRouting : shr); primary.updateShardState(primaryRouting, newTerm, primaryReplicaSyncer, currentClusterStateVersion.incrementAndGet(), - activeIds(), routingTable, Collections.emptySet()); + activeIds(), routingTable); } private synchronized Set activeIds() { @@ -520,7 +520,7 @@ private void updateAllocationIDsOnPrimary() throws IOException { primary.updateShardState(primary.routingEntry(), primary.getPendingPrimaryTerm(), null, currentClusterStateVersion.incrementAndGet(), - activeIds(), routingTable(Function.identity()), Collections.emptySet()); + activeIds(), routingTable(Function.identity())); } private synchronized void computeReplicationTargets() { diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 105ec5415d686..6175a22760029 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -548,7 +548,7 @@ public static void updateRoutingEntry(IndexShard shard, ShardRouting shardRoutin .addShard(shardRouting) .build(); shard.updateShardState(shardRouting, shard.getPendingPrimaryTerm(), null, currentClusterStateVersion.incrementAndGet(), - inSyncIds, newRoutingTable, Collections.emptySet()); + inSyncIds, newRoutingTable); } protected void recoveryEmptyReplica(IndexShard replica, boolean startReplica) throws IOException { @@ -633,7 +633,7 @@ protected final void recoverUnstartedReplica(final IndexShard replica, new AsyncRecoveryTarget(recoveryTarget, threadPool.generic()), request, Math.toIntExact(ByteSizeUnit.MB.toBytes(1)), between(1, 8)); primary.updateShardState(primary.routingEntry(), primary.getPendingPrimaryTerm(), null, - currentClusterStateVersion.incrementAndGet(), inSyncIds, routingTable, Collections.emptySet()); + currentClusterStateVersion.incrementAndGet(), inSyncIds, routingTable); PlainActionFuture future = new PlainActionFuture<>(); recovery.recoverToTarget(future); @@ -658,9 +658,9 @@ protected void startReplicaAfterRecovery(IndexShard replica, IndexShard primary, inSyncIdsWithReplica.add(replica.routingEntry().allocationId().getId()); // update both primary and replica shard state primary.updateShardState(primary.routingEntry(), primary.getPendingPrimaryTerm(), null, - currentClusterStateVersion.incrementAndGet(), inSyncIdsWithReplica, newRoutingTable, Collections.emptySet()); + currentClusterStateVersion.incrementAndGet(), inSyncIdsWithReplica, newRoutingTable); replica.updateShardState(replica.routingEntry().moveToStarted(), replica.getPendingPrimaryTerm(), null, - currentClusterStateVersion.get(), inSyncIdsWithReplica, newRoutingTable, Collections.emptySet()); + currentClusterStateVersion.get(), inSyncIdsWithReplica, newRoutingTable); } @@ -685,7 +685,7 @@ protected void promoteReplica(IndexShard replica, Set inSyncIds, IndexSh (is, listener) -> listener.onResponse(new 
PrimaryReplicaSyncer.ResyncTask(1, "type", "action", "desc", null, Collections.emptyMap())), currentClusterStateVersion.incrementAndGet(), - inSyncIds, newRoutingTable, Collections.emptySet()); + inSyncIds, newRoutingTable); } private Store.MetadataSnapshot getMetadataSnapshotOrEmpty(IndexShard replica) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java index ccc38ae362991..a32d841927360 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java @@ -49,9 +49,6 @@ public void testContainerCreationAndDeletion() throws IOException { assertTrue(containerFoo.blobExists("test")); assertTrue(containerBar.blobExists("test")); - store.delete(new BlobPath()); - assertFalse(containerFoo.blobExists("test")); - assertFalse(containerBar.blobExists("test")); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index 22a12e74c3516..c6a5d77faf5f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -159,7 +159,7 @@ public boolean innerMatch(final LogEvent event) { } - public static class PatternSeenEventExcpectation implements LoggingExpectation { + public static class PatternSeenEventExpectation implements LoggingExpectation { protected final String name; protected final String logger; @@ -167,7 +167,7 @@ public static class PatternSeenEventExcpectation implements LoggingExpectation { protected final String pattern; volatile boolean saw; - public PatternSeenEventExcpectation(String name, String logger, Level level, String pattern) { + public PatternSeenEventExpectation(String name, String logger, Level level, String pattern) { this.name = name; this.logger = logger; this.level = level; diff --git a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java index 990ae8e1f09a2..8bfa0becaee94 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java @@ -255,4 +255,14 @@ public static Version maxCompatibleVersion(Version version) { public static Version randomIndexCompatibleVersion(Random random) { return randomVersionBetween(random, Version.CURRENT.minimumIndexCompatibilityVersion(), Version.CURRENT); } + + /** + * Returns a random version index compatible with the given version, but not the given version. + */ + public static Version randomPreviousCompatibleVersion(Random random, Version version) { + // TODO: change this to minimumCompatibilityVersion(), but first need to remove released/unreleased + // versions so getPreviousVerison returns the *actual* previous version. 
Otherwise eg 8.0.0 returns say 7.0.2 for previous, + // but 7.2.0 for minimum compat + return randomVersionBetween(random, version.minimumIndexCompatibilityVersion(), getPreviousVersion(version)); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java index 4ab19e833fa47..8086289127ece 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java @@ -208,7 +208,7 @@ public Map profileBoundAddresses() { } @Override - public TransportAddress[] addressesFromString(String address, int perAddressLimit) { + public TransportAddress[] addressesFromString(String address) { return new TransportAddress[0]; } @@ -238,7 +238,7 @@ public void close() { } @Override - public List getLocalAddresses() { + public List getDefaultSeedAddresses() { return Collections.emptyList(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableTransport.java index 1f29739d6284d..d812fdffe9673 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableTransport.java @@ -118,13 +118,13 @@ public BoundTransportAddress boundAddress() { } @Override - public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException { - return delegate.addressesFromString(address, perAddressLimit); + public TransportAddress[] addressesFromString(String address) throws UnknownHostException { + return delegate.addressesFromString(address); } @Override - public List getLocalAddresses() { - return delegate.getLocalAddresses(); + public List getDefaultSeedAddresses() { + return delegate.getDefaultSeedAddresses(); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 960d25c960946..783fe77b9bf9c 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -1053,19 +1053,19 @@ public String executor() { final String requestSent = ".*\\[internal:test].*sent to.*\\{TS_B}.*"; final MockLogAppender.LoggingExpectation requestSentExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "sent request", "org.elasticsearch.transport.TransportService.tracer", Level.TRACE, requestSent); final String requestReceived = ".*\\[internal:test].*received request.*"; final MockLogAppender.LoggingExpectation requestReceivedExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "received request", "org.elasticsearch.transport.TransportService.tracer", Level.TRACE, requestReceived); final String responseSent = ".*\\[internal:test].*sent response.*"; final MockLogAppender.LoggingExpectation responseSentExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "sent response", "org.elasticsearch.transport.TransportService.tracer", Level.TRACE, responseSent); final 
String responseReceived = ".*\\[internal:test].*received response from.*\\{TS_B}.*"; final MockLogAppender.LoggingExpectation responseReceivedExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "received response", "org.elasticsearch.transport.TransportService.tracer", Level.TRACE, responseReceived); appender.addExpectation(requestSentExpectation); @@ -1080,12 +1080,12 @@ public String executor() { final String errorResponseSent = ".*\\[internal:testError].*sent error response.*"; final MockLogAppender.LoggingExpectation errorResponseSentExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "sent error response", "org.elasticsearch.transport.TransportService.tracer", Level.TRACE, errorResponseSent); final String errorResponseReceived = ".*\\[internal:testError].*received response from.*\\{TS_B}.*"; final MockLogAppender.LoggingExpectation errorResponseReceivedExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "received error response", "org.elasticsearch.transport.TransportService.tracer", Level.TRACE, errorResponseReceived); appender.addExpectation(errorResponseSentExpectation); @@ -1101,7 +1101,7 @@ public String executor() { "not seen request sent", "org.elasticsearch.transport.TransportService.tracer", Level.TRACE, notSeenSent); final String notSeenReceived = ".*\\[internal:testNotSeen].*received request.*"; final MockLogAppender.LoggingExpectation notSeenReceivedExpectation = - new MockLogAppender.PatternSeenEventExcpectation( + new MockLogAppender.PatternSeenEventExpectation( "not seen request received", "org.elasticsearch.transport.TransportService.tracer", Level.TRACE, notSeenReceived); appender.addExpectation(notSeenSentExpectation); diff --git a/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java index 2395fcc2484e9..cab910ad6c430 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/VersionUtilsTests.java @@ -31,6 +31,7 @@ import static java.util.stream.Collectors.toCollection; import static java.util.stream.Collectors.toList; +import static org.elasticsearch.Version.fromId; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -62,22 +63,21 @@ public void testRandomVersionBetween() { assertTrue(got.onOrBefore(Version.CURRENT)); // sub range - got = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_alpha1, - Version.V_6_2_4); - assertTrue(got.onOrAfter(Version.V_6_0_0_alpha1)); - assertTrue(got.onOrBefore(Version.V_6_2_4)); + got = VersionUtils.randomVersionBetween(random(), fromId(7000099), fromId(7010099)); + assertTrue(got.onOrAfter(fromId(7000099))); + assertTrue(got.onOrBefore(fromId(7010099))); // unbounded lower - got = VersionUtils.randomVersionBetween(random(), null, Version.V_6_0_0_beta1); + got = VersionUtils.randomVersionBetween(random(), null, fromId(7000099)); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); - assertTrue(got.onOrBefore(Version.V_6_0_0_beta1)); + assertTrue(got.onOrBefore(fromId(7000099))); got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allReleasedVersions().get(0)); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); 
assertTrue(got.onOrBefore(VersionUtils.allReleasedVersions().get(0))); // unbounded upper - got = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, null); - assertTrue(got.onOrAfter(Version.V_6_0_0)); + got = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), null); + assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); assertTrue(got.onOrBefore(Version.CURRENT)); got = VersionUtils.randomVersionBetween(random(), VersionUtils.getPreviousVersion(), null); assertTrue(got.onOrAfter(VersionUtils.getPreviousVersion())); @@ -88,9 +88,9 @@ public void testRandomVersionBetween() { assertEquals(got, VersionUtils.getFirstVersion()); got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT); assertEquals(got, Version.CURRENT); - got = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_beta1, - Version.V_6_0_0_beta1); - assertEquals(got, Version.V_6_0_0_beta1); + got = VersionUtils.randomVersionBetween(random(), fromId(7000099), + fromId(7000099)); + assertEquals(got, fromId(7000099)); // implicit range of one got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getFirstVersion()); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java index 7d7263699be88..fac390b6e7692 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java @@ -108,9 +108,8 @@ public void testParseTestSectionWithDoSetAndSkipSectionsNoSkip() throws Exceptio assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("First test section")); assertThat(testSection.getSkipSection(), notNullValue()); - assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_6_0_0)); - assertThat(testSection.getSkipSection().getUpperVersion(), - equalTo(Version.V_6_2_0)); + assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.fromString("6.0.0"))); + assertThat(testSection.getSkipSection().getUpperVersion(), equalTo(Version.fromString("6.2.0"))); assertThat(testSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259")); assertThat(testSection.getExecutableSections().size(), equalTo(2)); DoSection doSection = (DoSection)testSection.getExecutableSections().get(0); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java index da485a8430e28..48655a61813d8 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java @@ -148,7 +148,7 @@ public void testParseTestSetupTeardownAndSections() throws Exception { assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getReason(), equalTo("for newer versions the index name is always returned")); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getLowerVersion(), - equalTo(Version.V_6_0_0)); + equalTo(Version.fromString("6.0.0"))); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getUpperVersion(), equalTo(Version.CURRENT)); 
assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().size(), equalTo(3)); assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(0), instanceOf(DoSection.class)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java index e2d30d0bc2099..bf73f2efba42a 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java @@ -118,7 +118,7 @@ public void testParseSetupAndSkipSectionNoSkip() throws Exception { assertThat(setupSection, notNullValue()); assertThat(setupSection.getSkipSection().isEmpty(), equalTo(false)); assertThat(setupSection.getSkipSection(), notNullValue()); - assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.V_6_0_0)); + assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.fromString("6.0.0"))); assertThat(setupSection.getSkipSection().getUpperVersion(), equalTo(Version.V_6_3_0)); assertThat(setupSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259")); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SkipSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SkipSectionTests.java index e5e466a82cc18..e92ef2ce13576 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SkipSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SkipSectionTests.java @@ -37,7 +37,7 @@ public void testSkip() { SkipSection section = new SkipSection("6.0.0 - 6.1.0", randomBoolean() ? Collections.emptyList() : Collections.singletonList("warnings"), "foobar"); assertFalse(section.skip(Version.CURRENT)); - assertTrue(section.skip(Version.V_6_0_0)); + assertTrue(section.skip(Version.fromString("6.0.0"))); section = new SkipSection(randomBoolean() ? 
null : "6.0.0 - 6.1.0", Collections.singletonList("boom"), "foobar"); assertTrue(section.skip(Version.CURRENT)); @@ -54,15 +54,16 @@ public void testMessage() { } public void testParseSkipSectionVersionNoFeature() throws Exception { + Version version = VersionUtils.randomVersion(random()); parser = createParser(YamlXContent.yamlXContent, - "version: \" - 6.1.1\"\n" + + "version: \" - " + version + "\"\n" + "reason: Delete ignores the parent param" ); SkipSection skipSection = SkipSection.parse(parser); assertThat(skipSection, notNullValue()); assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion())); - assertThat(skipSection.getUpperVersion(), equalTo(Version.V_6_1_1)); + assertThat(skipSection.getUpperVersion(), equalTo(version)); assertThat(skipSection.getFeatures().size(), equalTo(0)); assertThat(skipSection.getReason(), equalTo("Delete ignores the parent param")); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java index 96bff85389c8a..b2baf40267287 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java @@ -75,8 +75,8 @@ public void testParseWithSkip() throws Exception { TeardownSection section = TeardownSection.parse(parser); assertThat(section, notNullValue()); assertThat(section.getSkipSection().isEmpty(), equalTo(false)); - assertThat(section.getSkipSection().getLowerVersion(), equalTo(Version.V_6_0_0)); - assertThat(section.getSkipSection().getUpperVersion(), equalTo(Version.V_6_3_0)); + assertThat(section.getSkipSection().getLowerVersion(), equalTo(Version.fromString("6.0.0"))); + assertThat(section.getSkipSection().getUpperVersion(), equalTo(Version.fromString("6.3.0"))); assertThat(section.getSkipSection().getReason(), equalTo("there is a reason")); assertThat(section.getDoSections().size(), equalTo(2)); assertThat(((DoSection)section.getDoSections().get(0)).getApiCallSection().getApi(), equalTo("delete")); diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 5e56414afed24..0075b4989e69f 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -21,6 +21,9 @@ dependencies { // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackProject('plugin').path, configuration: 'testArtifacts') } @@ -70,6 +73,7 @@ File xpackResources = new File(xpackProject('plugin').projectDir, 'src/test/reso project.copyRestSpec.from(xpackResources) { include 'rest-api-spec/api/**' } +File jwks = new File(xpackProject('test:idp-fixture').projectDir, 'oidc/op-jwks.json') integTestCluster { setting 'xpack.security.enabled', 'true' setting 'xpack.security.authc.api_key.enabled', 'true' @@ -78,9 +82,22 @@ integTestCluster { setting 'xpack.monitoring.exporters._local.type', 'local' setting 'xpack.monitoring.exporters._local.enabled', 'false' setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.authc.realms.file.file.order', '0' + setting 
'xpack.security.authc.realms.native.native.order', '1' + setting 'xpack.security.authc.realms.oidc.oidc1.order', '2' + setting 'xpack.security.authc.realms.oidc.oidc1.op.issuer', 'http://127.0.0.1:8080' + setting 'xpack.security.authc.realms.oidc.oidc1.op.authorization_endpoint', "http://127.0.0.1:8080/c2id-login" + setting 'xpack.security.authc.realms.oidc.oidc1.op.token_endpoint', "http://127.0.0.1:8080/c2id/token" + setting 'xpack.security.authc.realms.oidc.oidc1.op.jwkset_path', 'op-jwks.json' + setting 'xpack.security.authc.realms.oidc.oidc1.rp.redirect_uri', 'https://my.fantastic.rp/cb' + setting 'xpack.security.authc.realms.oidc.oidc1.rp.client_id', 'elasticsearch-rp' + keystoreSetting 'xpack.security.authc.realms.oidc.oidc1.rp.client_secret', 'b07efb7a1cf6ec9462afe7b6d3ab55c6c7880262aa61ac28dded292aca47c9a2' + setting 'xpack.security.authc.realms.oidc.oidc1.rp.response_type', 'id_token' + setting 'xpack.security.authc.realms.oidc.oidc1.claims.principal', 'sub' setupCommand 'setupTestAdmin', 'bin/elasticsearch-users', 'useradd', 'test_admin', '-p', 'x-pack-test-password', '-r', 'superuser' waitCondition = waitWithAuth + extraConfigFile 'op-jwks.json', jwks } @@ -741,11 +758,11 @@ setups['app0102_privileges'] = ''' "name": "read", "actions": [ "data:read/*", - "action:login" ], + "action:login" ], "metadata": { "description": "Read access to myapp" } } - } + } } ''' diff --git a/x-pack/docs/en/rest-api/security.asciidoc b/x-pack/docs/en/rest-api/security.asciidoc index c04bae90801ee..abad1e38d77fd 100644 --- a/x-pack/docs/en/rest-api/security.asciidoc +++ b/x-pack/docs/en/rest-api/security.asciidoc @@ -76,6 +76,8 @@ native realm: * <> * <> +[float] +[[security-openid-apis]] === OpenID Connect You can use the following APIs to authenticate users against an OpenID Connect @@ -110,7 +112,7 @@ include::security/get-users.asciidoc[] include::security/has-privileges.asciidoc[] include::security/invalidate-api-keys.asciidoc[] include::security/invalidate-tokens.asciidoc[] -include::security/ssl.asciidoc[] include::security/oidc-prepare-authentication-api.asciidoc[] include::security/oidc-authenticate-api.asciidoc[] include::security/oidc-logout-api.asciidoc[] +include::security/ssl.asciidoc[] diff --git a/x-pack/docs/en/rest-api/security/authenticate.asciidoc b/x-pack/docs/en/rest-api/security/authenticate.asciidoc index 51b0d64419453..d23c410a62389 100644 --- a/x-pack/docs/en/rest-api/security/authenticate.asciidoc +++ b/x-pack/docs/en/rest-api/security/authenticate.asciidoc @@ -46,11 +46,11 @@ The following example output provides information about the "rdeniro" user: "metadata": { }, "enabled": true, "authentication_realm": { - "name" : "default_file", + "name" : "file", "type" : "file" }, "lookup_realm": { - "name" : "default_file", + "name" : "file", "type" : "file" } } diff --git a/x-pack/docs/en/rest-api/security/oidc-authenticate-api.asciidoc b/x-pack/docs/en/rest-api/security/oidc-authenticate-api.asciidoc index 0efb2b23145f7..bc60e4fbf231d 100644 --- a/x-pack/docs/en/rest-api/security/oidc-authenticate-api.asciidoc +++ b/x-pack/docs/en/rest-api/security/oidc-authenticate-api.asciidoc @@ -51,7 +51,7 @@ POST /_security/oidc/authenticate } -------------------------------------------------- // CONSOLE -// TEST[skip:These are properly tested in the OpenIDConnectIT suite] +// TEST[catch:unauthorized] The following example output contains the access token that was generated in response, the amount of time (in seconds) that the token expires in, the type, and the refresh token: diff --git 
a/x-pack/docs/en/rest-api/security/oidc-logout-api.asciidoc b/x-pack/docs/en/rest-api/security/oidc-logout-api.asciidoc index 6f5288a135f2a..cb8840ca53590 100644 --- a/x-pack/docs/en/rest-api/security/oidc-logout-api.asciidoc +++ b/x-pack/docs/en/rest-api/security/oidc-logout-api.asciidoc @@ -39,7 +39,7 @@ POST /_security/oidc/logout } -------------------------------------------------- // CONSOLE -// TEST[skip:These are properly tested in the OpenIDConnectIT suite] +// TEST[catch:unauthorized] The following example output of the response contains the URI pointing to the End Session Endpoint of the OpenID Connect Provider with all the parameters of the Logout Request, as HTTP GET parameters diff --git a/x-pack/docs/en/rest-api/security/oidc-prepare-authentication-api.asciidoc b/x-pack/docs/en/rest-api/security/oidc-prepare-authentication-api.asciidoc index aeb400ce97ef1..a6ce410be6ee6 100644 --- a/x-pack/docs/en/rest-api/security/oidc-prepare-authentication-api.asciidoc +++ b/x-pack/docs/en/rest-api/security/oidc-prepare-authentication-api.asciidoc @@ -57,20 +57,19 @@ POST /_security/oidc/prepare } -------------------------------------------------- // CONSOLE -// TEST[skip:These are properly tested in the OpenIDConnectIT suite] - The following example output of the response contains the URI pointing to the Authorization Endpoint of the OpenID Connect Provider with all the parameters of the Authentication Request, as HTTP GET parameters [source,js] -------------------------------------------------- { - "redirect" : "https://op-provider.org/login?scope=openid&response_type=code&redirect_uri=http%3A%2F%2Foidc-kibana.elastic.co%3A5603%2Fkmi%2Fapi%2Fsecurity%2Fv1%2Foidc&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I&nonce=WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM&client_id=0o43gasov3TxMWJOt839", + "redirect" : "http://127.0.0.1:8080/c2id-login?scope=openid&response_type=id_token&redirect_uri=https%3A%2F%2Fmy.fantastic.rp%2Fcb&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I&nonce=WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM&client_id=elasticsearch-rp", "state" : "4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I", "nonce" : "WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM" } -------------------------------------------------- -// NOTCONSOLE +// TESTRESPONSE[s/4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I/\$\{body.state\}/] +// TESTRESPONSE[s/WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM/\$\{body.nonce\}/] The following example generates an authentication request for the OpenID Connect Realm `oidc1`, where the values for the state and the nonce have been generated by the client @@ -85,7 +84,6 @@ POST /_security/oidc/prepare } -------------------------------------------------- // CONSOLE -// TEST[skip:These are properly tested in the OpenIDConnectIT suite] The following example output of the response contains the URI pointing to the Authorization Endpoint of the OpenID Connect Provider with all the parameters of the Authentication Request, as HTTP GET parameters @@ -93,12 +91,12 @@ OpenID Connect Provider with all the parameters of the Authentication Request, a [source,js] -------------------------------------------------- { - "redirect" : "https://op-provider.org/login?scope=openid&response_type=code&redirect_uri=http%3A%2F%2Foidc-kibana.elastic.co%3A5603%2Fkmi%2Fapi%2Fsecurity%2Fv1%2Foidc&state=lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO&nonce=zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5&client_id=0o43gasov3TxMWJOt839", + "redirect" : 
"http://127.0.0.1:8080/c2id-login?scope=openid&response_type=id_token&redirect_uri=https%3A%2F%2Fmy.fantastic.rp%2Fcb&state=lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO&nonce=zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5&client_id=elasticsearch-rp", "state" : "lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO", "nonce" : "zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5" } -------------------------------------------------- -// NOTCONSOLE +// TESTRESPONSE The following example generates an authentication request for a 3rd party initiated single sign on, specifying the issuer that should be used for matching the appropriate OpenID Connect Authentication realm @@ -107,12 +105,11 @@ issuer that should be used for matching the appropriate OpenID Connect Authentic -------------------------------------------------- POST /_security/oidc/prepare { - "issuer" : "https://op-issuer.org:8800", + "iss" : "http://127.0.0.1:8080", "login_hint": "this_is_an_opaque_string" } -------------------------------------------------- // CONSOLE -// TEST[skip:These are properly tested in the OpenIDConnectIT suite] The following example output of the response contains the URI pointing to the Authorization Endpoint of the OpenID Connect Provider with all the parameters of the Authentication Request, as HTTP GET parameters @@ -120,9 +117,10 @@ OpenID Connect Provider with all the parameters of the Authentication Request, a [source,js] -------------------------------------------------- { - "redirect" : "https://op-provider.org/login?scope=openid&response_type=code&redirect_uri=http%3A%2F%2Foidc-kibana.elastic.co%3A5603%2Fkmi%2Fapi%2Fsecurity%2Fv1%2Foidc&state=lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO&nonce=zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5&client_id=0o43gasov3TxMWJOt839&login_hint=this_is_an_opaque_string", + "redirect" : "http://127.0.0.1:8080/c2id-login?login_hint=this_is_an_opaque_string&scope=openid&response_type=id_token&redirect_uri=https%3A%2F%2Fmy.fantastic.rp%2Fcb&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I&nonce=WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM&client_id=elasticsearch-rp", "state" : "4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I", "nonce" : "WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM" } -------------------------------------------------- -// NOTCONSOLE \ No newline at end of file +// TESTRESPONSE[s/4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I/\$\{body.state\}/] +// TESTRESPONSE[s/WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM/\$\{body.nonce\}/] \ No newline at end of file diff --git a/x-pack/docs/en/security/authentication/oidc-guide.asciidoc b/x-pack/docs/en/security/authentication/oidc-guide.asciidoc new file mode 100644 index 0000000000000..df5ce11c63c14 --- /dev/null +++ b/x-pack/docs/en/security/authentication/oidc-guide.asciidoc @@ -0,0 +1,649 @@ +[role="xpack"] +[[oidc-guide]] + +== Configuring single sign-on to the {stack} using OpenID Connect + +The Elastic Stack supports single sign-on (SSO) using OpenID Connect via {kib} using +{es} as the backend service that holds most of the functionality. {kib} and {es} +together represent an OpenID Connect Relying Party (RP) that supports the Authorization +Code Flow as this is defined in the OpenID Connect specification. + +This guide assumes that you have an OpenID Connect Provider where the +Elastic Stack Relying Party will be registered. + +NOTE: The OpenID Connect realm support in {kib} is designed with the expectation that it +will be the primary authentication method for the users of that {kib} instance. 
The +<> section describes what this entails and how you can set it up to support +other realms if necessary. + +[[oidc-guide-op]] +=== The OpenID Connect Provider + +The OpenID Connect Provider (OP) is the entity in OpenID Connect that is responsible for +authenticating the user and for granting the necessary tokens with the authentication and +user information to be consumed by the Relying Parties. + +In order for the Elastic Stack to be able to use your OpenID Connect Provider for authentication, +a trust relationship needs to be established between the OP and the RP. In the OpenID Connect +Provider, this means registering the RP as a client. OpenID Connect defines a dynamic client +registration protocol but this is usually geared towards real-time client registration and +not the trust establishment process for cross security domain single sign on. All OPs will +also allow for the manual registration of an RP as a client, via a user interface or (less often) +via the consumption of a metadata document. + +The process for registering the Elastic Stack RP will be different from OP to OP and following +the provider's relevant documentation is prudent. The information for the +RP that you commonly need to provide for registration is the following: + +- `Relying Party Name`: An arbitrary identifier for the relying party. Neither the specification +nor the Elastic Stack implementation impose any constraints on this value. +- `Redirect URI`: This is the URI where the OP will redirect the user's browser after authentication. The +appropriate value for this will depend on your setup and whether or not {kib} sits behind a proxy or +load balancer. It will typically be +$\{kibana-url}/api/security/v1/oidc+ where _$\{kibana-url}_ +is the base URL for your {kib} instance. You might also see this called `Callback URI`. + +At the end of the registration process, the OP will assign a Client Identifier and a Client Secret for the RP ({stack}) to use. +Note these two values as they will be used in the {es} configuration. + +[[oidc-guide-authentication]] +=== Configure {es} for OpenID Connect authentication + +The following is a summary of the configuration steps required in order to enable authentication +using OpenID Connect in {es}: + +. <> +. <> +. <> +. <> + +[[oidc-enable-http]] +==== Enable TLS for HTTP + +If your {es} cluster is operating in production mode, then you must +configure the HTTP interface to use SSL/TLS before you can enable OpenID Connect +authentication. + +For more information, see +{ref}/configuring-tls.html#tls-http[Encrypting HTTP Client Communications]. + +[[oidc-enable-token]] +==== Enable the token service + +The {es} OpenID Connect implementation makes use of the {es} Token Service. This service +is automatically enabled if you configure TLS on the HTTP interface, and can be +explicitly configured by including the following in your `elasticsearch.yml` file: + +[source, yaml] +------------------------------------------------------------ +xpack.security.authc.token.enabled: true +------------------------------------------------------------ + +[[oidc-create-realm]] +==== Create an OpenID Connect realm + +OpenID Connect based authentication is enabled by configuring the appropriate realm within +the authentication chain for {es}. + +This realm has a few mandatory settings, and a number of optional settings. +The available settings are described in detail in the +{ref}/security-settings.html#ref-oidc-settings[Security settings in {es}].
This +guide will explore the most common settings. + +Create an OpenID Connect (the realm type is `oidc`) realm in your `elasticsearch.yml` file +similar to what is shown below: + +NOTE: The values used below are meant to be an example and are not intended to apply to +every use case. The details below the configuration snippet provide insights and suggestions +to help you pick the proper values, depending on your OP configuration. + +[source, yaml] +------------------------------------------------------------------------------------- +xpack.security.authc.realms.oidc.oidc1: + order: 2 + rp.client_id: "the_client_id" + rp.response_type: code + rp.redirect_uri: "https://kibana.example.org:5601/api/security/v1/oidc" + op.authorization_endpoint: "https://op.example.org/oauth2/v1/authorize" + op.token_endpoint: "https://op.example.org/oauth2/v1/token" + op.userinfo_endpoint: "https://op.example.org/oauth2/v1/userinfo" + op.endsession_endpoint: "https://op.example.org/oauth2/v1/logout" + op.issuer: "https://op.example.org" + op.jwkset_path: oidc/jwkset.json + claims.principal: sub + claims.groups: "http://example.info/claims/groups" +------------------------------------------------------------------------------------- + +The configuration values used in the example above are: + +xpack.security.authc.realms.oidc.oidc1:: + This defines a new `oidc` authentication realm named "oidc1". + See <> for more explanation of realms. + +order:: + You should define a unique order on each realm in your authentication chain. + It is recommended that the OpenID Connect realm be at the bottom of your authentication + chain (that is, that it has the _highest_ order). + +rp.client_id:: + This, usually opaque, arbitrary string, is the Client Identifier that was assigned to the Elastic Stack RP by the OP upon + registration. + +rp.response_type:: + This is an identifier that controls which OpenID Connect authentication flow this RP supports and also + which flow this RP requests the OP should follow. Supported values are + - `code`, which means that the RP wants to use the Authorization Code flow. If your OP supports the + Authorization Code flow, you should select this instead of the Implicit Flow. + - `id_token token` which means that the RP wants to use the Implicit flow and we also request an oAuth2 + access token from the OP, that we can potentially use for follow up requests ( UserInfo ). This + should be selected if the OP offers a UserInfo endpoint in its configuration, or if you know that + the claims you will need to use for role mapping are not available in the ID Token. + - `id_token` which means that the RP wants to use the Implicit flow, but is not interested in getting + an oAuth2 token too. Select this if you are certain that all necessary claims will be contained in + the ID Token or if the OP doesn't offer a User Info endpoint. + +rp.redirect_uri:: + The redirect URI where the OP will redirect the browser after authentication. This needs to be + _exactly_ the same as the one <> and will + typically be +$\{kibana-url}/api/security/v1/oidc+ where _$\{kibana-url}_ is the base URL for your {kib} instance + +op.authorization_endpoint:: + The URL for the Authorization Endpoint in the OP. This is where the user's browser + will be redirected to start the authentication process. The value for this setting should be provided by your + OpenID Connect Provider. + +op.token_endpoint:: + The URL for the Token Endpoint in the OpenID Connect Provider. 
This is the endpoint where + {es} will send a request to exchange the code for an ID Token, in the case where the Authorization Code + flow is used. The value for this setting should be provided by your OpenID Connect Provider. + +op.userinfo_endpoint:: + (Optional) The URL for the UserInfo Endpoint in the OpenID Connect Provider. This is the endpoint of the OP that + can be queried to get further user information, if required. The value for this setting should be provided by your + OpenID Connect Provider. + +op.endsession_endpoint:: + (Optional) The URL to the End Session Endpoint in the OpenID Connect Provider. This is the endpoint where the user's + browser will be redirected after local logout, if the realm is configured for RP initiated Single Logout and + the OP supports it. The value for this setting should be provided by your OpenID Connect Provider. + +op.jwkset_path:: + The path to a file containing a JSON Web Key Set with the key material that the OpenID Connect + Provider uses for signing tokens and claims responses. The path is resolved relative to the {es} + config directory. + {es} will automatically monitor this file for changes and will reload the configuration whenever + it is updated. Your OpenID Connect Provider should provide you with this file. + +claims.principal:: See <>. +claims.groups:: See <>. + +A final piece of configuration of the OpenID Connect realm is to set the `Client Secret` that was assigned +to the RP during registration in the OP. This is a secure setting and as such is not defined in the realm +configuration in `elasticsearch.yml` but added to the {ref}/secure-settings.html[elasticsearch keystore]. +For instance + + +[source,sh] +---- +bin/elasticsearch-keystore add xpack.security.authc.realms.oidc.oidc1.rp.client_secret +---- + + +NOTE: According to the OpenID Connect specification, the OP should also make their configuration +available at a well known URL, which is the concatenation of their `Issuer` value with the +`.well-known/openid-configuration` string. For example: `https://op.org.com/.well-known/openid-configuration` +That document should contain all the necessary information to configure the OpenID Connect realm in {es}. + + +[[oidc-claims-mapping]] +==== Claims mapping + +===== Claims and scopes + +When authenticating to {kib} using OpenID Connect, the OP will provide information about the user +in the form of OpenID Connect Claims, that can be included either in the ID Token, or be retrieved from the +UserInfo endpoint of the OP. The claim is defined as a piece of information asserted by the OP +for the authenticated user. Simply put, a claim is a name/value pair that contains information about +the user. Related to claims, we also have the notion of OpenID Connect Scopes. Scopes are identifiers +that are used to request access to specific lists of claims. The standard defines a set of scope +identifiers that can be requested. The only mandatory one is `openid`, while commonly used ones are +`profile` and `email`. The `profile` scope requests access to the `name`,`family_name`,`given_name`,`middle_name`,`nickname`, +`preferred_username`,`profile`,`picture`,`website`,`gender`,`birthdate`,`zoneinfo`,`locale`, and `updated_at` claims. +The `email` scope requests access to the `email` and `email_verified` claims. The process is that +the RP requests specific scopes during the authentication request. 
If the OP Privacy Policy +allows it and the authenticating user consents to it, the related claims are returned to the +RP (either in the ID Token or as a UserInfo response). + +The list of the supported claims will vary depending on the OP you are using, but you can expect +the https://openid.net/specs/openid-connect-core-1_0.html#StandardClaims[Standard Claims] to be +largely supported. + +[[oidc-claim-to-property]] +===== Mapping claims to user properties + +The goal of claims mapping is to configure {es} in such a way as to be able to map the values of +specified returned claims to one of the <> that are supported +by {es}. These user properties are then utilized to identify the user in the {kib} UI or the audit +logs, and can also be used to create <> rules. + +The recommended steps for configuring OpenID Claims mapping are as follows: + +. Consult your OP configuration to see what claims it might support. Note that + the list provided in the OP's metadata or in the configuration page of the OP + is a list of potentially supported claims. However, for privacy reasons it might + not be a complete one, or not all supported claims will be available for all + authenticated users. + +. Read through the list of <> that {es} + supports, and decide which of them are useful to you, and can be provided by + your OP in the form of claims. At a _minimum_, the `principal` user property + is required. + +. Configure your OP to "release" those claims to your {stack} Relying + party. This process greatly varies by provider. You can use a static + configuration while others will support that the RP requests the scopes that + correspond to the claims to be "released" on authentication time. See + {ref}/security-settings.html#ref-oidc-settings[`rp.requested_scopes`] for details about how + to configure the scopes to request. To ensure interoperability and minimize + the errors, you should only request scopes that the OP supports, and which you + intend to map to {es} user properties. + +. Configure the OpenID Connect realm in {es} to associate the {es} user properties (see + <> below), to the name of the claims that your + OP will release. In the example above, we have configured the `principal` and + `groups` user properties as follows: + + .. `claims.principal: sub` : This instructs {es} to look for the OpenID Connect claim named `sub` + in the ID Token that the OP issued for the user ( or in the UserInfo response ) and assign the + value of this claim to the `principal` user property. `sub` is a commonly used claim for the + principal property as it is an identifier of the user in the OP and it is also a required + claim of the ID Token, thus offering guarantees that it will be available. It is, however, + only used as an example here, the OP may provide another claim that is a better fit for your needs. + + .. `claims.groups: "http://example.info/claims/groups"` : Similarly, this instructs {es} to look + for the claim with the name `http://example.info/claims/groups` (note that this is a URI - an + identifier, treated as a string and not a URL pointing to a location that will be retrieved) + either in the ID Token or in the UserInfo response, and map the value(s) of it to the user + property `groups` in {es}. There is no standard claim in the specification that is used for + expressing roles or group memberships of the authenticated user in the OP, so the name of the + claim that should be mapped here, will vary greatly between providers. Consult your OP + documentation for more details. 
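For illustration of the claim mapping just described, the following is a minimal Java sketch, assuming the claims from the ID Token or UserInfo response have already been parsed into a plain `Map`. The class and method names are hypothetical and this is not how the {es} realm is implemented; it only shows how the `sub` claim and the URI-named groups claim from the example configuration map onto the `principal` and `groups` user properties.

[source,java]
--------------------------------------------------
import java.util.List;
import java.util.Map;

// Sketch only: assumes claims are already parsed into a Map; hypothetical names.
public class ClaimMappingSketch {

    // Mirrors `claims.principal: sub` from the example realm configuration.
    static String principal(Map<String, Object> claims) {
        return (String) claims.get("sub");
    }

    // Mirrors `claims.groups: "http://example.info/claims/groups"`.
    @SuppressWarnings("unchecked")
    static List<String> groups(Map<String, Object> claims) {
        Object value = claims.get("http://example.info/claims/groups");
        // The claim may carry a single string or a list of strings.
        return value instanceof List ? (List<String>) value : List.of((String) value);
    }

    public static void main(String[] args) {
        Map<String, Object> claims = Map.of(
            "sub", "jdoe",
            "http://example.info/claims/groups", List.of("finance-team", "hr"));
        System.out.println(principal(claims)); // jdoe
        System.out.println(groups(claims));    // [finance-team, hr]
    }
}
--------------------------------------------------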
+ +[[oidc-user-properties]] +===== {es} user properties + +The {es} OpenID Connect realm can be configured to map OpenID Connect claims to the +following properties on the authenticated user: + +principal:: _(Required)_ + This is the _username_ that will be applied to a user that authenticates + against this realm. + The `principal` appears in places such as the {es} audit logs. + +NOTE: If the principal property fails to be mapped from a claim, the authentication fails. + +groups:: _(Recommended)_ + If you wish to use your OP's concept of groups or roles as the basis for a + user's {es} privileges, you should map them with this property. + The `groups` are passed directly to your <>. + +name:: _(Optional)_ The user's full name. +mail:: _(Optional)_ The user's email address. +dn:: _(Optional)_ The user's X.500 _Distinguished Name_. + + +===== Extracting partial values from OpenID Connect claims + +There are some occasions where the value of a claim may contain more information +than you wish to use within {es}. A common example of this is one where the +OP works exclusively with email addresses, but you would like the user's +`principal` to use the _local-name_ part of the email address. +For example if their email address was `james.wong@staff.example.com`, then you +would like their principal to simply be `james.wong`. + +This can be achieved using the `claim_patterns` setting in the {es} +realm, as demonstrated in the realm configuration below: + +[source, yaml] +------------------------------------------------------------------------------------- +xpack.security.authc.realms.oidc.oidc1: + rp.client_id: "the_client_id" + rp.response_type: code + rp.redirect_uri: "https://kibana.example.org:5601/api/security/v1/oidc" + op.authorization_endpoint: "https://op.example.org/oauth2/v1/authorize" + op.token_endpoint: "https://op.example.org/oauth2/v1/token" + op.userinfo_endpoint: "https://op.example.org/oauth2/v1/userinfo" + op.endsession_endpoint: "https://op.example.org/oauth2/v1/logout" + op.issuer: "https://op.example.org" + op.jwkset_path: oidc/jwkset.json + claims.principal: email_verified + claim_patterns.principal: "^([^@]+)@staff\\.example\\.com$" +------------------------------------------------------------------------------------- + +In this case, the user's `principal` is mapped from the `email_verified` claim, but a +regular expression is applied to the value before it is assigned to the user. +If the regular expression matches, then the result of the first group is used as the +effective value. If the regular expression does not match then the claim +mapping fails. + +In this example, the email address must belong to the `staff.example.com` domain, +and then the local-part (anything before the `@`) is used as the principal. +Any users who try to login using a different email domain will fail because the +regular expression will not match against their email address, and thus their +principal user property - which is mandatory - will not be populated. + +IMPORTANT: Small mistakes in these regular expressions can have significant +security consequences. For example, if we accidentally left off the trailing +`$` from the example above, then we would match any email address where the +domain starts with `staff.example.com`, and this would accept an email +address such as `admin@staff.example.com.attacker.net`. It is important that +you make sure your regular expressions are as precise as possible so that +you do not inadvertently open an avenue for user impersonation attacks. 
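To make the behaviour of the `claim_patterns` example above concrete, here is a small Java sketch using `java.util.regex` with the exact pattern from the configuration snippet. It only illustrates the match-and-extract semantics that the realm applies internally; the class and method names are hypothetical.

[source,java]
--------------------------------------------------
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ClaimPatternSketch {
    // The claim_patterns.principal value from the example realm configuration.
    private static final Pattern PRINCIPAL = Pattern.compile("^([^@]+)@staff\\.example\\.com$");

    // Returns the local-name part if the claim value matches; an empty result
    // means the principal cannot be mapped and authentication fails.
    static Optional<String> mapPrincipal(String claimValue) {
        Matcher m = PRINCIPAL.matcher(claimValue);
        return m.matches() ? Optional.of(m.group(1)) : Optional.empty();
    }

    public static void main(String[] args) {
        System.out.println(mapPrincipal("james.wong@staff.example.com"));         // Optional[james.wong]
        System.out.println(mapPrincipal("admin@staff.example.com.attacker.net")); // Optional.empty
    }
}
--------------------------------------------------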
+ +[[third-party-login]] +==== Third party initiated single sign-on + +The OpenID Connect realm in {es} supports 3rd party initiated login as described in the +https://openid.net/specs/openid-connect-core-1_0.html#ThirdPartyInitiatedLogin[relevant specification]. + +This allows the OP itself, or another third party other than the RP, to initiate the authentication +process while requesting the OP to be used for the authentication. Please note that the Elastic +Stack RP should already be configured for this OP, in order for this process to succeed. + + +[[oidc-logout]] +==== OpenID Connect Logout + +The OpenID Connect realm in {es} supports RP-Initiated Logout Functionality as +described in the +https://openid.net/specs/openid-connect-session-1_0.html#RPLogout[relevant part of the specification]. + +In this process, the OpenID Connect RP (the Elastic Stack in this case) will redirect the user's +browser to a predefined URL of the OP after successfully completing a local logout. The OP can then +log the user out as well, depending on the configuration, and should finally redirect the user back to the +RP. The `op.endsession_endpoint` in the realm configuration determines the URL in the OP that the browser +will be redirected to. The `rp.post_logout_redirect_uri` setting determines the URL to redirect +the user back to after the OP logs them out. + +When configuring `rp.post_logout_redirect_uri`, care should be taken to not point this to a URL that +will trigger re-authentication of the user. For instance, when using OpenID Connect to support +single sign-on to {kib}, this could be set to +$\{kibana-url}/logged_out+, which will show a user- +friendly message to the user. + +[[oidc-role-mapping]] +=== Configuring role mappings + +When a user authenticates using OpenID Connect, they are identified to the Elastic Stack, +but this does not automatically grant them access to perform any actions or +access any data. + +Your OpenID Connect users cannot do anything until they are assigned roles. This can be done +through either the +{ref}/security-api-put-role-mapping.html[add role mapping API], or with +<>. + +NOTE: You cannot use {stack-ov}/mapping-roles.html#mapping-roles-file[role mapping files] +to grant roles to users authenticating via OpenID Connect. + +This is an example of a simple role mapping that grants the `kibana_user` role +to any user who authenticates against the `oidc1` OpenID Connect realm: + +[source,js] +-------------------------------------------------- +PUT /_security/role_mapping/oidc-kibana +{ + "roles": [ "kibana_user" ], + "enabled": true, + "rules": { + "field": { "realm.name": "oidc1" } + } +} +-------------------------------------------------- +// CONSOLE +// TEST + + +The user properties that are mapped via the realm configuration are used to process +role mapping rules, and these rules determine which roles a user is granted. + +The user fields that are provided to the role +mapping are derived from the OpenID Connect claims as follows: + +- `username`: The `principal` user property +- `dn`: The `dn` user property +- `groups`: The `groups` user property +- `metadata`: See <> + +For more information, see <> and +{ref}/security-api.html#security-role-mapping-apis[role mapping APIs]. + +If your OP has the ability to provide groups or roles to RPs via the use of +an OpenID Claim, then you should map this claim to the `claims.groups` setting in +the {es} realm (see <>), and then make use of it in a role mapping +as per the example below.
+
+This mapping grants the {es} `finance_data` role to any users who authenticate
+via the `oidc1` realm with the `finance-team` group membership.
+
+[source,js]
+--------------------------------------------------
+PUT /_security/role_mapping/oidc-finance
+{
+  "roles": [ "finance_data" ],
+  "enabled": true,
+  "rules": { "all": [
+        { "field": { "realm.name": "oidc1" } },
+        { "field": { "groups": "finance-team" } }
+  ] }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST
+
+If your users also exist in a repository that can be directly accessed by {es}
+(such as an LDAP directory), then you can use
+<> instead of role mappings.
+
+In this case, you perform the following steps:
+1. In your OpenID Connect realm, assign a claim to act as the lookup userid,
+   by configuring the `claims.principal` setting.
+2. Create a new realm that can look up users from your local repository (e.g. an
+   `ldap` realm).
+3. In your OpenID Connect realm, set `authorization_realms` to the name of the realm you
+   created in step 2.
+
+[[oidc-user-metadata]]
+=== User metadata
+
+By default, users who authenticate via OpenID Connect will have some additional metadata
+fields. These fields include every OpenID Connect claim that is provided in the authentication
+response (regardless of whether it is mapped to an {es} user property). Each claim is stored
+in a metadata field named `oidc(claim_name)`, where "claim_name" is the name of the
+claim as it was contained in the ID Token or in the User Info response. Note that these will
+include all the https://openid.net/specs/openid-connect-core-1_0.html#IDToken[ID Token claims]
+that pertain to the authentication event, rather than the user themselves.
+
+This behaviour can be disabled by adding `populate_user_metadata: false` as
+a setting in the oidc realm.
+
+[[oidc-kibana]]
+=== Configuring {kib}
+
+OpenID Connect authentication in {kib} requires a small number of settings
+in addition to the standard {kib} security configuration. The
+{kibana-ref}/using-kibana-with-security.html[{kib} security documentation]
+provides details on the available configuration options that you can apply.
+
+In particular, since your {es} nodes have been configured to use TLS on the HTTP
+interface, you must configure {kib} to use an `https` URL to connect to {es}, and
+you may need to configure `elasticsearch.ssl.certificateAuthorities` to trust
+the certificates that {es} has been configured to use.
+
+OpenID Connect authentication in {kib} is also subject to the
+`xpack.security.sessionTimeout` setting that is described in the {kib} security
+documentation, and you may wish to adjust this timeout to meet your local needs.
+
+The three additional settings that are required for OpenID Connect support are shown below:
+
+[source, yaml]
+------------------------------------------------------------
+xpack.security.authProviders: [oidc]
+xpack.security.auth.oidc.realm: "oidc1"
+server.xsrf.whitelist: [/api/security/v1/oidc]
+------------------------------------------------------------
+
+The configuration values used in the example above are:
+
+`xpack.security.authProviders`::
+Set this to `[ oidc ]` to instruct {kib} to use OpenID Connect single sign-on as the
+authentication method. This instructs {kib} to attempt to initiate an SSO flow
+every time a user attempts to access a URL in {kib}, if the user is not already
+authenticated. If you also want to allow users to log in with a username and password,
+you must enable the `basic` authProvider too.
For example:
+
+[source, yaml]
+------------------------------------------------------------
+xpack.security.authProviders: [oidc, basic]
+------------------------------------------------------------
+
+This will allow users who haven't already authenticated with OpenID Connect to
+navigate directly to the `/login` page in {kib} in order to use the login form.
+
+`xpack.security.auth.oidc.realm`::
+The name of the OpenID Connect realm in {es} that should handle authentication
+for this {kib} instance.
+
+`server.xsrf.whitelist`::
+{kib} has in-built protection against _Cross Site Request Forgery_ attacks, which
+is designed to prevent the {kib} server from processing requests that
+originated from outside the {kib} application.
+In order to support OpenID Connect messages that originate from your
+OP or a third party (see <>), we need to explicitly _whitelist_ the
+OpenID Connect authentication endpoint within {kib}, so that the {kib} server will
+not reject these external messages.
+
+
+=== OpenID Connect without {kib}
+
+The OpenID Connect realm is designed to allow users to authenticate to {kib} and, as
+such, most parts of the guide above assume that {kib} is used.
+This section describes how a custom web application could use the relevant OpenID
+Connect REST APIs in order to authenticate the users to {es} with OpenID Connect.
+
+Single sign-on realms such as OpenID Connect and SAML make use of the Token Service in
+{es} and in principle exchange a SAML or OpenID Connect authentication response for
+an {es} access token and a refresh token. The access token is used as credentials for
+subsequent calls to {es}. The refresh token enables the user to get new {es} access
+tokens after the current one expires.
+
+NOTE: The {es} Token Service can be seen as a minimal OAuth2 authorization server
+and the access token and refresh token mentioned above are tokens that pertain
+_only_ to this authorization server. They are generated and consumed _only_ by {es}
+and are in no way related to the tokens (access token and ID Token) that the
+OpenID Connect Provider issues.
+
+==== Register the RP with an OpenID Connect Provider
+
+The Relying Party ({es} and the custom web app) will need to be registered as a
+client with the OpenID Connect Provider. Note that when registering the
+`Redirect URI`, it needs to be a URL in the custom web app.
+
+==== OpenID Connect Realm
+
+An OpenID Connect realm needs to be created and configured accordingly
+in {es}. See <>.
+
+==== Service Account user for accessing the APIs
+
+The realm is designed with the assumption that there needs to be a privileged entity
+acting as an authentication proxy. In this case, the custom web application is the
+authentication proxy handling the authentication of end users (more correctly,
+"delegating" the authentication to the OpenID Connect Provider). The OpenID Connect
+APIs require authentication and the necessary authorization level for the authenticated
+user. For this reason, a Service Account user needs to be created and assigned a role
+that gives them the `manage_oidc` cluster privilege. The use of the `manage_token`
+cluster privilege will be necessary after the authentication takes place, so that
+the user can maintain access or be subsequently logged out.
+ +[source,js] +-------------------------------------------------- +POST /_security/role/facilitator-role +{ + "cluster" : ["manage_oidc", "manage_token"] +} +-------------------------------------------------- +// CONSOLE + + +[source,js] +-------------------------------------------------- +POST /_security/user/facilitator +{ + "password" : "", + "roles" : [ "facilitator-role"] +} +-------------------------------------------------- +// CONSOLE + + +==== Handling the authentication flow + +On a high level, the custom web application would need to perform the following steps in order to +authenticate a user with OpenID Connect: + +. Make an HTTP POST request to `_security/oidc/prepare`, authenticating as the `facilitator` user, using the name of the +OpenID Connect realm in the {es} configuration in the request body. See the +{ref}/security-api-oidc-prepare-authentication.html[OIDC Prepare Authentication API] for more details ++ +[source,js] +-------------------------------------------------- +POST /_security/oidc/prepare +{ + "realm" : "oidc1" +} +-------------------------------------------------- +// CONSOLE ++ +. Handle the response to `/_security/oidc/prepare`. The response from {es} will contain 3 parameters: + `redirect`, `state`, `nonce`. The custom web application would need to store the values for `state` + and `nonce` in the user's session (client side in a cookie or server side if session information is + persisted this way) and redirect the user's browser to the URL that will be contained in the + `redirect` value. +. Handle a subsequent response from the OP. After the user is successfully authenticated with the + OpenID Connect Provider, they will be redirected back to the callback/redirect URI. Upon receiving + this HTTP GET request, the custom web app will need to make an HTTP POST request to + `_security/oidc/authenticate`, again - authenticating as the `facilitator` user - passing the URL + where the user's browser was redirected to, as a parameter, along with the + values for `nonce` and `state` it had saved in the user's session previously. + See {ref}/security-api-oidc-authenticate.html[OIDC Authenticate API] for more details ++ +[source,js] +----------------------------------------------------------------------- +POST /_security/oidc/authenticate +{ + "redirect_uri" : "https://oidc-kibana.elastic.co:5603/api/security/v1/oidc?code=jtI3Ntt8v3_XvcLzCFGq&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I", + "state" : "4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I", + "nonce" : "WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM" +} +----------------------------------------------------------------------- +// CONSOLE +// TEST[catch:unauthorized] ++ +Elasticsearch will validate this and if all is correct will respond with an access token that can be used + as a `Bearer` token for subsequent requests and a refresh token that can be later used to refresh the given + access token as described in {ref}/security-api-get-token.html[get token API]. +. At some point, if necessary, the custom web application can log the user out by using the + {ref}/security-api-oidc-logout.html[OIDC Logout API] passing the access token and refresh token as parameters. 
For example: ++ +[source,js] +-------------------------------------------------- +POST /_security/oidc/logout +{ + "token" : "dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==", + "refresh_token": "vLBPvmAB6KvwvJZr27cS" +} +-------------------------------------------------- +// CONSOLE +// TEST[catch:unauthorized] ++ +If the realm is configured accordingly, this may result in a response with a `redirect` parameter indicating where +the user needs to be redirected in the OP in order to complete the logout process. diff --git a/x-pack/docs/en/security/configuring-es.asciidoc b/x-pack/docs/en/security/configuring-es.asciidoc index b34e6e0c0e9f2..fdc49ef21e213 100644 --- a/x-pack/docs/en/security/configuring-es.asciidoc +++ b/x-pack/docs/en/security/configuring-es.asciidoc @@ -11,38 +11,31 @@ such as encrypting communications, role-based access control, IP filtering, and auditing. For more information, see {stack-ov}/elasticsearch-security.html[Securing the {stack}]. -To use {es} {security-features}: - -. Verify that you are using a license that includes the {security-features}. +. Verify that you are using a license that includes the specific +{security-features} you want. + -- -If you want to try all of the platinum features, you can start a 30-day trial. -At the end of the trial period, you can purchase a subscription to keep using -the full functionality. For more information, see -https://www.elastic.co/subscriptions and -{stack-ov}/license-management.html[License Management]. +For more information, see https://www.elastic.co/subscriptions and +{stack-ov}/license-management.html[License management]. -- . Verify that the `xpack.security.enabled` setting is `true` on each node in -your cluster. If you are using a trial license, the default value is `false`. -For more information, see {ref}/security-settings.html[Security Settings in {es}]. +your cluster. If you are using basic or trial licenses, the default value is `false`. +For more information, see {ref}/security-settings.html[Security settings in {es}]. . If you plan to run {es} in a Federal Information Processing Standard (FIPS) 140-2 enabled JVM, see <>. -. Configure Transport Layer Security (TLS/SSL) for internode-communication. +. <>. + -- NOTE: This requirement applies to clusters with more than one node and to clusters with a single node that listens on an external interface. Single-node clusters that use a loopback interface do not have this requirement. For more information, see -{stack-ov}/encrypting-communications.html[Encrypting Communications]. +{stack-ov}/encrypting-communications.html[Encrypting communications]. -- -.. <>. - -.. <>. . If it is not already running, start {es}. @@ -72,14 +65,20 @@ user API. -- -. Choose which types of realms you want to use to authenticate users. -** <>. -** <>. -** <>. -** <>. -** <>. -** <>. -** <>. +. Choose which types of realms you want to use to authenticate users. ++ +-- +TIP: The types of authentication realms that you can enable varies according to +your subscription. For more information, see https://www.elastic.co/subscriptions. + +-- +** <> +** <> +** <> +** <> +** <> +** <> +** <> . Set up roles and users to control access to {es}. + @@ -114,10 +113,13 @@ curl -XPOST -u elastic 'localhost:9200/_security/user/johndoe' -H "Content-Type: // NOTCONSOLE -- -. [[enable-auditing]]Enable auditing to keep track of attempted and successful interactions with - your {es} cluster: +. 
[[enable-auditing]](Optional) Enable auditing to keep track of attempted and +successful interactions with your {es} cluster: + -- +TIP: Audit logging is available with specific subscriptions. For more +information, see https://www.elastic.co/subscriptions. + .. Add the following setting to `elasticsearch.yml` on all nodes in your cluster: + [source,yaml] @@ -134,6 +136,9 @@ Events are logged to a dedicated `_audit.json` file in `ES_HOME/logs`, on each cluster node. -- +To walk through the configuration of {security-features} in {es}, {kib}, {ls}, and {metricbeat}, see +{stack-ov}/security-getting-started.html[Getting started with security]. + :edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/security/securing-communications/securing-elasticsearch.asciidoc include::{es-repo-dir}/security/securing-communications/securing-elasticsearch.asciidoc[] diff --git a/x-pack/docs/en/security/securing-communications.asciidoc b/x-pack/docs/en/security/securing-communications.asciidoc index 63fded729eb8c..6672c0316493e 100644 --- a/x-pack/docs/en/security/securing-communications.asciidoc +++ b/x-pack/docs/en/security/securing-communications.asciidoc @@ -5,8 +5,7 @@ Elasticsearch nodes store data that may be confidential. Attacks on the data may come from the network. These attacks could include sniffing of the data, manipulation of the data, and attempts to gain access to the server and thus the -files storing the data. Securing your nodes is required in order to use a production -license that enables {security-features} and helps reduce the risk from +files storing the data. Securing your nodes helps reduce the risk from network-based attacks. This section shows how to: diff --git a/x-pack/docs/en/watcher/java.asciidoc b/x-pack/docs/en/watcher/java.asciidoc index e5cb6b54b0c65..7224196834f9b 100644 --- a/x-pack/docs/en/watcher/java.asciidoc +++ b/x-pack/docs/en/watcher/java.asciidoc @@ -60,6 +60,7 @@ repositories { // Add the Elasticsearch Maven Repository maven { + name "elastic" url "https://artifacts.elastic.co/maven" } } diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 6ce71982f5b1d..fc5dc839ef05e 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -11,6 +11,9 @@ archivesBaseName = 'x-pack' dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } } subprojects { @@ -187,4 +190,3 @@ integTestCluster { if (integTestCluster.distribution.startsWith("oss-")) { integTest.enabled = false } - diff --git a/x-pack/plugin/ccr/build.gradle b/x-pack/plugin/ccr/build.gradle index a808a7197cccb..4b3ba9307da28 100644 --- a/x-pack/plugin/ccr/build.gradle +++ b/x-pack/plugin/ccr/build.gradle @@ -18,7 +18,7 @@ String[] noSecurityManagerITClasses = [ "**/CloseFollowerIndexIT.class" ] task internalClusterTestNoSecurityManager(type: Test) { description = 'Java fantasy integration tests with no security manager' - + include noSecurityManagerITClasses systemProperty 'es.set.netty.runtime.available.processors', 'false' systemProperty 'tests.security.manager', 'false' @@ -30,7 +30,7 @@ task internalClusterTest(type: Test) { description = 'Java fantasy integration tests' dependsOn internalClusterTestNoSecurityManager mustRunAfter test - + include '**/*IT.class' exclude noSecurityManagerITClasses systemProperty 'es.set.netty.runtime.available.processors', 'false' @@ -52,6 +52,9 @@ dependencies { 
compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('monitoring'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle index cbf30b54d5fdb..cbba093c5526b 100644 --- a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle +++ b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle @@ -4,6 +4,9 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('ccr'), configuration: 'runtime') testCompile project(':x-pack:plugin:ccr:qa') } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle index 7c9c581c5be19..ba4d7ea2064a1 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle @@ -4,6 +4,9 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('ccr'), configuration: 'runtime') testCompile project(':x-pack:plugin:ccr:qa') } diff --git a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle index 6d294c4075595..2ef358b6d735d 100644 --- a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle +++ b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle @@ -4,6 +4,9 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('ccr'), configuration: 'runtime') testCompile project(':x-pack:plugin:ccr:qa:') } diff --git a/x-pack/plugin/ccr/qa/rest/build.gradle b/x-pack/plugin/ccr/qa/rest/build.gradle index b06535a17c096..ba0c05e09791a 100644 --- a/x-pack/plugin/ccr/qa/rest/build.gradle +++ b/x-pack/plugin/ccr/qa/rest/build.gradle @@ -4,6 +4,9 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('ccr'), configuration: 'runtime') } diff --git a/x-pack/plugin/ccr/qa/security/build.gradle b/x-pack/plugin/ccr/qa/security/build.gradle index 0e082f51d71a1..872e99051018c 100644 --- a/x-pack/plugin/ccr/qa/security/build.gradle +++ b/x-pack/plugin/ccr/qa/security/build.gradle @@ -4,6 +4,9 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('ccr'), configuration: 'runtime') testCompile 
project(':x-pack:plugin:ccr:qa') } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java index 20b13474afa82..33b8a274431d2 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexNotFoundException; @@ -61,7 +62,12 @@ private ShardChangesAction() { @Override public Response newResponse() { - return new Response(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return Response::new; } public static class Request extends SingleShardRequest { @@ -146,7 +152,7 @@ public void readFrom(StreamInput in) throws IOException { super.readFrom(in); fromSeqNo = in.readVLong(); maxOperationCount = in.readVInt(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); expectedHistoryUUID = in.readString(); pollTimeout = in.readTimeValue(); maxBatchSize = new ByteSizeValue(in); @@ -246,6 +252,17 @@ public long getTookInMillis() { Response() { } + Response(StreamInput in) throws IOException { + super(in); + mappingVersion = in.readVLong(); + settingsVersion = in.readVLong(); + globalCheckpoint = in.readZLong(); + maxSeqNo = in.readZLong(); + maxSeqNoOfUpdatesOrDeletes = in.readZLong(); + operations = in.readArray(Translog.Operation::readOperation, Translog.Operation[]::new); + tookInMillis = in.readVLong(); + } + Response( final long mappingVersion, final long settingsVersion, @@ -265,15 +282,8 @@ public long getTookInMillis() { } @Override - public void readFrom(final StreamInput in) throws IOException { - super.readFrom(in); - mappingVersion = in.readVLong(); - settingsVersion = in.readVLong(); - globalCheckpoint = in.readZLong(); - maxSeqNo = in.readZLong(); - maxSeqNoOfUpdatesOrDeletes = in.readZLong(); - operations = in.readArray(Translog.Operation::readOperation, Translog.Operation[]::new); - tookInMillis = in.readVLong(); + public void readFrom(final StreamInput in) { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } @Override @@ -459,8 +469,8 @@ protected ShardsIterator shards(ClusterState state, InternalRequest request) { } @Override - protected Response newResponse() { - return new Response(); + protected Writeable.Reader getResponseReader() { + return Response::new; } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java index b00bf15782fa3..5d4564a2030c1 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java @@ -92,8 +92,8 @@ public class ShardFollowTask extends ImmutableFollowParameters implements XPackP public static ShardFollowTask readFrom(StreamInput in) throws IOException { String remoteCluster = 
in.readString(); - ShardId followShardId = ShardId.readShardId(in); - ShardId leaderShardId = ShardId.readShardId(in); + ShardId followShardId = new ShardId(in); + ShardId leaderShardId = new ShardId(in); return new ShardFollowTask(remoteCluster, followShardId, leaderShardId, in); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java index 393548225a0c2..91ec057ac4eb6 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java @@ -77,8 +77,8 @@ protected PutCcrRestoreSessionResponse shardOperation(PutCcrRestoreSessionReques } @Override - protected PutCcrRestoreSessionResponse newResponse() { - return new PutCcrRestoreSessionResponse(); + protected Writeable.Reader getResponseReader() { + return PutCcrRestoreSessionResponse::new; } @Override diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionRequest.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionRequest.java index da0c43116ee76..e348df79223a7 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionRequest.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionRequest.java @@ -37,7 +37,7 @@ public ActionRequestValidationException validate() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); sessionUUID = in.readString(); - shardId = ShardId.readShardId(in); + shardId = new ShardId(in); } @Override diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java index 33c1428c7e19e..2cf6e3bdaf332 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRetentionLeaseIT.java @@ -44,6 +44,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.snapshots.RestoreInfo; import org.elasticsearch.snapshots.RestoreService; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.RemoteTransportException; @@ -87,6 +88,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; +@TestLogging(value = "org.elasticsearch.xpack.ccr:trace") public class CcrRetentionLeaseIT extends CcrIntegTestCase { public static final class RetentionLeaseRenewIntervalSettingPlugin extends Plugin { @@ -615,7 +617,6 @@ public void testRetentionLeaseAdvancesWhileFollowing() throws Exception { }); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/39509") public void testRetentionLeaseRenewalIsCancelledWhenFollowingIsPaused() throws Exception { final String leaderIndex = "leader"; final String followerIndex = "follower"; @@ -922,7 +923,6 @@ public void onResponseReceived( } } - @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/39850") public void testForgetFollower() throws 
Exception { final String leaderIndex = "leader"; final String followerIndex = "follower"; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java index 0e48fc8e57ca3..a5b28caf9dfb2 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java @@ -5,10 +5,11 @@ */ package org.elasticsearch.xpack.ccr.action; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.test.AbstractWireSerializingTestCase; -public class ShardChangesResponseTests extends AbstractStreamableTestCase { +public class ShardChangesResponseTests extends AbstractWireSerializingTestCase { @Override protected ShardChangesAction.Response createTestInstance() { @@ -34,8 +35,7 @@ protected ShardChangesAction.Response createTestInstance() { } @Override - protected ShardChangesAction.Response createBlankInstance() { - return new ShardChangesAction.Response(); + protected Writeable.Reader instanceReader() { + return ShardChangesAction.Response::new; } - } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java index 1326f0ebc79bb..947ce78da2ca3 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowEngineIndexShardTests.java @@ -80,7 +80,7 @@ public void testDoNotFillGaps() throws Exception { replicaRouting.allocationId()); indexShard.updateShardState(primaryRouting, indexShard.getOperationPrimaryTerm() + 1, (shard, listener) -> {}, 0L, Collections.singleton(primaryRouting.allocationId().getId()), - new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(), Collections.emptySet()); + new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build()); final CountDownLatch latch = new CountDownLatch(1); ActionListener actionListener = ActionListener.wrap(releasable -> { diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index a25c53506fb85..c20449724f8e0 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -48,8 +48,12 @@ dependencies { testCompile project(path: ':modules:parent-join', configuration: 'runtime') testCompile project(path: ':modules:lang-mustache', configuration: 'runtime') testCompile project(path: ':modules:analysis-common', configuration: 'runtime') - testCompile(project(':x-pack:license-tools')) { - transitive = false + testCompile ("org.elasticsearch.client:elasticsearch-rest-high-level-client:${version}") + + if (isEclipse == false || project.path == ":x-pack:plugin:core-tests") { + testCompile(project(':x-pack:license-tools')) { + transitive = false + } } } @@ -68,7 +72,11 @@ processResources { if (licenseKey != null) { println "Using provided license key from ${licenseKey}" } else if (snapshot) { - licenseKey = Paths.get(project.projectDir.path, 'snapshot.key') + if (isEclipse) { + licenseKey = Paths.get(project.projectDir.path, 
'../../snapshot.key') + } else { + licenseKey = Paths.get(project.projectDir.path, 'snapshot.key') + } } else { throw new IllegalArgumentException('Property license.key must be set for release build') } @@ -87,6 +95,19 @@ forbiddenPatterns { exclude '**/*.zip' } +if (isEclipse) { + // in eclipse the project is under a fake root, we need to change around the source sets + sourceSets { + if (project.path == ":libs:core") { + main.java.srcDirs = ['java'] + main.resources.srcDirs = ['resources'] + } else { + test.java.srcDirs = ['java'] + test.resources.srcDirs = ['resources'] + } + } +} + compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" diff --git a/x-pack/plugin/core/src/main/eclipse-build.gradle b/x-pack/plugin/core/src/main/eclipse-build.gradle new file mode 100644 index 0000000000000..6bc7562f7fded --- /dev/null +++ b/x-pack/plugin/core/src/main/eclipse-build.gradle @@ -0,0 +1,2 @@ +// this is just shell gradle file for eclipse to have separate projects for core src and tests +apply from: '../../build.gradle' diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java index 62ffd76e8ea05..e39b5b7dcc196 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java @@ -780,22 +780,4 @@ public Builder validate() { } } - /** - * Returns true iff the license is a production licnese - */ - public boolean isProductionLicense() { - switch (operationMode()) { - case MISSING: - case TRIAL: - case BASIC: - return false; - case STANDARD: - case GOLD: - case PLATINUM: - return true; - default: - throw new AssertionError("unknown operation mode: " + operationMode()); - - } - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java index 837caf2da070b..f750d1349a0ad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java @@ -218,10 +218,13 @@ public void registerLicense(final PutLicenseRequest request, final ActionListene } } + // This check would be incorrect if "basic" licenses were allowed here + // because the defaults there mean that security can be "off", even if the setting is "on" + // BUT basic licenses are explicitly excluded earlier in this method, so we don't need to worry if (XPackSettings.SECURITY_ENABLED.get(settings)) { // TODO we should really validate that all nodes have xpack installed and are consistently configured but this // should happen on a different level and not in this code - if (newLicense.isProductionLicense() + if (XPackLicenseState.isTransportTlsRequired(newLicense, settings) && XPackSettings.TRANSPORT_SSL_ENABLED.get(settings) == false && isProductionMode(settings, clusterService.localNode())) { // security is on but TLS is not configured we gonna fail the entire request and throw an exception diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensesMetaData.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensesMetaData.java index 6d001dea516ac..f131b24252e5b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensesMetaData.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensesMetaData.java @@ -166,13 +166,11 @@ public void writeTo(StreamOutput streamOutput) throws IOException { streamOutput.writeBoolean(true); // has a license license.writeTo(streamOutput); } - if (streamOutput.getVersion().onOrAfter(Version.V_6_1_0)) { - if (trialVersion == null) { - streamOutput.writeBoolean(false); - } else { - streamOutput.writeBoolean(true); - Version.writeVersion(trialVersion, streamOutput); - } + if (trialVersion == null) { + streamOutput.writeBoolean(false); + } else { + streamOutput.writeBoolean(true); + Version.writeVersion(trialVersion, streamOutput); } } @@ -182,11 +180,9 @@ public LicensesMetaData(StreamInput streamInput) throws IOException { } else { license = LICENSE_TOMBSTONE; } - if (streamInput.getVersion().onOrAfter(Version.V_6_1_0)) { - boolean hasExercisedTrial = streamInput.readBoolean(); - if (hasExercisedTrial) { - this.trialVersion = Version.readVersion(streamInput); - } + boolean hasExercisedTrial = streamInput.readBoolean(); + if (hasExercisedTrial) { + this.trialVersion = Version.readVersion(streamInput); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 131069d27f628..e206ed3db5149 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -282,7 +282,7 @@ private static class Status { public XPackLicenseState(Settings settings) { this.listeners = new CopyOnWriteArrayList<>(); this.isSecurityEnabled = XPackSettings.SECURITY_ENABLED.get(settings); - this.isSecurityExplicitlyEnabled = isSecurityEnabled && settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()); + this.isSecurityExplicitlyEnabled = isSecurityEnabled && isSecurityExplicitlyEnabled(settings); } private XPackLicenseState(XPackLicenseState xPackLicenseState) { @@ -292,6 +292,10 @@ private XPackLicenseState(XPackLicenseState xPackLicenseState) { this.status = xPackLicenseState.status; } + private static boolean isSecurityExplicitlyEnabled(Settings settings) { + return settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()); + } + /** * Updates the current state of the license, which will change what features are available. 
* @@ -727,6 +731,25 @@ public synchronized boolean isSecurityDisabledByLicenseDefaults() { return false; } + public static boolean isTransportTlsRequired(License license, Settings settings) { + if (license == null) { + return false; + } + switch (license.operationMode()) { + case STANDARD: + case GOLD: + case PLATINUM: + return XPackSettings.SECURITY_ENABLED.get(settings); + case BASIC: + return XPackSettings.SECURITY_ENABLED.get(settings) && isSecurityExplicitlyEnabled(settings); + case MISSING: + case TRIAL: + return false; + default: + throw new AssertionError("unknown operation mode [" + license.operationMode() + "]"); + } + } + private static boolean isSecurityEnabled(final OperationMode mode, final boolean isSecurityExplicitlyEnabled, final boolean isSecurityEnabled) { switch (mode) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index 0eeb173b8b84e..848aacd621111 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.core; -import org.apache.logging.log4j.LogManager; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.xpack.core.security.SecurityField; @@ -15,13 +15,10 @@ import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings; import org.elasticsearch.xpack.core.ssl.VerificationMode; -import javax.crypto.Cipher; import javax.crypto.SecretKeyFactory; -import javax.net.ssl.SSLContext; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; @@ -38,7 +35,6 @@ private XPackSettings() { throw new IllegalStateException("Utility class should not be instantiated"); } - /** * Setting for controlling whether or not CCR is enabled. */ @@ -122,28 +118,34 @@ private XPackSettings() { * SSL settings. These are the settings that are specifically registered for SSL. 
Many are private as we do not explicitly use them * but instead parse based on a prefix (eg *.ssl.*) */ - public static final List DEFAULT_CIPHERS; - - static { - List ciphers = Arrays.asList("TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", - "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", "TLS_RSA_WITH_AES_128_CBC_SHA256", - "TLS_RSA_WITH_AES_128_CBC_SHA"); - try { - final boolean use256Bit = Cipher.getMaxAllowedKeyLength("AES") > 128; - if (use256Bit) { - List strongerCiphers = new ArrayList<>(ciphers.size() * 2); - strongerCiphers.addAll(Arrays.asList("TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384", - "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", - "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", "TLS_RSA_WITH_AES_256_CBC_SHA256", "TLS_RSA_WITH_AES_256_CBC_SHA")); - strongerCiphers.addAll(ciphers); - ciphers = strongerCiphers; - } - } catch (NoSuchAlgorithmException e) { - // ignore it here - there will be issues elsewhere and its not nice to throw in a static initializer - } - - DEFAULT_CIPHERS = ciphers; - } + private static final List JDK11_CIPHERS = List.of( + "TLS_AES_256_GCM_SHA384", "TLS_AES_128_GCM_SHA256", // TLSv1.3 cipher has PFS, AEAD, hardware support + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", // PFS, AEAD, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", // PFS, AEAD, hardware support + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", // PFS, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", // PFS, hardware support + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", // PFS, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", // PFS, hardware support + "TLS_RSA_WITH_AES_256_GCM_SHA384", "TLS_RSA_WITH_AES_128_GCM_SHA256", // AEAD, hardware support + "TLS_RSA_WITH_AES_256_CBC_SHA256", "TLS_RSA_WITH_AES_128_CBC_SHA256", // hardware support + "TLS_RSA_WITH_AES_256_CBC_SHA", "TLS_RSA_WITH_AES_128_CBC_SHA"); // hardware support + + private static final List JDK12_CIPHERS = List.of( + "TLS_AES_256_GCM_SHA384", "TLS_AES_128_GCM_SHA256", // TLSv1.3 cipher has PFS, AEAD, hardware support + "TLS_CHACHA20_POLY1305_SHA256", // TLSv1.3 cipher has PFS, AEAD + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", // PFS, AEAD, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", // PFS, AEAD, hardware support + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", // PFS, AEAD + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", // PFS, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", // PFS, hardware support + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", // PFS, hardware support + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", // PFS, hardware support + "TLS_RSA_WITH_AES_256_GCM_SHA384", "TLS_RSA_WITH_AES_128_GCM_SHA256", // AEAD, hardware support + "TLS_RSA_WITH_AES_256_CBC_SHA256", "TLS_RSA_WITH_AES_128_CBC_SHA256", // hardware support + "TLS_RSA_WITH_AES_256_CBC_SHA", "TLS_RSA_WITH_AES_128_CBC_SHA"); // hardware support + + public static final List DEFAULT_CIPHERS = + 
JavaVersion.current().compareTo(JavaVersion.parse("12")) > -1 ? JDK12_CIPHERS : JDK11_CIPHERS; /* * Do not allow insecure hashing algorithms to be used for password hashing @@ -164,19 +166,7 @@ private XPackSettings() { } }, Setting.Property.NodeScope); - public static final List DEFAULT_SUPPORTED_PROTOCOLS; - - static { - boolean supportsTLSv13 = false; - try { - SSLContext.getInstance("TLSv1.3"); - supportsTLSv13 = true; - } catch (NoSuchAlgorithmException e) { - LogManager.getLogger(XPackSettings.class).debug("TLSv1.3 is not supported", e); - } - DEFAULT_SUPPORTED_PROTOCOLS = supportsTLSv13 ? - Arrays.asList("TLSv1.3", "TLSv1.2", "TLSv1.1") : Arrays.asList("TLSv1.2", "TLSv1.1"); - } + public static final List DEFAULT_SUPPORTED_PROTOCOLS = List.of("TLSv1.3", "TLSv1.2", "TLSv1.1"); public static final SSLClientAuth CLIENT_AUTH_DEFAULT = SSLClientAuth.REQUIRED; public static final SSLClientAuth HTTP_CLIENT_AUTH_DEFAULT = SSLClientAuth.NONE; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java index 71bf14cdeb4a5..c61ed2ddde8be 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java @@ -27,6 +27,7 @@ public final class DataFrameField { public static final ParseField SOURCE = new ParseField("source"); public static final ParseField DESTINATION = new ParseField("dest"); public static final ParseField FORCE = new ParseField("force"); + public static final ParseField MAX_PAGE_SEARCH_SIZE = new ParseField("max_page_search_size"); /** * Fields for checkpointing diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java index 6b7de0ab80f3a..715fa0f5dc78b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformAction.java @@ -7,25 +7,18 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.action.support.tasks.BaseTasksRequest; -import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.tasks.Task; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.utils.ExceptionsHelper; import java.io.IOException; -import java.util.Collections; -import java.util.List; import java.util.Objects; -public class DeleteDataFrameTransformAction extends Action { +public class DeleteDataFrameTransformAction extends Action { public static final DeleteDataFrameTransformAction INSTANCE = new 
DeleteDataFrameTransformAction(); public static final String NAME = "cluster:admin/data_frame/delete"; @@ -35,17 +28,21 @@ private DeleteDataFrameTransformAction() { } @Override - public Response newResponse() { + public AcknowledgedResponse newResponse() { throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } @Override - public Writeable.Reader getResponseReader() { - return Response::new; + public Writeable.Reader getResponseReader() { + return in -> { + AcknowledgedResponse response = new AcknowledgedResponse(); + response.readFrom(in); + return response; + }; } - public static class Request extends BaseTasksRequest { - private final String id; + public static class Request extends MasterNodeRequest { + private String id; public Request(String id) { this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName()); @@ -60,11 +57,6 @@ public String getId() { return id; } - @Override - public boolean match(Task task) { - return task.getDescription().equals(DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + id); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -94,59 +86,4 @@ public boolean equals(Object obj) { return Objects.equals(id, other.id); } } - - public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { - - private final boolean acknowledged; - - public Response(StreamInput in) throws IOException { - super(in); - acknowledged = in.readBoolean(); - } - - public Response(boolean acknowledged, List taskFailures, List nodeFailures) { - super(taskFailures, nodeFailures); - this.acknowledged = acknowledged; - } - - public Response(boolean acknowledged) { - this(acknowledged, Collections.emptyList(), Collections.emptyList()); - } - - public boolean isDeleted() { - return acknowledged; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBoolean(acknowledged); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - { - toXContentCommon(builder, params); - builder.field("acknowledged", acknowledged); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - DeleteDataFrameTransformAction.Response response = (DeleteDataFrameTransformAction.Response) o; - return super.equals(o) && acknowledged == response.acknowledged; - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), acknowledged); - } - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PutDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PutDataFrameTransformAction.java index 0f6cc63f98851..2608fb87761f9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PutDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/PutDataFrameTransformAction.java @@ -20,6 +20,8 @@ import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; + public class PutDataFrameTransformAction extends Action { public static final PutDataFrameTransformAction INSTANCE = new PutDataFrameTransformAction(); @@ -53,7 +55,19 @@ public static 
Request fromXContent(final XContentParser parser, final String id) @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = null; + if(config.getPivotConfig() != null + && config.getPivotConfig().getMaxPageSearchSize() != null + && (config.getPivotConfig().getMaxPageSearchSize() < 10 || config.getPivotConfig().getMaxPageSearchSize() > 10_000)) { + validationException = addValidationError( + "pivot.max_page_search_size [" + + config.getPivotConfig().getMaxPageSearchSize() + "] must be greater than 10 and less than 10,000", + validationException); + } + for(String failure : config.getPivotConfig().aggFieldValidation()) { + validationException = addValidationError(failure, validationException); + } + return validationException; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java index e7a43f252d637..99699c3a48cb5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/action/StopDataFrameTransformAction.java @@ -5,8 +5,10 @@ */ package org.elasticsearch.xpack.core.dataframe.action; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.common.Nullable; @@ -24,6 +26,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Objects; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -167,6 +170,13 @@ public Response(boolean stopped) { this.stopped = stopped; } + public Response(List taskFailures, + List nodeFailures, + boolean stopped) { + super(taskFailures, nodeFailures); + this.stopped = stopped; + } + public boolean isStopped() { return stopped; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStats.java index c2981c40dfdc1..8f83fd375490d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameIndexerTransformStats.java @@ -109,15 +109,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(transformId); } - /** - * Get the persisted stats document name from the Data Frame Transformer Id. 
- * - * @return The id of document the where the transform stats are persisted - */ - public static String documentId(String transformId) { - return NAME + "-" + transformId; - } - @Nullable public String getTransformId() { return transformId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformProgress.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformProgress.java index 5b7346bca2a38..0741be296ed4d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformProgress.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformProgress.java @@ -23,9 +23,9 @@ public class DataFrameTransformProgress implements Writeable, ToXContentObject { - private static final ParseField TOTAL_DOCS = new ParseField("total_docs"); - private static final ParseField DOCS_REMAINING = new ParseField("docs_remaining"); - private static final String PERCENT_COMPLETE = "percent_complete"; + public static final ParseField TOTAL_DOCS = new ParseField("total_docs"); + public static final ParseField DOCS_REMAINING = new ParseField("docs_remaining"); + public static final String PERCENT_COMPLETE = "percent_complete"; public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "data_frame_transform_progress", diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformState.java index bc1b710cd2e6f..d4480caa0b9a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformState.java @@ -42,12 +42,12 @@ public class DataFrameTransformState implements Task.Status, PersistentTaskState @Nullable private final String reason; - private static final ParseField TASK_STATE = new ParseField("task_state"); - private static final ParseField INDEXER_STATE = new ParseField("indexer_state"); - private static final ParseField CURRENT_POSITION = new ParseField("current_position"); - private static final ParseField CHECKPOINT = new ParseField("checkpoint"); - private static final ParseField REASON = new ParseField("reason"); - private static final ParseField PROGRESS = new ParseField("progress"); + public static final ParseField TASK_STATE = new ParseField("task_state"); + public static final ParseField INDEXER_STATE = new ParseField("indexer_state"); + public static final ParseField CURRENT_POSITION = new ParseField("current_position"); + public static final ParseField CHECKPOINT = new ParseField("checkpoint"); + public static final ParseField REASON = new ParseField("reason"); + public static final ParseField PROGRESS = new ParseField("progress"); @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateAndStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateAndStats.java index 2a145ba260f4e..d28d64bdb1e82 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateAndStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/DataFrameTransformStateAndStats.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.dataframe.transforms; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -14,6 +15,7 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.indexing.IndexerState; @@ -22,7 +24,7 @@ public class DataFrameTransformStateAndStats implements Writeable, ToXContentObject { - private static final String NAME = "data_frame_transform_state_and_stats"; + public static final String NAME = "data_frame_transform_state_and_stats"; public static final ParseField STATE_FIELD = new ParseField("state"); public static final ParseField CHECKPOINTING_INFO_FIELD = new ParseField("checkpointing"); @@ -47,6 +49,10 @@ public class DataFrameTransformStateAndStats implements Writeable, ToXContentObj (p, c) -> DataFrameTransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD); } + public static DataFrameTransformStateAndStats fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + public static DataFrameTransformStateAndStats initialStateAndStats(String id) { return initialStateAndStats(id, new DataFrameIndexerTransformStats(id)); } @@ -58,6 +64,15 @@ public static DataFrameTransformStateAndStats initialStateAndStats(String id, Da DataFrameTransformCheckpointingInfo.EMPTY); } + /** + * Get the persisted state and stats document name from the Data Frame Transform Id. 
+ * + * @return The id of document the where the transform stats are persisted + */ + public static String documentId(String transformId) { + return NAME + "-" + transformId; + } + public DataFrameTransformStateAndStats(String id, DataFrameTransformState state, DataFrameIndexerTransformStats stats, DataFrameTransformCheckpointingInfo checkpointingInfo) { this.id = Objects.requireNonNull(id); @@ -73,6 +88,11 @@ public DataFrameTransformStateAndStats(StreamInput in) throws IOException { this.checkpointingInfo = new DataFrameTransformCheckpointingInfo(in); } + @Nullable + public String getTransformId() { + return transformStats.getTransformId(); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -80,6 +100,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(STATE_FIELD.getPreferredName(), transformState, params); builder.field(DataFrameField.STATS_FIELD.getPreferredName(), transformStats, params); builder.field(CHECKPOINTING_INFO_FIELD.getPreferredName(), checkpointingInfo, params); + if (params.paramAsBoolean(DataFrameField.FOR_INTERNAL_STORAGE, false)) { + builder.field(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), NAME); + } builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/PivotConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/PivotConfig.java index c1c894e2971ae..ab2f7d489ac9a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/PivotConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/PivotConfig.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.dataframe.transforms.pivot; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -13,11 +14,17 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.utils.ExceptionsHelper; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; import java.util.Map.Entry; import java.util.Objects; @@ -29,6 +36,7 @@ public class PivotConfig implements Writeable, ToXContentObject { private static final String NAME = "data_frame_transform_pivot"; private final GroupConfig groups; private final AggregationConfig aggregationConfig; + private final Integer maxPageSearchSize; private static final ConstructingObjectParser STRICT_PARSER = createParser(false); private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); @@ -55,7 +63,7 @@ private static ConstructingObjectParser createParser(boolean throw new IllegalArgumentException("Required [aggregations]"); } - return new PivotConfig(groups, aggregationConfig); + return new PivotConfig(groups, aggregationConfig, 
(Integer)args[3]); }); parser.declareObject(constructorArg(), @@ -63,18 +71,21 @@ private static ConstructingObjectParser createParser(boolean parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), DataFrameField.AGGREGATIONS); parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), DataFrameField.AGGS); + parser.declareInt(optionalConstructorArg(), DataFrameField.MAX_PAGE_SEARCH_SIZE); return parser; } - public PivotConfig(final GroupConfig groups, final AggregationConfig aggregationConfig) { + public PivotConfig(final GroupConfig groups, final AggregationConfig aggregationConfig, Integer maxPageSearchSize) { this.groups = ExceptionsHelper.requireNonNull(groups, DataFrameField.GROUP_BY.getPreferredName()); this.aggregationConfig = ExceptionsHelper.requireNonNull(aggregationConfig, DataFrameField.AGGREGATIONS.getPreferredName()); + this.maxPageSearchSize = maxPageSearchSize; } public PivotConfig(StreamInput in) throws IOException { this.groups = new GroupConfig(in); this.aggregationConfig = new AggregationConfig(in); + this.maxPageSearchSize = in.readOptionalInt(); } @Override @@ -82,6 +93,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field(DataFrameField.GROUP_BY.getPreferredName(), groups); builder.field(DataFrameField.AGGREGATIONS.getPreferredName(), aggregationConfig); + if (maxPageSearchSize != null) { + builder.field(DataFrameField.MAX_PAGE_SEARCH_SIZE.getPreferredName(), maxPageSearchSize); + } builder.endObject(); return builder; } @@ -107,6 +121,7 @@ public void toCompositeAggXContent(XContentBuilder builder, Params params) throw public void writeTo(StreamOutput out) throws IOException { groups.writeTo(out); aggregationConfig.writeTo(out); + out.writeOptionalInt(maxPageSearchSize); } public AggregationConfig getAggregationConfig() { @@ -117,6 +132,11 @@ public GroupConfig getGroupConfig() { return groups; } + @Nullable + public Integer getMaxPageSearchSize() { + return maxPageSearchSize; + } + @Override public boolean equals(Object other) { if (this == other) { @@ -129,19 +149,77 @@ public boolean equals(Object other) { final PivotConfig that = (PivotConfig) other; - return Objects.equals(this.groups, that.groups) && Objects.equals(this.aggregationConfig, that.aggregationConfig); + return Objects.equals(this.groups, that.groups) + && Objects.equals(this.aggregationConfig, that.aggregationConfig) + && Objects.equals(this.maxPageSearchSize, that.maxPageSearchSize); } @Override public int hashCode() { - return Objects.hash(groups, aggregationConfig); + return Objects.hash(groups, aggregationConfig, maxPageSearchSize); } public boolean isValid() { return groups.isValid() && aggregationConfig.isValid(); } + public List aggFieldValidation() { + if ((aggregationConfig.isValid() && groups.isValid()) == false) { + return Collections.emptyList(); + } + List usedNames = new ArrayList<>(); + // TODO this will need to change once we allow multi-bucket aggs + field merging + aggregationConfig.getAggregatorFactories().forEach(agg -> addAggNames(agg, usedNames)); + aggregationConfig.getPipelineAggregatorFactories().forEach(agg -> addAggNames(agg, usedNames)); + usedNames.addAll(groups.getGroups().keySet()); + return aggFieldValidation(usedNames); + } + public static PivotConfig fromXContent(final XContentParser parser, boolean lenient) throws IOException { return lenient ? 
LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); } + + /** + * Does the following checks: + * + * - determines if there are any full duplicate names between the aggregation names and the group by names. + * - finds if there are conflicting name paths that could cause a failure later when the config is started. + * + * Examples showing conflicting field name paths: + * + * aggName1: foo.bar.baz + * aggName2: foo.bar + * + * This should fail as aggName1 will cause foo.bar to be an object, causing a conflict with the use of foo.bar in aggName2. + * @param usedNames The aggregation and group_by names + * @return List of validation failure messages + */ + static List aggFieldValidation(List usedNames) { + if (usedNames == null || usedNames.isEmpty()) { + return Collections.emptyList(); + } + List validationFailures = new ArrayList<>(); + + usedNames.sort(String::compareTo); + for (int i = 0; i < usedNames.size() - 1; i++) { + if (usedNames.get(i+1).startsWith(usedNames.get(i) + ".")) { + validationFailures.add("field [" + usedNames.get(i) + "] cannot be both an object and a field"); + } + if (usedNames.get(i+1).equals(usedNames.get(i))) { + validationFailures.add("duplicate field [" + usedNames.get(i) + "] detected"); + } + } + return validationFailures; + } + + + private static void addAggNames(AggregationBuilder aggregationBuilder, Collection names) { + names.add(aggregationBuilder.getName()); + aggregationBuilder.getSubAggregations().forEach(agg -> addAggNames(agg, names)); + aggregationBuilder.getPipelineAggregations().forEach(agg -> addAggNames(agg, names)); + } + + private static void addAggNames(PipelineAggregationBuilder pipelineAggregationBuilder, Collection names) { + names.add(pipelineAggregationBuilder.getName()); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java index ec7e0de9e34fc..80b0378ae35ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexer.java @@ -16,15 +16,18 @@ import java.util.List; import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; /** * An abstract class that builds an index incrementally. A background job can be launched using {@link #maybeTriggerAsyncJob(long)}, * it will create the index from the source index up to the last complete bucket that is allowed to be built (based on job position). * Only one background job can run simultaneously and {@link #onFinish} is called when the job - * finishes. {@link #onFailure(Exception)} is called if the job fails with an exception and {@link #onAbort()} is called if the indexer is - * aborted while a job is running. The indexer must be started ({@link #start()} to allow a background job to run when - * {@link #maybeTriggerAsyncJob(long)} is called. {@link #stop()} can be used to stop the background job without aborting the indexer. + * finishes. {@link #onStop()} is called after the current search returns when the job is stopped early via a call + * to {@link #stop()}. {@link #onFailure(Exception)} is called if the job fails with an exception and {@link #onAbort()} + * is called if the indexer is aborted while a job is running. 
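The aggFieldValidation check added to PivotConfig above sorts the collected group_by and aggregation names and compares each name with its successor, flagging exact duplicates and dotted-path conflicts. A standalone sketch of that same check (not the actual package-private method; the sample names are made up) illustrates the two failure messages:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Standalone sketch of the duplicate/path-conflict check added above (not the real class).
    public class AggNameValidationSketch {
        static List<String> validate(List<String> usedNames) {
            List<String> failures = new ArrayList<>();
            usedNames.sort(String::compareTo);
            for (int i = 0; i < usedNames.size() - 1; i++) {
                // "foo.bar" followed by "foo.bar.baz" would force foo.bar to be both a field and an object
                if (usedNames.get(i + 1).startsWith(usedNames.get(i) + ".")) {
                    failures.add("field [" + usedNames.get(i) + "] cannot be both an object and a field");
                }
                if (usedNames.get(i + 1).equals(usedNames.get(i))) {
                    failures.add("duplicate field [" + usedNames.get(i) + "] detected");
                }
            }
            return failures;
        }

        public static void main(String[] args) {
            // group_by and aggregation names as aggFieldValidation() would collect them
            List<String> names = new ArrayList<>(Arrays.asList("foo.bar.baz", "foo.bar", "user", "user"));
            // prints the object/field conflict for foo.bar and the duplicate for user
            validate(names).forEach(System.out::println);
        }
    }
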
The indexer must be started ({@link #start()} + * to allow a background job to run when {@link #maybeTriggerAsyncJob(long)} is called. + * {@link #stop()} can be used to stop the background job without aborting the indexer. * * In a nutshell this is a 2 cycle engine: 1st it sends a query, 2nd it indexes documents based on the response, sends the next query, * indexes, queries, indexes, ... until a condition lets the engine pause until the source provides new input. @@ -84,21 +87,29 @@ public synchronized IndexerState start() { /** * Sets the internal state to {@link IndexerState#STOPPING} if an async job is - * running in the background. If there is no job running when this function is - * called, the state is directly set to {@link IndexerState#STOPPED}. + * running in the background, {@link #onStop()} will be called when the background job + * detects that the indexer is stopped. + * If there is no job running when this function is called + * the state is set to {@link IndexerState#STOPPED} and {@link #onStop()} called directly. * * @return The new state for the indexer (STOPPED, STOPPING or ABORTING if the job was already aborted). */ public synchronized IndexerState stop() { + AtomicBoolean wasStartedAndSetStopped = new AtomicBoolean(false); IndexerState currentState = state.updateAndGet(previousState -> { if (previousState == IndexerState.INDEXING) { return IndexerState.STOPPING; } else if (previousState == IndexerState.STARTED) { + wasStartedAndSetStopped.set(true); return IndexerState.STOPPED; } else { return previousState; } }); + + if (wasStartedAndSetStopped.get()) { + onStop(); + } return currentState; } @@ -251,6 +262,14 @@ public synchronized boolean maybeTriggerAsyncJob(long now) { */ protected abstract void onFinish(ActionListener listener); + /** + * Called when the indexer is stopped. This is only called when the indexer is stopped + * via {@link #stop()} as opposed to {@link #onFinish(ActionListener)} which is called + * when the indexer's work is done. + */ + protected void onStop() { + } + /** * Called when a background job detects that the indexer is aborted causing the * async execution to stop. 
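Per the revised stop() above, onStop() now runs synchronously when the indexer was only STARTED, and is deferred to the background thread (via the STOPPING state, handled in finishAndSetState) when a search is in flight. A simplified model of that hand-off, assuming nothing beyond what the diff shows, looks roughly like this:

    import java.util.concurrent.atomic.AtomicBoolean;
    import java.util.concurrent.atomic.AtomicReference;

    // Simplified model of the stop()/onStop() hand-off; names mirror the diff but this is
    // not the actual AsyncTwoPhaseIndexer.
    class IndexerStopSketch {
        enum State { STARTED, INDEXING, STOPPING, STOPPED, ABORTING }

        // assume start() has already moved the state to STARTED
        private final AtomicReference<State> state = new AtomicReference<>(State.STARTED);

        State stop() {
            AtomicBoolean wasStartedAndSetStopped = new AtomicBoolean(false);
            State current = state.updateAndGet(previous -> {
                if (previous == State.INDEXING) {
                    return State.STOPPING;      // background job will call onStop() once the search returns
                } else if (previous == State.STARTED) {
                    wasStartedAndSetStopped.set(true);
                    return State.STOPPED;       // no job in flight, stop immediately
                } else {
                    return previous;
                }
            });
            if (wasStartedAndSetStopped.get()) {
                onStop();                       // called synchronously, exactly once
            }
            return current;
        }

        void onStop() {
            System.out.println("indexer stopped");
        }
    }
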
@@ -276,6 +295,7 @@ private IndexerState finishAndSetState() { case STOPPING: // must be started again + onStop(); return IndexerState.STOPPED; case ABORTING: diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AllocateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AllocateAction.java index 7843fa7d86e0c..cbfec7b91029f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AllocateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AllocateAction.java @@ -149,13 +149,6 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { return Arrays.asList(allocateStep, routedCheckStep); } - @Override - public List toStepKeys(String phase) { - StepKey allocateKey = new StepKey(phase, NAME, NAME); - StepKey allocationRoutedKey = new StepKey(phase, NAME, AllocationRoutedStep.NAME); - return Arrays.asList(allocateKey, allocationRoutedKey); - } - @Override public int hashCode() { return Objects.hash(numberOfReplicas, include, exclude, require); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/DeleteAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/DeleteAction.java index b61534e497067..ae8eaef670916 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/DeleteAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/DeleteAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; import java.io.IOException; import java.util.Arrays; @@ -67,14 +66,6 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) return Arrays.asList(waitForNoFollowersStep, deleteStep); } - @Override - public List toStepKeys(String phase) { - Step.StepKey waitForNoFollowerStepKey = new Step.StepKey(phase, NAME, WaitForNoFollowersStep.NAME); - Step.StepKey deleteStepKey = new Step.StepKey(phase, NAME, DeleteStep.NAME); - - return Arrays.asList(waitForNoFollowerStepKey, deleteStepKey); - } - @Override public int hashCode() { return 1; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ForceMergeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ForceMergeAction.java index 2c4508a8355f0..ace29f6b465ae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ForceMergeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ForceMergeAction.java @@ -98,14 +98,6 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) return Arrays.asList(readOnlyStep, forceMergeStep, segmentCountStep); } - @Override - public List toStepKeys(String phase) { - StepKey readOnlyKey = new StepKey(phase, NAME, ReadOnlyAction.NAME); - StepKey forceMergeKey = new StepKey(phase, NAME, ForceMergeStep.NAME); - StepKey countKey = new StepKey(phase, NAME, SegmentCountStep.NAME); - return Arrays.asList(readOnlyKey, forceMergeKey, countKey); - } - @Override public int hashCode() { return Objects.hash(maxNumSegments); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeAction.java index 63dbedadd4fe4..7cffaed80917d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/FreezeAction.java @@ -64,12 +64,6 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { return Arrays.asList(freezeStep); } - @Override - public List toStepKeys(String phase) { - StepKey freezeStepKey = new StepKey(phase, NAME, FreezeStep.NAME); - return Arrays.asList(freezeStepKey); - } - @Override public int hashCode() { return 1; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/LifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/LifecycleAction.java index 3e84813274d83..d6ef78496bb4d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/LifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/LifecycleAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; import java.util.List; @@ -30,15 +29,6 @@ public interface LifecycleAction extends ToXContentObject, NamedWriteable { */ List toSteps(Client client, String phase, @Nullable Step.StepKey nextStepKey); - /** - * - * @param phase - * the name of the phase this action is being executed within - * @return the {@link StepKey}s for the steps which will be executed in this - * action - */ - List toStepKeys(String phase); - /** * @return true if this action is considered safe. 
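With toStepKeys(String) removed from LifecycleAction and from each of its implementations above, a caller that still needs the keys can presumably derive them from toSteps(...) itself. A hypothetical helper along those lines (the null nextStepKey and the helper itself are assumptions, not code from this change):

    import org.elasticsearch.client.Client;
    import org.elasticsearch.xpack.core.indexlifecycle.LifecycleAction;
    import org.elasticsearch.xpack.core.indexlifecycle.Step;

    import java.util.List;
    import java.util.stream.Collectors;

    // Hypothetical caller-side replacement for the removed toStepKeys(phase).
    final class StepKeysSketch {
        static List<Step.StepKey> stepKeys(LifecycleAction action, Client client, String phase) {
            return action.toSteps(client, phase, null).stream()
                .map(Step::getKey)
                .collect(Collectors.toList());
        }
    }
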
An action is not safe if * it will produce unwanted side effects or will get stuck when the diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ReadOnlyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ReadOnlyAction.java index e338d75a98f82..ea34886508030 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ReadOnlyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ReadOnlyAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; import java.io.IOException; import java.util.Collections; @@ -65,11 +64,6 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) Settings readOnlySettings = Settings.builder().put(IndexMetaData.SETTING_BLOCKS_WRITE, true).build(); return Collections.singletonList(new UpdateSettingsStep(key, nextStepKey, client, readOnlySettings)); } - - @Override - public List toStepKeys(String phase) { - return Collections.singletonList(new Step.StepKey(phase, NAME, NAME)); - } @Override public int hashCode() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/RolloverAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/RolloverAction.java index 25346fefa3149..280f8561c35f2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/RolloverAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/RolloverAction.java @@ -151,15 +151,6 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) return Arrays.asList(waitForRolloverReadyStep, rolloverStep, updateDateStep, setIndexingCompleteStep); } - @Override - public List toStepKeys(String phase) { - StepKey rolloverReadyStepKey = new StepKey(phase, NAME, WaitForRolloverReadyStep.NAME); - StepKey rolloverStepKey = new StepKey(phase, NAME, RolloverStep.NAME); - StepKey updateDateStepKey = new StepKey(phase, NAME, UpdateRolloverLifecycleDateStep.NAME); - StepKey setIndexingCompleteStepKey = new StepKey(phase, NAME, INDEXING_COMPLETE_STEP_NAME); - return Arrays.asList(rolloverReadyStepKey, rolloverStepKey, updateDateStepKey, setIndexingCompleteStepKey); - } - @Override public int hashCode() { return Objects.hash(maxSize, maxAge, maxDocs); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityAction.java index 507da4613e22a..1bc09e7f42ebf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetPriorityAction.java @@ -90,11 +90,6 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { return Collections.singletonList(new UpdateSettingsStep(key, nextStepKey, client, indexPriority)); } - @Override - public List toStepKeys(String phase) { - return Collections.singletonList(new StepKey(phase, NAME, NAME)); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ShrinkAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ShrinkAction.java index c1b3fb2422965..5fc4b06ec57b6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ShrinkAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/ShrinkAction.java @@ -111,22 +111,6 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) shrink, allocated, copyMetadata, aliasSwapAndDelete, waitOnShrinkTakeover); } - @Override - public List toStepKeys(String phase) { - StepKey conditionalSkipKey = new StepKey(phase, NAME, BranchingStep.NAME); - StepKey waitForNoFollowerStepKey = new StepKey(phase, NAME, WaitForNoFollowersStep.NAME); - StepKey readOnlyKey = new StepKey(phase, NAME, ReadOnlyAction.NAME); - StepKey setSingleNodeKey = new StepKey(phase, NAME, SetSingleNodeAllocateStep.NAME); - StepKey checkShrinkReadyKey = new StepKey(phase, NAME, CheckShrinkReadyStep.NAME); - StepKey shrinkKey = new StepKey(phase, NAME, ShrinkStep.NAME); - StepKey enoughShardsKey = new StepKey(phase, NAME, ShrunkShardsAllocatedStep.NAME); - StepKey copyMetadataKey = new StepKey(phase, NAME, CopyExecutionStateStep.NAME); - StepKey aliasKey = new StepKey(phase, NAME, ShrinkSetAliasStep.NAME); - StepKey isShrunkIndexKey = new StepKey(phase, NAME, ShrunkenIndexCheckStep.NAME); - return Arrays.asList(conditionalSkipKey, waitForNoFollowerStepKey, readOnlyKey, setSingleNodeKey, checkShrinkReadyKey, shrinkKey, - enoughShardsKey, copyMetadataKey, aliasKey, isShrunkIndexKey); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowAction.java index 20a0fb75b9daa..d8d855b0af2ad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/UnfollowAction.java @@ -54,19 +54,6 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { return Arrays.asList(step1, step2, step3, step4, step5, step6, step7); } - @Override - public List toStepKeys(String phase) { - StepKey indexingCompleteStep = new StepKey(phase, NAME, WaitForIndexingCompleteStep.NAME); - StepKey waitForFollowShardTasksStep = new StepKey(phase, NAME, WaitForFollowShardTasksStep.NAME); - StepKey pauseFollowerIndexStep = new StepKey(phase, NAME, PauseFollowerIndexStep.NAME); - StepKey closeFollowerIndexStep = new StepKey(phase, NAME, CloseFollowerIndexStep.NAME); - StepKey unfollowIndexStep = new StepKey(phase, NAME, UnfollowFollowIndexStep.NAME); - StepKey openFollowerIndexStep = new StepKey(phase, NAME, OpenFollowerIndexStep.NAME); - StepKey waitForYellowStep = new StepKey(phase, NAME, WaitForYellowStep.NAME); - return Arrays.asList(indexingCompleteStep, waitForFollowShardTasksStep, pauseFollowerIndexStep, - closeFollowerIndexStep, unfollowIndexStep, openFollowerIndexStep, waitForYellowStep); - } - @Override public boolean isSafeAction() { // There are no settings to change, so therefor this action should be safe: diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java index 755d6faef0ba2..da38d1d2903a8 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -44,11 +43,7 @@ public MachineLearningFeatureSetUsage(StreamInput in) throws IOException { super(in); this.jobsUsage = in.readMap(); this.datafeedsUsage = in.readMap(); - if (in.getVersion().onOrAfter(Version.V_6_5_0)) { - this.nodeCount = in.readInt(); - } else { - this.nodeCount = -1; - } + this.nodeCount = in.readInt(); } @Override @@ -56,9 +51,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeMap(jobsUsage); out.writeMap(datafeedsUsage); - if (out.getVersion().onOrAfter(Version.V_6_5_0)) { - out.writeInt(nodeCount); - } + out.writeInt(nodeCount); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java index 2ad999d82ade0..0f502577195dd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java @@ -106,7 +106,7 @@ public boolean isUpgradeMode() { @Override public Version getMinimalSupportedVersion() { - return Version.V_6_0_0_alpha1; + return Version.CURRENT.minimumIndexCompatibilityVersion(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java index 9bc413b2e220f..95ec597bb9cd8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.support.tasks.BaseTasksRequest; @@ -91,9 +90,7 @@ public Request(StreamInput in) throws IOException { force = in.readBoolean(); openJobIds = in.readStringArray(); local = in.readBoolean(); - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - allowNoJobs = in.readBoolean(); - } + allowNoJobs = in.readBoolean(); } @Override @@ -104,9 +101,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(force); out.writeStringArray(openJobIds); out.writeBoolean(local); - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - out.writeBoolean(allowNoJobs); - } + out.writeBoolean(allowNoJobs); } public Request(String jobId) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java index 59589fa34ef9d..950fa58af95c8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import 
org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; @@ -58,18 +57,14 @@ public Request() { public Request(StreamInput in) throws IOException { super(in); datafeedId = in.readString(); - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - allowNoDatafeeds = in.readBoolean(); - } + allowNoDatafeeds = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(datafeedId); - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - out.writeBoolean(allowNoDatafeeds); - } + out.writeBoolean(allowNoDatafeeds); } public String getDatafeedId() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java index 6dbb86fbcd082..39055501444f5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; @@ -62,18 +61,14 @@ public Request() {} public Request(StreamInput in) throws IOException { super(in); datafeedId = in.readString(); - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - allowNoDatafeeds = in.readBoolean(); - } + allowNoDatafeeds = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(datafeedId); - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - out.writeBoolean(allowNoDatafeeds); - } + out.writeBoolean(allowNoDatafeeds); } public String getDatafeedId() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java index 18428eff13758..98b1eb7a118f9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; @@ -56,18 +55,14 @@ public Request() { public Request(StreamInput in) throws IOException { super(in); jobId = in.readString(); - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - allowNoJobs = in.readBoolean(); - } + allowNoJobs = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(jobId); - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - out.writeBoolean(allowNoJobs); - } + out.writeBoolean(allowNoJobs); } public void setAllowNoJobs(boolean allowNoJobs) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java index d4c7124af3238..17de9dfc3522c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java @@ -85,9 +85,7 @@ public Request(StreamInput in) throws IOException { super(in); jobId = in.readString(); expandedJobsIds = in.readStringList(); - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - allowNoJobs = in.readBoolean(); - } + allowNoJobs = in.readBoolean(); } @Override @@ -95,9 +93,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(jobId); out.writeStringCollection(expandedJobsIds); - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - out.writeBoolean(allowNoJobs); - } + out.writeBoolean(allowNoJobs); } public List getExpandedJobsIds() { return expandedJobsIds; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java index c914150173b4a..0021040c69801 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; @@ -96,9 +95,7 @@ public Request(StreamInput in) throws IOException { resolvedStartedDatafeedIds = in.readStringArray(); stopTimeout = in.readTimeValue(); force = in.readBoolean(); - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - allowNoDatafeeds = in.readBoolean(); - } + allowNoDatafeeds = in.readBoolean(); } public String getDatafeedId() { @@ -160,9 +157,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringArray(resolvedStartedDatafeedIds); out.writeTimeValue(stopTimeout); out.writeBoolean(force); - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - out.writeBoolean(allowNoDatafeeds); - } + out.writeBoolean(allowNoDatafeeds); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java index 8585e4122e673..1c39c6d985d45 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/AggProvider.java @@ -7,8 +7,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -68,14 +66,7 @@ static AggProvider fromParsedAggs(AggregatorFactories.Builder parsedAggs) throws } static AggProvider fromStream(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers - return new AggProvider(in.readMap(), in.readOptionalWriteable(AggregatorFactories.Builder::new), in.readException()); - } else if 
(in.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects - return new AggProvider(in.readMap(), null, null); - } else { // only supports eagerly parsed objects - // Upstream, we have read the bool already and know for sure that we have parsed aggs in the stream - return AggProvider.fromParsedAggs(new AggregatorFactories.Builder(in)); - } + return new AggProvider(in.readMap(), in.readOptionalWriteable(AggregatorFactories.Builder::new), in.readException()); } AggProvider(Map aggs, AggregatorFactories.Builder parsedAggs, Exception parsingException) { @@ -92,29 +83,9 @@ static AggProvider fromStream(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers - out.writeMap(aggs); - out.writeOptionalWriteable(parsedAggs); - out.writeException(parsingException); - } else if (out.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects - // We allow the lazy parsing nodes that have the bug throw any parsing errors themselves as - // they already have the ability to fully parse the passed Maps - out.writeMap(aggs); - } else { // only supports eagerly parsed objects - if (parsingException != null) { - if (parsingException instanceof IOException) { - throw (IOException) parsingException; - } else { - throw new ElasticsearchException(parsingException); - } - } else if (parsedAggs == null) { - // This is an admittedly rare case but we should fail early instead of writing null when there - // actually are aggregations defined - throw new ElasticsearchException("Unsupported operation: parsed aggregations are null"); - } - // Upstream we already verified that this calling object is not null, no need to write a second boolean to the stream - parsedAggs.writeTo(out); - } + out.writeMap(aggs); + out.writeOptionalWriteable(parsedAggs); + out.writeException(parsingException); } public Exception getParsingException() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProvider.java index ff6d2f595af81..755c5a3526d01 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProvider.java @@ -7,8 +7,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -74,13 +72,7 @@ static QueryProvider fromParsedQuery(QueryBuilder parsedQuery) throws IOExceptio } static QueryProvider fromStream(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers - return new QueryProvider(in.readMap(), in.readOptionalNamedWriteable(QueryBuilder.class), in.readException()); - } else if (in.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects - return new QueryProvider(in.readMap(), null, null); - } else { // only supports eagerly parsed objects - return QueryProvider.fromParsedQuery(in.readNamedWriteable(QueryBuilder.class)); - } + return new QueryProvider(in.readMap(), 
in.readOptionalNamedWriteable(QueryBuilder.class), in.readException()); } QueryProvider(Map query, QueryBuilder parsedQuery, Exception parsingException) { @@ -95,28 +87,9 @@ static QueryProvider fromStream(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_6_7_0)) { // Has our bug fix for query/agg providers - out.writeMap(query); - out.writeOptionalNamedWriteable(parsedQuery); - out.writeException(parsingException); - } else if (out.getVersion().onOrAfter(Version.V_6_6_0)) { // Has the bug, but supports lazy objects - // We allow the lazy parsing nodes that have the bug throw any parsing errors themselves as - // they already have the ability to fully parse the passed Maps - out.writeMap(query); - } else { // only supports eagerly parsed objects - if (parsingException != null) { // Do we have a parsing error? Throw it - if (parsingException instanceof IOException) { - throw (IOException) parsingException; - } else { - throw new ElasticsearchException(parsingException); - } - } else if (parsedQuery == null) { // Do we have a query defined but not parsed? - // This is an admittedly rare case but we should fail early instead of writing null when there - // actually is a query defined - throw new ElasticsearchException("Unsupported operation: parsed query is null"); - } - out.writeNamedWriteable(parsedQuery); - } + out.writeMap(query); + out.writeOptionalNamedWriteable(parsedQuery); + out.writeException(parsingException); } public Exception getParsingException() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index de37702fe5246..1cb44f9625cb5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -186,11 +186,7 @@ public Job(StreamInput in) throws IOException { jobId = in.readString(); jobType = in.readString(); jobVersion = in.readBoolean() ? Version.readVersion(in) : null; - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - groups = Collections.unmodifiableList(in.readStringList()); - } else { - groups = Collections.emptyList(); - } + groups = Collections.unmodifiableList(in.readStringList()); description = in.readOptionalString(); createTime = new Date(in.readVLong()); finishedTime = in.readBoolean() ? 
new Date(in.readVLong()) : null; @@ -200,10 +196,6 @@ public Job(StreamInput in) throws IOException { in.readVLong(); } } - // for removed establishedModelMemory field - if (in.getVersion().onOrAfter(Version.V_6_1_0) && in.getVersion().before(Version.V_7_0_0)) { - in.readOptionalLong(); - } analysisConfig = new AnalysisConfig(in); analysisLimits = in.readOptionalWriteable(AnalysisLimits::new); dataDescription = in.readOptionalWriteable(DataDescription::new); @@ -449,9 +441,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeBoolean(false); } - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - out.writeStringCollection(groups); - } + out.writeStringCollection(groups); out.writeOptionalString(description); out.writeVLong(createTime.getTime()); if (finishedTime != null) { @@ -464,10 +454,6 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().before(Version.V_7_0_0)) { out.writeBoolean(false); } - // for removed establishedModelMemory field - if (out.getVersion().onOrAfter(Version.V_6_1_0) && out.getVersion().before(Version.V_7_0_0)) { - out.writeOptionalLong(null); - } analysisConfig.writeTo(out); out.writeOptionalWriteable(analysisLimits); out.writeOptionalWriteable(dataDescription); @@ -676,11 +662,7 @@ public Builder(StreamInput in) throws IOException { id = in.readOptionalString(); jobType = in.readString(); jobVersion = in.readBoolean() ? Version.readVersion(in) : null; - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - groups = in.readStringList(); - } else { - groups = Collections.emptyList(); - } + groups = in.readStringList(); description = in.readOptionalString(); createTime = in.readBoolean() ? new Date(in.readVLong()) : null; finishedTime = in.readBoolean() ? new Date(in.readVLong()) : null; @@ -690,10 +672,6 @@ public Builder(StreamInput in) throws IOException { in.readVLong(); } } - // for removed establishedModelMemory field - if (in.getVersion().onOrAfter(Version.V_6_1_0) && in.getVersion().before(Version.V_7_0_0)) { - in.readOptionalLong(); - } analysisConfig = in.readOptionalWriteable(AnalysisConfig::new); analysisLimits = in.readOptionalWriteable(AnalysisLimits::new); dataDescription = in.readOptionalWriteable(DataDescription::new); @@ -861,9 +839,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeBoolean(false); } - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - out.writeStringCollection(groups); - } + out.writeStringCollection(groups); out.writeOptionalString(description); if (createTime != null) { out.writeBoolean(true); @@ -881,10 +857,6 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().before(Version.V_7_0_0)) { out.writeBoolean(false); } - // for removed establishedModelMemory field - if (out.getVersion().onOrAfter(Version.V_6_1_0) && out.getVersion().before(Version.V_7_0_0)) { - out.writeOptionalLong(null); - } out.writeOptionalWriteable(analysisConfig); out.writeOptionalWriteable(analysisLimits); out.writeOptionalWriteable(dataDescription); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java index 36e1fc1096675..81a0e017c6584 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java @@ -106,12 +106,8 @@ private JobUpdate(String jobId, @Nullable List 
groups, @Nullable String public JobUpdate(StreamInput in) throws IOException { jobId = in.readString(); - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - String[] groupsArray = in.readOptionalStringArray(); - groups = groupsArray == null ? null : Arrays.asList(groupsArray); - } else { - groups = null; - } + String[] groupsArray = in.readOptionalStringArray(); + groups = groupsArray == null ? null : Arrays.asList(groupsArray); description = in.readOptionalString(); if (in.readBoolean()) { detectorUpdates = in.readList(DetectorUpdate::new); @@ -131,10 +127,6 @@ public JobUpdate(StreamInput in) throws IOException { } customSettings = in.readMap(); modelSnapshotId = in.readOptionalString(); - // was establishedModelMemory - if (in.getVersion().onOrAfter(Version.V_6_1_0) && in.getVersion().before(Version.V_7_0_0)) { - in.readOptionalLong(); - } if (in.getVersion().onOrAfter(Version.V_6_3_0) && in.readBoolean()) { jobVersion = Version.readVersion(in); } else { @@ -155,10 +147,8 @@ public JobUpdate(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(jobId); - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - String[] groupsArray = groups == null ? null : groups.toArray(new String[groups.size()]); - out.writeOptionalStringArray(groupsArray); - } + String[] groupsArray = groups == null ? null : groups.toArray(new String[groups.size()]); + out.writeOptionalStringArray(groupsArray); out.writeOptionalString(description); out.writeBoolean(detectorUpdates != null); if (detectorUpdates != null) { @@ -176,10 +166,6 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeMap(customSettings); out.writeOptionalString(modelSnapshotId); - // was establishedModelMemory - if (out.getVersion().onOrAfter(Version.V_6_1_0) && out.getVersion().before(Version.V_7_0_0)) { - out.writeOptionalLong(null); - } if (out.getVersion().onOrAfter(Version.V_6_3_0)) { if (jobVersion != null) { out.writeBoolean(true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index 42ce9eb4273a1..22eb0dc357bed 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -91,8 +91,11 @@ public final class Messages { public static final String JOB_AUDIT_CALENDARS_UPDATED_ON_PROCESS = "Updated calendars in running process"; public static final String JOB_AUDIT_MEMORY_STATUS_SOFT_LIMIT = "Job memory status changed to soft_limit; memory pruning will now be " + "more aggressive"; - public static final String JOB_AUDIT_MEMORY_STATUS_HARD_LIMIT = "Job memory status changed to hard_limit at {0}; adjust the " + - "analysis_limits.model_memory_limit setting to ensure all data is analyzed"; + public static final String JOB_AUDIT_MEMORY_STATUS_HARD_LIMIT = "Job memory status changed to hard_limit; " + + "job exceeded model memory limit {0} by {1}. 
" + + "Adjust the analysis_limits.model_memory_limit setting to ensure all data is analyzed"; + public static final String JOB_AUDIT_MEMORY_STATUS_HARD_LIMIT_PRE_7_2 = "Job memory status changed to hard_limit at {0}; adjust the " + + "analysis_limits.model_memory_limit setting to ensure all data is analyzed"; public static final String JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_DUPLICATES = "categorization_filters contain duplicates"; public static final String JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_EMPTY = diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java index f5d7cebbc4c52..f02120433efc4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStats.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.ml.job.process.autodetect.state; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -38,6 +39,8 @@ public class ModelSizeStats implements ToXContentObject, Writeable { * Field Names */ public static final ParseField MODEL_BYTES_FIELD = new ParseField("model_bytes"); + public static final ParseField MODEL_BYTES_EXCEEDED_FIELD = new ParseField("model_bytes_exceeded"); + public static final ParseField MODEL_BYTES_MEMORY_LIMIT_FIELD = new ParseField("model_bytes_memory_limit"); public static final ParseField TOTAL_BY_FIELD_COUNT_FIELD = new ParseField("total_by_field_count"); public static final ParseField TOTAL_OVER_FIELD_COUNT_FIELD = new ParseField("total_over_field_count"); public static final ParseField TOTAL_PARTITION_FIELD_COUNT_FIELD = new ParseField("total_partition_field_count"); @@ -56,6 +59,8 @@ private static ConstructingObjectParser createParser(boolean igno parser.declareString(ConstructingObjectParser.constructorArg(), Job.ID); parser.declareString((modelSizeStat, s) -> {}, Result.RESULT_TYPE); parser.declareLong(Builder::setModelBytes, MODEL_BYTES_FIELD); + parser.declareLong(Builder::setModelBytesExceeded, MODEL_BYTES_EXCEEDED_FIELD); + parser.declareLong(Builder::setModelBytesMemoryLimit, MODEL_BYTES_MEMORY_LIMIT_FIELD); parser.declareLong(Builder::setBucketAllocationFailuresCount, BUCKET_ALLOCATION_FAILURES_COUNT_FIELD); parser.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD); parser.declareLong(Builder::setTotalOverFieldCount, TOTAL_OVER_FIELD_COUNT_FIELD); @@ -100,6 +105,8 @@ public String toString() { private final String jobId; private final long modelBytes; + private final Long modelBytesExceeded; + private final Long modelBytesMemoryLimit; private final long totalByFieldCount; private final long totalOverFieldCount; private final long totalPartitionFieldCount; @@ -108,11 +115,14 @@ public String toString() { private final Date timestamp; private final Date logTime; - private ModelSizeStats(String jobId, long modelBytes, long totalByFieldCount, long totalOverFieldCount, - long totalPartitionFieldCount, long bucketAllocationFailuresCount, MemoryStatus memoryStatus, + private ModelSizeStats(String jobId, long modelBytes, Long modelBytesExceeded, Long modelBytesMemoryLimit, long totalByFieldCount, + long totalOverFieldCount, long 
totalPartitionFieldCount, long bucketAllocationFailuresCount, + MemoryStatus memoryStatus, Date timestamp, Date logTime) { this.jobId = jobId; this.modelBytes = modelBytes; + this.modelBytesExceeded = modelBytesExceeded; + this.modelBytesMemoryLimit = modelBytesMemoryLimit; this.totalByFieldCount = totalByFieldCount; this.totalOverFieldCount = totalOverFieldCount; this.totalPartitionFieldCount = totalPartitionFieldCount; @@ -125,6 +135,16 @@ private ModelSizeStats(String jobId, long modelBytes, long totalByFieldCount, lo public ModelSizeStats(StreamInput in) throws IOException { jobId = in.readString(); modelBytes = in.readVLong(); + if (in.getVersion().onOrAfter(Version.V_7_2_0)) { + modelBytesExceeded = in.readOptionalLong(); + } else { + modelBytesExceeded = null; + } + if (in.getVersion().onOrAfter(Version.V_7_2_0)) { + modelBytesMemoryLimit = in.readOptionalLong(); + } else { + modelBytesMemoryLimit = null; + } totalByFieldCount = in.readVLong(); totalOverFieldCount = in.readVLong(); totalPartitionFieldCount = in.readVLong(); @@ -146,6 +166,12 @@ public static String documentIdPrefix(String jobId) { public void writeTo(StreamOutput out) throws IOException { out.writeString(jobId); out.writeVLong(modelBytes); + if (out.getVersion().onOrAfter(Version.V_7_2_0)) { + out.writeOptionalLong(modelBytesExceeded); + } + if (out.getVersion().onOrAfter(Version.V_7_2_0)) { + out.writeOptionalLong(modelBytesMemoryLimit); + } out.writeVLong(totalByFieldCount); out.writeVLong(totalOverFieldCount); out.writeVLong(totalPartitionFieldCount); @@ -171,6 +197,12 @@ public XContentBuilder doXContentBody(XContentBuilder builder) throws IOExceptio builder.field(Job.ID.getPreferredName(), jobId); builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); builder.field(MODEL_BYTES_FIELD.getPreferredName(), modelBytes); + if (modelBytesExceeded != null) { + builder.field(MODEL_BYTES_EXCEEDED_FIELD.getPreferredName(), modelBytesExceeded); + } + if (modelBytesMemoryLimit != null) { + builder.field(MODEL_BYTES_MEMORY_LIMIT_FIELD.getPreferredName(), modelBytesMemoryLimit); + } builder.field(TOTAL_BY_FIELD_COUNT_FIELD.getPreferredName(), totalByFieldCount); builder.field(TOTAL_OVER_FIELD_COUNT_FIELD.getPreferredName(), totalOverFieldCount); builder.field(TOTAL_PARTITION_FIELD_COUNT_FIELD.getPreferredName(), totalPartitionFieldCount); @@ -192,6 +224,14 @@ public long getModelBytes() { return modelBytes; } + public Long getModelBytesExceeded() { + return modelBytesExceeded; + } + + public Long getModelBytesMemoryLimit() { + return modelBytesMemoryLimit; + } + public long getTotalByFieldCount() { return totalByFieldCount; } @@ -231,8 +271,8 @@ public Date getLogTime() { @Override public int hashCode() { // this.id excluded here as it is generated by the datastore - return Objects.hash(jobId, modelBytes, totalByFieldCount, totalOverFieldCount, totalPartitionFieldCount, - this.bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); + return Objects.hash(jobId, modelBytes, modelBytesExceeded, modelBytesMemoryLimit, totalByFieldCount, totalOverFieldCount, + totalPartitionFieldCount, this.bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); } /** @@ -250,7 +290,9 @@ public boolean equals(Object other) { ModelSizeStats that = (ModelSizeStats) other; - return this.modelBytes == that.modelBytes && this.totalByFieldCount == that.totalByFieldCount + return this.modelBytes == that.modelBytes && Objects.equals(this.modelBytesExceeded, that.modelBytesExceeded) + && 
Objects.equals(this.modelBytesMemoryLimit, that.modelBytesMemoryLimit) + && this.totalByFieldCount == that.totalByFieldCount && this.totalOverFieldCount == that.totalOverFieldCount && this.totalPartitionFieldCount == that.totalPartitionFieldCount && this.bucketAllocationFailuresCount == that.bucketAllocationFailuresCount && Objects.equals(this.memoryStatus, that.memoryStatus) && Objects.equals(this.timestamp, that.timestamp) @@ -262,6 +304,8 @@ public static class Builder { private final String jobId; private long modelBytes; + private Long modelBytesExceeded; + private Long modelBytesMemoryLimit; private long totalByFieldCount; private long totalOverFieldCount; private long totalPartitionFieldCount; @@ -279,6 +323,8 @@ public Builder(String jobId) { public Builder(ModelSizeStats modelSizeStats) { this.jobId = modelSizeStats.jobId; this.modelBytes = modelSizeStats.modelBytes; + this.modelBytesExceeded = modelSizeStats.modelBytesExceeded; + this.modelBytesMemoryLimit = modelSizeStats.modelBytesMemoryLimit; this.totalByFieldCount = modelSizeStats.totalByFieldCount; this.totalOverFieldCount = modelSizeStats.totalOverFieldCount; this.totalPartitionFieldCount = modelSizeStats.totalPartitionFieldCount; @@ -293,6 +339,16 @@ public Builder setModelBytes(long modelBytes) { return this; } + public Builder setModelBytesExceeded(long modelBytesExceeded) { + this.modelBytesExceeded = modelBytesExceeded; + return this; + } + + public Builder setModelBytesMemoryLimit(long modelBytesMemoryLimit) { + this.modelBytesMemoryLimit = modelBytesMemoryLimit; + return this; + } + public Builder setTotalByFieldCount(long totalByFieldCount) { this.totalByFieldCount = totalByFieldCount; return this; @@ -330,8 +386,8 @@ public Builder setLogTime(Date logTime) { } public ModelSizeStats build() { - return new ModelSizeStats(jobId, modelBytes, totalByFieldCount, totalOverFieldCount, totalPartitionFieldCount, - bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); + return new ModelSizeStats(jobId, modelBytes, modelBytesExceeded, modelBytesMemoryLimit, totalByFieldCount, totalOverFieldCount, + totalPartitionFieldCount, bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java index 595df0f8c31ad..c6eecca5e3d80 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.io.stream.Writeable; public class RollupSearchAction extends Action { @@ -22,16 +23,17 @@ private RollupSearchAction() { @Override public SearchResponse newResponse() { - return new SearchResponse(); + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + @Override + public Writeable.Reader getResponseReader() { + return SearchResponse::new; } public static class RequestBuilder extends ActionRequestBuilder { public RequestBuilder(ElasticsearchClient client, SearchRequest searchRequest) { super(client, INSTANCE, searchRequest); } - - RequestBuilder(ElasticsearchClient client) { - super(client, INSTANCE, new 
SearchRequest()); - } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetaData.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetaData.java index 46111b9b16cd1..8d9b9c762e35f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetaData.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetaData.java @@ -93,7 +93,7 @@ public String toString() { @Override public Version getMinimalSupportedVersion() { - return Version.V_6_0_0_beta2; + return Version.CURRENT.minimumIndexCompatibilityVersion(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java index 7c87198b96446..d4d45ef0a3cbc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/oidc/OpenIdConnectRealmSettings.java @@ -76,8 +76,6 @@ private OpenIdConnectRealmSettings() { RealmSettings.realmSettingPrefix(TYPE), "rp.requested_scopes", key -> Setting.listSetting(key, Collections.singletonList("openid"), Function.identity(), Setting.Property.NodeScope)); - public static final Setting.AffixSetting OP_NAME - = RealmSettings.simpleString(TYPE, "op.name", Setting.Property.NodeScope); public static final Setting.AffixSetting OP_AUTHORIZATION_ENDPOINT = Setting.affixKeySetting(RealmSettings.realmSettingPrefix(TYPE), "op.authorization_endpoint", key -> Setting.simpleString(key, v -> { @@ -151,7 +149,7 @@ private OpenIdConnectRealmSettings() { public static Set> getSettings() { final Set> set = Sets.newHashSet( RP_CLIENT_ID, RP_REDIRECT_URI, RP_RESPONSE_TYPE, RP_REQUESTED_SCOPES, RP_CLIENT_SECRET, RP_SIGNATURE_ALGORITHM, - RP_POST_LOGOUT_REDIRECT_URI, OP_NAME, OP_AUTHORIZATION_ENDPOINT, OP_TOKEN_ENDPOINT, OP_USERINFO_ENDPOINT, + RP_POST_LOGOUT_REDIRECT_URI, OP_AUTHORIZATION_ENDPOINT, OP_TOKEN_ENDPOINT, OP_USERINFO_ENDPOINT, OP_ENDSESSION_ENDPOINT, OP_ISSUER, OP_JWKSET_PATH, HTTP_CONNECT_TIMEOUT, HTTP_CONNECTION_READ_TIMEOUT, HTTP_SOCKET_TIMEOUT, HTTP_MAX_CONNECTIONS, HTTP_MAX_ENDPOINT_CONNECTIONS, ALLOWED_CLOCK_SKEW); set.addAll(DelegatedAuthorizationSettings.getSettings(TYPE)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java index 492622b2c519c..28f263748135f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/Hasher.java @@ -351,6 +351,24 @@ public boolean verify(SecureString text, char[] hash) { return CharArrays.constantTimeEquals(computedHash, new String(saltAndHash, 12, saltAndHash.length - 12)); } }, + /* + * Unsalted SHA-256 , not suited for password storage. 
+ */ + SHA256() { + @Override + public char[] hash(SecureString text) { + MessageDigest md = MessageDigests.sha256(); + md.update(CharArrays.toUtf8Bytes(text.getChars())); + return Base64.getEncoder().encodeToString(md.digest()).toCharArray(); + } + + @Override + public boolean verify(SecureString text, char[] hash) { + MessageDigest md = MessageDigests.sha256(); + md.update(CharArrays.toUtf8Bytes(text.getChars())); + return CharArrays.constantTimeEquals(Base64.getEncoder().encodeToString(md.digest()).toCharArray(), hash); + } + }, NOOP() { @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java index 08d754b4e5357..bde94b116c982 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissions.java @@ -139,11 +139,11 @@ private static void buildRoleQuery(User user, ScriptService scriptService, Shard NestedHelper nestedHelper = new NestedHelper(queryShardContext.getMapperService()); if (nestedHelper.mightMatchNestedDocs(roleQuery)) { roleQuery = new BooleanQuery.Builder().add(roleQuery, FILTER) - .add(Queries.newNonNestedFilter(queryShardContext.indexVersionCreated()), FILTER).build(); + .add(Queries.newNonNestedFilter(), FILTER).build(); } // If access is allowed on root doc then also access is allowed on all nested docs of that root document: BitSetProducer rootDocs = queryShardContext - .bitsetFilter(Queries.newNonNestedFilter(queryShardContext.indexVersionCreated())); + .bitsetFilter(Queries.newNonNestedFilter()); ToChildBlockJoinQuery includeNestedDocs = new ToChildBlockJoinQuery(roleQuery, rootDocs); filter.add(includeNestedDocs, SHOULD); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java index fa5ec665043ae..6f74a182e5a77 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java @@ -57,8 +57,6 @@ import java.util.function.Supplier; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.core.XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS; - /** * Provides access to {@link SSLEngine} and {@link SSLSocketFactory} objects based on a provided configuration. All * configurations loaded by this service must be configured on construction. 
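For reference, a minimal standalone sketch of what the unsalted SHA256 hasher added above computes, using only JDK classes. MessageDigests and CharArrays in the patch are Elasticsearch helper classes; the plain-JDK calls below are assumed equivalents, not the patch's own code.

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.util.Base64;

    public final class Sha256HashSketch {

        // Unsalted SHA-256 of the UTF-8 bytes, Base64-encoded, mirroring Hasher.SHA256.hash().
        static String hash(String text) throws NoSuchAlgorithmException {
            MessageDigest md = MessageDigest.getInstance("SHA-256");
            byte[] digest = md.digest(text.getBytes(StandardCharsets.UTF_8));
            return Base64.getEncoder().encodeToString(digest);
        }

        // Recomputes the digest and compares it to the stored value in constant time,
        // mirroring Hasher.SHA256.verify().
        static boolean verify(String text, String storedHash) throws NoSuchAlgorithmException {
            byte[] recomputed = hash(text).getBytes(StandardCharsets.UTF_8);
            byte[] stored = storedHash.getBytes(StandardCharsets.UTF_8);
            return MessageDigest.isEqual(recomputed, stored);
        }

        public static void main(String[] args) throws NoSuchAlgorithmException {
            String h = hash("not-a-real-password");
            System.out.println(h + " verifies: " + verify("not-a-real-password", h));
        }
    }

As the added comment notes, the scheme is unsalted and therefore not suited for password storage.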
@@ -75,9 +73,7 @@ public class SSLService { private static final Map ORDERED_PROTOCOL_ALGORITHM_MAP; static { LinkedHashMap protocolAlgorithmMap = new LinkedHashMap<>(); - if (DEFAULT_SUPPORTED_PROTOCOLS.contains("TLSv1.3")) { - protocolAlgorithmMap.put("TLSv1.3", "TLSv1.3"); - } + protocolAlgorithmMap.put("TLSv1.3", "TLSv1.3"); protocolAlgorithmMap.put("TLSv1.2", "TLSv1.2"); protocolAlgorithmMap.put("TLSv1.1", "TLSv1.1"); protocolAlgorithmMap.put("TLSv1", "TLSv1"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TLSLicenseBootstrapCheck.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TLSLicenseBootstrapCheck.java index 6f6592bbdfca2..a042aeb4a2359 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TLSLicenseBootstrapCheck.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TLSLicenseBootstrapCheck.java @@ -9,6 +9,7 @@ import org.elasticsearch.bootstrap.BootstrapContext; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.XPackSettings; /** @@ -19,10 +20,11 @@ public final class TLSLicenseBootstrapCheck implements BootstrapCheck { public BootstrapCheckResult check(BootstrapContext context) { if (XPackSettings.TRANSPORT_SSL_ENABLED.get(context.settings()) == false) { License license = LicenseService.getLicense(context.metaData()); - if (license != null && license.isProductionLicense()) { - return BootstrapCheckResult.failure("Transport SSL must be enabled for setups with production licenses. Please set " + - "[xpack.security.transport.ssl.enabled] to [true] or disable security by setting [xpack.security.enabled] " + - "to [false]"); + if (XPackLicenseState.isTransportTlsRequired(license, context.settings())) { + return BootstrapCheckResult.failure("Transport SSL must be enabled if security is enabled on a [" + + license.operationMode().description() + "] license. 
" + + "Please set [xpack.security.transport.ssl.enabled] to [true] or disable security by setting " + + "[xpack.security.enabled] to [false]"); } } return BootstrapCheckResult.success(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TrustConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TrustConfig.java index a9bc737c94387..f570e0b42ed44 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TrustConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/TrustConfig.java @@ -119,9 +119,8 @@ X509ExtendedTrustManager createTrustManager(@Nullable Environment environment) { try { return CertParsingUtils.trustManager(trustConfigs.stream() - .flatMap((tc) -> Arrays.stream(tc.createTrustManager(environment).getAcceptedIssuers())) - .collect(Collectors.toList()) - .toArray(new X509Certificate[0])); + .flatMap((tc) -> Arrays.stream(tc.createTrustManager(environment).getAcceptedIssuers())) + .toArray(X509Certificate[]::new)); } catch (Exception e) { throw new ElasticsearchException("failed to create trust manager", e); } diff --git a/x-pack/plugin/core/src/main/resources/security-index-template-7.json b/x-pack/plugin/core/src/main/resources/security-index-template-7.json index ebf6d073cd8a6..dae6462b7a6f0 100644 --- a/x-pack/plugin/core/src/main/resources/security-index-template-7.json +++ b/x-pack/plugin/core/src/main/resources/security-index-template-7.json @@ -213,8 +213,19 @@ "type": "date", "format": "epoch_millis" }, - "superseded_by": { - "type": "keyword" + "superseding": { + "type": "object", + "properties": { + "encrypted_tokens": { + "type": "binary" + }, + "encryption_iv": { + "type": "binary" + }, + "encryption_salt": { + "type": "binary" + } + } }, "invalidated" : { "type" : "boolean" diff --git a/x-pack/plugin/core/src/main/resources/security-tokens-index-template-7.json b/x-pack/plugin/core/src/main/resources/security-tokens-index-template-7.json index e7450d0be9c28..312d9ff9e3f58 100644 --- a/x-pack/plugin/core/src/main/resources/security-tokens-index-template-7.json +++ b/x-pack/plugin/core/src/main/resources/security-tokens-index-template-7.json @@ -35,8 +35,19 @@ "type": "date", "format": "epoch_millis" }, - "superseded_by": { - "type": "keyword" + "superseding": { + "type": "object", + "properties": { + "encrypted_tokens": { + "type": "binary" + }, + "encryption_iv": { + "type": "binary" + }, + "encryption_salt": { + "type": "binary" + } + } }, "invalidated" : { "type" : "boolean" diff --git a/x-pack/plugin/core/src/test/eclipse-build.gradle b/x-pack/plugin/core/src/test/eclipse-build.gradle new file mode 100644 index 0000000000000..ddec3a7fbd7d3 --- /dev/null +++ b/x-pack/plugin/core/src/test/eclipse-build.gradle @@ -0,0 +1,6 @@ +// this is just shell gradle file for eclipse to have separate projects for core src and tests +apply from: '../../build.gradle' + +dependencies { + testCompile project(':x-pack:plugin:core') +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java index 1a0a8e764124d..9641f09711995 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java @@ -12,11 +12,13 @@ import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import 
org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; @@ -232,7 +234,8 @@ public void testExecuteWithHeadersNoHeaders() { when(client.threadPool()).thenReturn(threadPool); PlainActionFuture searchFuture = PlainActionFuture.newFuture(); - searchFuture.onResponse(new SearchResponse()); + searchFuture.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY)); when(client.search(any())).thenReturn(searchFuture); assertExecutionWithOrigin(Collections.emptyMap(), client); } @@ -245,7 +248,8 @@ public void testExecuteWithHeaders() { when(client.threadPool()).thenReturn(threadPool); PlainActionFuture searchFuture = PlainActionFuture.newFuture(); - searchFuture.onResponse(new SearchResponse()); + searchFuture.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY)); when(client.search(any())).thenReturn(searchFuture); Map headers = MapBuilder. newMapBuilder().put(AuthenticationField.AUTHENTICATION_KEY, "anything") .put(AuthenticationServiceField.RUN_AS_USER_HEADER, "anything").map(); @@ -265,7 +269,8 @@ public void testExecuteWithHeadersNoSecurityHeaders() { when(client.threadPool()).thenReturn(threadPool); PlainActionFuture searchFuture = PlainActionFuture.newFuture(); - searchFuture.onResponse(new SearchResponse()); + searchFuture.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY)); when(client.search(any())).thenReturn(searchFuture); Map unrelatedHeaders = MapBuilder. newMapBuilder().put(randomAlphaOfLength(10), "anything").map(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/TestXPackTransportClient.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/TestXPackTransportClient.java deleted file mode 100644 index 30c370c14c27d..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/TestXPackTransportClient.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.core; - -import io.netty.util.ThreadDeathWatcher; -import io.netty.util.concurrent.GlobalEventExecutor; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.xpack.core.security.SecurityField; - -import java.util.Arrays; -import java.util.Collection; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.test.ESTestCase.getTestTransportPlugin; - -/** - * TransportClient.Builder that installs the XPackPlugin by default. - */ -@SuppressWarnings({"unchecked","varargs"}) -public class TestXPackTransportClient extends TransportClient { - - @SafeVarargs - public TestXPackTransportClient(Settings settings, Class... plugins) { - this(settings, Arrays.asList(plugins)); - } - - public TestXPackTransportClient(Settings settings, Collection> plugins) { - super(settings, Settings.EMPTY, addPlugins(plugins, getTestTransportPlugin()), null); - } - - @Override - public void close() { - super.close(); - if (NetworkModule.TRANSPORT_TYPE_SETTING.exists(settings) == false - || NetworkModule.TRANSPORT_TYPE_SETTING.get(settings).equals(SecurityField.NAME4)) { - try { - GlobalEventExecutor.INSTANCE.awaitInactivity(5, TimeUnit.SECONDS); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - try { - ThreadDeathWatcher.awaitInactivity(5, TimeUnit.SECONDS); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - } - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackSettingsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackSettingsTests.java index 004b46897a48e..9f430819225a4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackSettingsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackSettingsTests.java @@ -5,11 +5,10 @@ */ package org.elasticsearch.xpack.core; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import javax.crypto.Cipher; import javax.crypto.SecretKeyFactory; -import javax.net.ssl.SSLContext; import java.security.NoSuchAlgorithmException; @@ -20,17 +19,23 @@ public class XPackSettingsTests extends ESTestCase { - public void testDefaultSSLCiphers() throws Exception { + public void testDefaultSSLCiphers() { assertThat(XPackSettings.DEFAULT_CIPHERS, hasItem("TLS_RSA_WITH_AES_128_CBC_SHA")); + assertThat(XPackSettings.DEFAULT_CIPHERS, hasItem("TLS_RSA_WITH_AES_256_CBC_SHA")); + } - final boolean useAES256 = Cipher.getMaxAllowedKeyLength("AES") > 128; - if (useAES256) { - logger.info("AES 256 is available"); - assertThat(XPackSettings.DEFAULT_CIPHERS, hasItem("TLS_RSA_WITH_AES_256_CBC_SHA")); - } else { - logger.info("AES 256 is not available"); - assertThat(XPackSettings.DEFAULT_CIPHERS, not(hasItem("TLS_RSA_WITH_AES_256_CBC_SHA"))); - } + public void testChaCha20InCiphersOnJdk12Plus() { + assumeTrue("Test is only valid on JDK 12+ JVM", JavaVersion.current().compareTo(JavaVersion.parse("12")) > -1); + assertThat(XPackSettings.DEFAULT_CIPHERS, hasItem("TLS_CHACHA20_POLY1305_SHA256")); + assertThat(XPackSettings.DEFAULT_CIPHERS, hasItem("TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256")); + assertThat(XPackSettings.DEFAULT_CIPHERS, hasItem("TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256")); + } + + public void 
testChaCha20NotInCiphersOnPreJdk12() { + assumeTrue("Test is only valid on pre JDK 12 JVM", JavaVersion.current().compareTo(JavaVersion.parse("12")) < 0); + assertThat(XPackSettings.DEFAULT_CIPHERS, not(hasItem("TLS_CHACHA20_POLY1305_SHA256"))); + assertThat(XPackSettings.DEFAULT_CIPHERS, not(hasItem("TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256"))); + assertThat(XPackSettings.DEFAULT_CIPHERS, not(hasItem("TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256"))); } public void testPasswordHashingAlgorithmSettingValidation() { @@ -50,16 +55,10 @@ public void testPasswordHashingAlgorithmSettingValidation() { Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), bcryptAlgo).build())); } - public void testDefaultSupportedProtocolsWithTLSv13() throws Exception { - assumeTrue("current JVM does not support TLSv1.3", supportTLSv13()); + public void testDefaultSupportedProtocols() { assertThat(XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS, contains("TLSv1.3", "TLSv1.2", "TLSv1.1")); } - public void testDefaultSupportedProtocolsWithoutTLSv13() throws Exception { - assumeFalse("current JVM supports TLSv1.3", supportTLSv13()); - assertThat(XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS, contains("TLSv1.2", "TLSv1.1")); - } - private boolean isSecretkeyFactoryAlgoAvailable(String algorithmId) { try { SecretKeyFactory.getInstance(algorithmId); @@ -68,13 +67,4 @@ private boolean isSecretkeyFactoryAlgoAvailable(String algorithmId) { return false; } } - - private boolean supportTLSv13() { - try { - SSLContext.getInstance("TLSv1.3"); - return true; - } catch (NoSuchAlgorithmException e) { - return false; - } - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformActionResponseTests.java deleted file mode 100644 index 54501fde5cfe8..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/action/DeleteDataFrameTransformActionResponseTests.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -package org.elasticsearch.xpack.core.dataframe.action; - -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.xpack.core.dataframe.action.DeleteDataFrameTransformAction.Response; - -public class DeleteDataFrameTransformActionResponseTests extends AbstractWireSerializingDataFrameTestCase { - @Override - protected Response createTestInstance() { - return new Response(randomBoolean()); - } - - @Override - protected Reader instanceReader() { - return Response::new; - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/PivotConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/PivotConfigTests.java index 1586ea540f4b4..2f93f50d4d136 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/PivotConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transforms/pivot/PivotConfigTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.dataframe.transforms.pivot; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.XContentParser; @@ -13,15 +14,25 @@ import org.elasticsearch.xpack.core.dataframe.transforms.AbstractSerializingDataFrameTestCase; import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; public class PivotConfigTests extends AbstractSerializingDataFrameTestCase { public static PivotConfig randomPivotConfig() { - return new PivotConfig(GroupConfigTests.randomGroupConfig(), AggregationConfigTests.randomAggregationConfig()); + return new PivotConfig(GroupConfigTests.randomGroupConfig(), + AggregationConfigTests.randomAggregationConfig(), + randomBoolean() ? null : randomIntBetween(10, 10_000)); } public static PivotConfig randomInvalidPivotConfig() { - return new PivotConfig(GroupConfigTests.randomGroupConfig(), AggregationConfigTests.randomInvalidAggregationConfig()); + return new PivotConfig(GroupConfigTests.randomGroupConfig(), + AggregationConfigTests.randomInvalidAggregationConfig(), + randomBoolean() ? 
null : randomIntBetween(10, 10_000)); } @Override @@ -103,7 +114,7 @@ public void testEmptyGroupBy() throws IOException { assertFalse(pivotConfig.isValid()); } - public void testMissingGroupBy() throws IOException { + public void testMissingGroupBy() { String pivot = "{" + " \"aggs\": {" + " \"avg\": {" @@ -114,7 +125,7 @@ public void testMissingGroupBy() throws IOException { expectThrows(IllegalArgumentException.class, () -> createPivotConfigFromString(pivot, false)); } - public void testDoubleAggs() throws IOException { + public void testDoubleAggs() { String pivot = "{" + " \"group_by\": {" + " \"id\": {" @@ -136,6 +147,68 @@ public void testDoubleAggs() throws IOException { expectThrows(IllegalArgumentException.class, () -> createPivotConfigFromString(pivot, false)); } + public void testValidAggNames() throws IOException { + String pivotAggs = "{" + + " \"group_by\": {" + + " \"user.id.field\": {" + + " \"terms\": {" + + " \"field\": \"id\"" + + "} } }," + + " \"aggs\": {" + + " \"avg.field.value\": {" + + " \"avg\": {" + + " \"field\": \"points\"" + + "} } } }"; + PivotConfig pivotConfig = createPivotConfigFromString(pivotAggs, true); + assertTrue(pivotConfig.isValid()); + List fieldValidation = pivotConfig.aggFieldValidation(); + assertTrue(fieldValidation.isEmpty()); + } + + public void testAggNameValidationsWithoutIssues() { + String prefix = randomAlphaOfLength(10) + "1"; + String prefix2 = randomAlphaOfLength(10) + "2"; + String nestedField1 = randomAlphaOfLength(10) + "3"; + String nestedField2 = randomAlphaOfLength(10) + "4"; + + assertThat(PivotConfig.aggFieldValidation(Arrays.asList(prefix + nestedField1 + nestedField2, + prefix + nestedField1, + prefix, + prefix2)), is(empty())); + + assertThat(PivotConfig.aggFieldValidation( + Arrays.asList( + dotJoin(prefix, nestedField1, nestedField2), + dotJoin(nestedField1, nestedField2), + nestedField2, + prefix2)), is(empty())); + } + + public void testAggNameValidationsWithDuplicatesAndNestingIssues() { + String prefix = randomAlphaOfLength(10) + "1"; + String prefix2 = randomAlphaOfLength(10) + "2"; + String nestedField1 = randomAlphaOfLength(10) + "3"; + String nestedField2 = randomAlphaOfLength(10) + "4"; + + List failures = PivotConfig.aggFieldValidation( + Arrays.asList( + dotJoin(prefix, nestedField1, nestedField2), + dotJoin(prefix, nestedField2), + dotJoin(prefix, nestedField1), + dotJoin(prefix2, nestedField1), + dotJoin(prefix2, nestedField1), + prefix2)); + + assertThat(failures, + containsInAnyOrder("duplicate field [" + dotJoin(prefix2, nestedField1) + "] detected", + "field [" + prefix2 + "] cannot be both an object and a field", + "field [" + dotJoin(prefix, nestedField1) + "] cannot be both an object and a field")); + } + + private static String dotJoin(String... 
fields) { + return Strings.arrayToDelimitedString(fields, "."); + } + private PivotConfig createPivotConfigFromString(String json, boolean lenient) throws IOException { final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java index b39c4f1a25a76..27fba82338a1c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; import java.io.IOException; import java.util.Collections; @@ -34,17 +35,26 @@ public class AsyncTwoPhaseIndexerTests extends ESTestCase { AtomicBoolean isFinished = new AtomicBoolean(false); + AtomicBoolean isStopped = new AtomicBoolean(false); + + @Before + public void reset() { + isFinished.set(false); + isStopped.set(false); + } private class MockIndexer extends AsyncTwoPhaseIndexer { private final CountDownLatch latch; // test the execution order private volatile int step; + private final boolean stoppedBeforeFinished; protected MockIndexer(Executor executor, AtomicReference initialState, Integer initialPosition, - CountDownLatch latch) { + CountDownLatch latch, boolean stoppedBeforeFinished) { super(executor, initialState, initialPosition, new MockJobStats()); this.latch = latch; + this.stoppedBeforeFinished = stoppedBeforeFinished; } @Override @@ -57,7 +67,7 @@ protected IterationResult doProcess(SearchResponse searchResponse) { awaitForLatch(); assertThat(step, equalTo(3)); ++step; - return new IterationResult(Collections.emptyList(), 3, true); + return new IterationResult<>(Collections.emptyList(), 3, true); } private void awaitForLatch() { @@ -99,7 +109,8 @@ protected void doNextBulk(BulkRequest request, ActionListener next @Override protected void doSaveState(IndexerState state, Integer position, Runnable next) { - assertThat(step, equalTo(5)); + int expectedStep = stoppedBeforeFinished ? 
3 : 5; + assertThat(step, equalTo(expectedStep)); ++step; next.run(); } @@ -114,7 +125,12 @@ protected void onFinish(ActionListener listener) { assertThat(step, equalTo(4)); ++step; listener.onResponse(null); - isFinished.set(true); + assertTrue(isFinished.compareAndSet(false, true)); + } + + @Override + protected void onStop() { + assertTrue(isStopped.compareAndSet(false, true)); } @Override @@ -180,7 +196,7 @@ protected void doSaveState(IndexerState state, Integer position, Runnable next) protected void onFailure(Exception exc) { assertThat(step, equalTo(2)); ++step; - isFinished.set(true); + assertTrue(isFinished.compareAndSet(false, true)); } @Override @@ -206,13 +222,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } + @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/42084") public void testStateMachine() throws Exception { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); final ExecutorService executor = Executors.newFixedThreadPool(1); - isFinished.set(false); try { CountDownLatch countDownLatch = new CountDownLatch(1); - MockIndexer indexer = new MockIndexer(executor, state, 2, countDownLatch); + MockIndexer indexer = new MockIndexer(executor, state, 2, countDownLatch, false); indexer.start(); assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); @@ -220,7 +236,8 @@ public void testStateMachine() throws Exception { countDownLatch.countDown(); assertThat(indexer.getPosition(), equalTo(2)); - ESTestCase.awaitBusy(() -> isFinished.get()); + assertTrue(awaitBusy(() -> isFinished.get())); + assertFalse(isStopped.get()); assertThat(indexer.getStep(), equalTo(6)); assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); assertThat(indexer.getStats().getNumPages(), equalTo(1L)); @@ -234,18 +251,59 @@ public void testStateMachine() throws Exception { public void testStateMachineBrokenSearch() throws InterruptedException { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); final ExecutorService executor = Executors.newFixedThreadPool(1); - isFinished.set(false); try { MockIndexerThrowsFirstSearch indexer = new MockIndexerThrowsFirstSearch(executor, state, 2); indexer.start(); assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); - assertTrue(ESTestCase.awaitBusy(() -> isFinished.get(), 10000, TimeUnit.SECONDS)); + assertTrue(awaitBusy(() -> isFinished.get(), 10000, TimeUnit.SECONDS)); assertThat(indexer.getStep(), equalTo(3)); } finally { executor.shutdownNow(); } } + + @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/42084") + public void testStop_AfterIndexerIsFinished() throws InterruptedException { + AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); + final ExecutorService executor = Executors.newFixedThreadPool(1); + try { + CountDownLatch countDownLatch = new CountDownLatch(1); + MockIndexer indexer = new MockIndexer(executor, state, 2, countDownLatch, false); + indexer.start(); + assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + countDownLatch.countDown(); + assertTrue(awaitBusy(() -> isFinished.get())); + + indexer.stop(); + assertTrue(isStopped.get()); + assertThat(indexer.getState(), equalTo(IndexerState.STOPPED)); + } finally { + executor.shutdownNow(); + } + } + + @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/42084") + public void 
testStop_WhileIndexing() throws InterruptedException { + AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); + final ExecutorService executor = Executors.newFixedThreadPool(1); + try { + CountDownLatch countDownLatch = new CountDownLatch(1); + MockIndexer indexer = new MockIndexer(executor, state, 2, countDownLatch, true); + indexer.start(); + assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); + assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); + indexer.stop(); + countDownLatch.countDown(); + + assertThat(indexer.getPosition(), equalTo(2)); + assertTrue(awaitBusy(() -> isStopped.get())); + assertFalse(isFinished.get()); + } finally { + executor.shutdownNow(); + } + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/AbstractActionTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/AbstractActionTestCase.java index bed04a7cf5425..ab35221afec0b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/AbstractActionTestCase.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/AbstractActionTestCase.java @@ -7,10 +7,6 @@ package org.elasticsearch.xpack.core.indexlifecycle; import org.elasticsearch.test.AbstractSerializingTestCase; -import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; - -import java.util.List; -import java.util.stream.Collectors; public abstract class AbstractActionTestCase extends AbstractSerializingTestCase { @@ -25,16 +21,4 @@ public final void testIsSafeAction() { assertEquals(isSafeAction(), action.isSafeAction()); } - public void testToStepKeys() { - T action = createTestInstance(); - String phase = randomAlphaOfLengthBetween(1, 10); - StepKey nextStepKey = new StepKey(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), - randomAlphaOfLengthBetween(1, 10)); - List steps = action.toSteps(null, phase, nextStepKey); - assertNotNull(steps); - List stepKeys = action.toStepKeys(phase); - assertNotNull(stepKeys); - List expectedStepKeys = steps.stream().map(Step::getKey).collect(Collectors.toList()); - assertEquals(expectedStepKeys, stepKeys); - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/MockAction.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/MockAction.java index 30eabac562606..9ca6dc17d3e32 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/MockAction.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexlifecycle/MockAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.indexlifecycle.Step.StepKey; import java.io.IOException; import java.util.ArrayList; @@ -75,11 +74,6 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) return new ArrayList<>(steps); } - @Override - public List toStepKeys(String phase) { - return steps.stream().map(Step::getKey).collect(Collectors.toList()); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeList(steps.stream().map(MockStep::new).collect(Collectors.toList())); @@ -103,4 +97,4 @@ public boolean equals(Object obj) { return Objects.equals(steps, other.steps) && 
Objects.equals(safe, other.safe); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/AggProviderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/AggProviderTests.java index dc87cf744cb98..d544584376f47 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/AggProviderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/AggProviderTests.java @@ -5,13 +5,8 @@ */ package org.elasticsearch.xpack.core.ml.datafeed; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.Version; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.DeprecationHandler; @@ -29,9 +24,7 @@ import java.util.Collections; import java.util.Map; -import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; public class AggProviderTests extends AbstractSerializingTestCase { @@ -96,68 +89,6 @@ public void testEmptyAggMap() throws IOException { assertThat(e.getMessage(), equalTo("Datafeed aggregations are not parsable")); } - public void testSerializationBetweenBugVersion() throws IOException { - AggProvider tempAggProvider = createRandomValidAggProvider(); - AggProvider aggProviderWithEx = new AggProvider(tempAggProvider.getAggs(), tempAggProvider.getParsedAggs(), new IOException("ex")); - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(Version.V_6_6_2); - aggProviderWithEx.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) { - in.setVersion(Version.V_6_6_2); - AggProvider streamedAggProvider = AggProvider.fromStream(in); - assertThat(streamedAggProvider.getAggs(), equalTo(aggProviderWithEx.getAggs())); - assertThat(streamedAggProvider.getParsingException(), is(nullValue())); - - AggregatorFactories.Builder streamedParsedAggs = XContentObjectTransformer.aggregatorTransformer(xContentRegistry()) - .fromMap(streamedAggProvider.getAggs()); - assertThat(streamedParsedAggs, equalTo(aggProviderWithEx.getParsedAggs())); - assertThat(streamedAggProvider.getParsedAggs(), is(nullValue())); - } - } - } - - public void testSerializationBetweenEagerVersion() throws IOException { - AggProvider validAggProvider = createRandomValidAggProvider(); - - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(Version.V_6_0_0); - validAggProvider.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) { - in.setVersion(Version.V_6_0_0); - AggProvider streamedAggProvider = AggProvider.fromStream(in); - assertThat(streamedAggProvider.getAggs(), equalTo(validAggProvider.getAggs())); - assertThat(streamedAggProvider.getParsingException(), is(nullValue())); - assertThat(streamedAggProvider.getParsedAggs(), equalTo(validAggProvider.getParsedAggs())); - } - } - - try (BytesStreamOutput output = new BytesStreamOutput()) { - AggProvider aggProviderWithEx = new 
AggProvider(validAggProvider.getAggs(), - validAggProvider.getParsedAggs(), - new IOException("bad parsing")); - output.setVersion(Version.V_6_0_0); - IOException ex = expectThrows(IOException.class, () -> aggProviderWithEx.writeTo(output)); - assertThat(ex.getMessage(), equalTo("bad parsing")); - } - - try (BytesStreamOutput output = new BytesStreamOutput()) { - AggProvider aggProviderWithEx = new AggProvider(validAggProvider.getAggs(), - validAggProvider.getParsedAggs(), - new ElasticsearchException("bad parsing")); - output.setVersion(Version.V_6_0_0); - ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> aggProviderWithEx.writeTo(output)); - assertNotNull(ex.getCause()); - assertThat(ex.getCause().getMessage(), equalTo("bad parsing")); - } - - try (BytesStreamOutput output = new BytesStreamOutput()) { - AggProvider aggProviderWithOutParsed = new AggProvider(validAggProvider.getAggs(), null, null); - output.setVersion(Version.V_6_0_0); - ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> aggProviderWithOutParsed.writeTo(output)); - assertThat(ex.getMessage(), equalTo("Unsupported operation: parsed aggregations are null")); - } - } - @Override protected AggProvider mutateInstance(AggProvider instance) throws IOException { Exception parsingException = instance.getParsingException(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java index 6b664777a2d86..062504cbfdc3c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java @@ -762,10 +762,10 @@ public void testSerializationOfComplexAggsBetweenVersions() throws IOException { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(Version.V_6_0_0); + output.setVersion(Version.CURRENT); datafeedConfig.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { - in.setVersion(Version.V_6_0_0); + in.setVersion(Version.CURRENT); DatafeedConfig streamedDatafeedConfig = new DatafeedConfig(in); assertEquals(datafeedConfig, streamedDatafeedConfig); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index 571c9e81a9068..6aa7487147ca1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -321,10 +321,10 @@ public void testSerializationOfComplexAggsBetweenVersions() throws IOException { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(Version.V_6_0_0); + output.setVersion(Version.CURRENT); datafeedUpdate.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { - in.setVersion(Version.V_6_0_0); + in.setVersion(Version.CURRENT); DatafeedUpdate 
streamedDatafeedUpdate = new DatafeedUpdate(in); assertEquals(datafeedUpdate, streamedDatafeedUpdate); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProviderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProviderTests.java index fb6c2e280d975..8d113aba33579 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProviderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/QueryProviderTests.java @@ -5,13 +5,8 @@ */ package org.elasticsearch.xpack.core.ml.datafeed; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.Version; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.DeprecationHandler; @@ -32,9 +27,7 @@ import java.util.Collections; import java.util.Map; -import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; public class QueryProviderTests extends AbstractSerializingTestCase { @@ -96,74 +89,6 @@ public void testEmptyQueryMap() throws IOException { assertThat(e.getMessage(), equalTo("Datafeed query is not parsable")); } - public void testSerializationBetweenBugVersion() throws IOException { - QueryProvider tempQueryProvider = createRandomValidQueryProvider(); - QueryProvider queryProviderWithEx = new QueryProvider(tempQueryProvider.getQuery(), - tempQueryProvider.getParsedQuery(), - new IOException("ex")); - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(Version.V_6_6_2); - queryProviderWithEx.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) { - in.setVersion(Version.V_6_6_2); - QueryProvider streamedQueryProvider = QueryProvider.fromStream(in); - assertThat(streamedQueryProvider.getQuery(), equalTo(queryProviderWithEx.getQuery())); - assertThat(streamedQueryProvider.getParsingException(), is(nullValue())); - - QueryBuilder streamedParsedQuery = XContentObjectTransformer.queryBuilderTransformer(xContentRegistry()) - .fromMap(streamedQueryProvider.getQuery()); - assertThat(streamedParsedQuery, equalTo(queryProviderWithEx.getParsedQuery())); - assertThat(streamedQueryProvider.getParsedQuery(), is(nullValue())); - } - } - } - - public void testSerializationBetweenEagerVersion() throws IOException { - QueryProvider validQueryProvider = createRandomValidQueryProvider(); - - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(Version.V_6_0_0); - validQueryProvider.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry())) { - in.setVersion(Version.V_6_0_0); - - QueryProvider streamedQueryProvider = QueryProvider.fromStream(in); - XContentObjectTransformer transformer = XContentObjectTransformer.queryBuilderTransformer(xContentRegistry()); - Map sourceQueryMapWithDefaults = transformer.toMap(transformer.fromMap(validQueryProvider.getQuery())); - - assertThat(streamedQueryProvider.getQuery(), 
equalTo(sourceQueryMapWithDefaults)); - assertThat(streamedQueryProvider.getParsingException(), is(nullValue())); - assertThat(streamedQueryProvider.getParsedQuery(), equalTo(validQueryProvider.getParsedQuery())); - } - } - - try (BytesStreamOutput output = new BytesStreamOutput()) { - QueryProvider queryProviderWithEx = new QueryProvider(validQueryProvider.getQuery(), - validQueryProvider.getParsedQuery(), - new IOException("bad parsing")); - output.setVersion(Version.V_6_0_0); - IOException ex = expectThrows(IOException.class, () -> queryProviderWithEx.writeTo(output)); - assertThat(ex.getMessage(), equalTo("bad parsing")); - } - - try (BytesStreamOutput output = new BytesStreamOutput()) { - QueryProvider queryProviderWithEx = new QueryProvider(validQueryProvider.getQuery(), - validQueryProvider.getParsedQuery(), - new ElasticsearchException("bad parsing")); - output.setVersion(Version.V_6_0_0); - ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> queryProviderWithEx.writeTo(output)); - assertNotNull(ex.getCause()); - assertThat(ex.getCause().getMessage(), equalTo("bad parsing")); - } - - try (BytesStreamOutput output = new BytesStreamOutput()) { - QueryProvider queryProviderWithOutParsed = new QueryProvider(validQueryProvider.getQuery(), null, null); - output.setVersion(Version.V_6_0_0); - ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> queryProviderWithOutParsed.writeTo(output)); - assertThat(ex.getMessage(), equalTo("Unsupported operation: parsed query is null")); - } - } - @Override protected QueryProvider mutateInstance(QueryProvider instance) throws IOException { Exception parsingException = instance.getParsingException(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java index e249b22a4a896..eb4f2c0bbc2da 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.test.VersionUtils; import java.util.ArrayList; import java.util.Arrays; @@ -91,7 +92,7 @@ public JobUpdate createRandom(String jobId, @Nullable Job job) { update.setModelSnapshotMinVersion(Version.CURRENT); } if (useInternalParser && randomBoolean()) { - update.setJobVersion(randomFrom(Version.CURRENT, Version.V_6_2_0, Version.V_6_1_0)); + update.setJobVersion(VersionUtils.randomCompatibleVersion(random(), Version.CURRENT)); } if (useInternalParser) { update.setClearFinishTime(randomBoolean()); @@ -213,7 +214,7 @@ public void testMergeWithJob() { updateBuilder.setCategorizationFilters(categorizationFilters); updateBuilder.setCustomSettings(customSettings); updateBuilder.setModelSnapshotId(randomAlphaOfLength(10)); - updateBuilder.setJobVersion(Version.V_6_1_0); + updateBuilder.setJobVersion(VersionUtils.randomCompatibleVersion(random(), Version.CURRENT)); JobUpdate update = updateBuilder.build(); Job.Builder jobBuilder = new Job.Builder("foo"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStatsTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStatsTests.java index e66fea90f049b..90e4bacc3f8b1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSizeStatsTests.java @@ -22,6 +22,8 @@ public class ModelSizeStatsTests extends AbstractSerializingTestCase productionModes = EnumSet.of(License.OperationMode.GOLD, License.OperationMode.PLATINUM, - License.OperationMode.STANDARD); - MetaData.Builder builder = MetaData.builder(); - TestUtils.putLicense(builder, license); - MetaData build = builder.build(); - if (productionModes.contains(license.operationMode()) == false) { - assertTrue(new TLSLicenseBootstrapCheck().check(createTestContext( - Settings.builder().put("xpack.security.transport.ssl.enabled", true).build(), build)).isSuccess()); - } else { - assertTrue(new TLSLicenseBootstrapCheck().check(createTestContext( - Settings.builder().put("xpack.security.transport.ssl.enabled", false).build(), build)).isFailure()); - assertEquals("Transport SSL must be enabled for setups with production licenses. Please set " + - "[xpack.security.transport.ssl.enabled] to [true] or disable security by setting " + - "[xpack.security.enabled] to [false]", - new TLSLicenseBootstrapCheck().check(createTestContext( - Settings.builder().put("xpack.security.transport.ssl.enabled", false).build(), build)).getMessage()); - } + , randomBoolean()).build(), MetaData.EMPTY_META_DATA)).isSuccess()); + } + + public void testBootstrapCheckFailureOnPremiumLicense() throws Exception { + final OperationMode mode = randomFrom(OperationMode.PLATINUM, OperationMode.GOLD, OperationMode.STANDARD); + final Settings.Builder settings = Settings.builder(); + if (randomBoolean()) { + // randomise between default-false & explicit-false + settings.put("xpack.security.transport.ssl.enabled", false); + } + if (randomBoolean()) { + // randomise between default-true & explicit-true + settings.put("xpack.security.enabled", true); + } + + final BootstrapCheck.BootstrapCheckResult result = runBootstrapCheck(mode, settings); + assertTrue("Expected bootstrap failure", result.isFailure()); + assertEquals("Transport SSL must be enabled if security is enabled on a [" + mode.description() + "] license. 
Please set " + + "[xpack.security.transport.ssl.enabled] to [true] or disable security by setting " + + "[xpack.security.enabled] to [false]", + result.getMessage()); + } + + public void testBootstrapCheckSucceedsWithTlsEnabledOnPremiumLicense() throws Exception { + final OperationMode mode = randomFrom(OperationMode.PLATINUM, OperationMode.GOLD, OperationMode.STANDARD); + final Settings.Builder settings = Settings.builder().put("xpack.security.transport.ssl.enabled", true); + final BootstrapCheck.BootstrapCheckResult result = runBootstrapCheck(mode, settings); + assertSuccess(result); + } + + public void testBootstrapCheckFailureOnBasicLicense() throws Exception { + final Settings.Builder settings = Settings.builder().put("xpack.security.enabled", true); + if (randomBoolean()) { + // randomise between default-false & explicit-false + settings.put("xpack.security.transport.ssl.enabled", false); + } + final BootstrapCheck.BootstrapCheckResult result = runBootstrapCheck(OperationMode.BASIC, settings); + assertTrue("Expected bootstrap failure", result.isFailure()); + assertEquals("Transport SSL must be enabled if security is enabled on a [basic] license. Please set " + + "[xpack.security.transport.ssl.enabled] to [true] or disable security by setting " + + "[xpack.security.enabled] to [false]", + result.getMessage()); + } + + public void testBootstrapSucceedsIfSecurityIsNotEnabledOnBasicLicense() throws Exception { + final Settings.Builder settings = Settings.builder(); + if (randomBoolean()) { + // randomise between default-false & explicit-false + settings.put("xpack.security.enabled", false); + } + if (randomBoolean()) { + // it does not matter whether or not this is set, as security is not enabled. + settings.put("xpack.security.transport.ssl.enabled", randomBoolean()); } + final BootstrapCheck.BootstrapCheckResult result = runBootstrapCheck(OperationMode.BASIC, settings); + assertSuccess(result); } + + public void testBootstrapSucceedsIfTlsIsEnabledOnBasicLicense() throws Exception { + final Settings.Builder settings = Settings.builder().put("xpack.security.transport.ssl.enabled", true); + if (randomBoolean()) { + // it does not matter whether or not this is set, as TLS is enabled. + settings.put("xpack.security.enabled", randomBoolean()); + } + final BootstrapCheck.BootstrapCheckResult result = runBootstrapCheck(OperationMode.BASIC, settings); + assertSuccess(result); + } + + public void testBootstrapCheckAlwaysSucceedsOnTrialLicense() throws Exception { + final Settings.Builder settings = Settings.builder(); + if (randomBoolean()) { + // it does not matter whether this is set, or to which value. + settings.put("xpack.security.enabled", randomBoolean()); + } + if (randomBoolean()) { + // it does not matter whether this is set, or to which value. 
+ settings.put("xpack.security.transport.ssl.enabled", randomBoolean()); + } + final BootstrapCheck.BootstrapCheckResult result = runBootstrapCheck(OperationMode.TRIAL, settings); + assertSuccess(result); + } + + public BootstrapCheck.BootstrapCheckResult runBootstrapCheck(OperationMode mode, Settings.Builder settings) throws Exception { + final License license = TestUtils.generateSignedLicense(mode.description(), TimeValue.timeValueHours(24)); + MetaData.Builder builder = MetaData.builder(); + TestUtils.putLicense(builder, license); + MetaData metaData = builder.build(); + final BootstrapContext context = createTestContext(settings.build(), metaData); + return new TLSLicenseBootstrapCheck().check(context); + } + + public void assertSuccess(BootstrapCheck.BootstrapCheckResult result) { + if (result.isFailure()) { + fail("Bootstrap check failed unexpectedly: " + result.getMessage()); + } + } + } diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks index d6dc21c1bd5ff..b4cf27dfd26a8 100644 Binary files a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks and b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks differ diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.pem b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.pem new file mode 100644 index 0000000000000..f8f29e47e0d35 --- /dev/null +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.pem @@ -0,0 +1,8 @@ +Bag Attributes + friendlyName: testnode_ec + localKeyID: 54 69 6D 65 20 31 35 35 36 30 33 32 36 30 37 32 33 30 +Key Attributes: +-----BEGIN PRIVATE KEY----- +MEECAQAwEwYHKoZIzj0CAQYIKoZIzj0DAQcEJzAlAgEBBCCxFwoKqfcGailZhuh0 +xEj3gssdjuEw6BasiC8+zhqHBA== +-----END PRIVATE KEY----- diff --git a/x-pack/plugin/data-frame/build.gradle b/x-pack/plugin/data-frame/build.gradle index e065f72e99880..1e939b2ceb949 100644 --- a/x-pack/plugin/data-frame/build.gradle +++ b/x-pack/plugin/data-frame/build.gradle @@ -13,6 +13,9 @@ dependencies { compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } } // xpack modules are installed in real clusters as the meta plugin, so diff --git a/x-pack/plugin/data-frame/qa/multi-node-tests/build.gradle b/x-pack/plugin/data-frame/qa/multi-node-tests/build.gradle index ab170d6be364f..0517f71b2dd3e 100644 --- a/x-pack/plugin/data-frame/qa/multi-node-tests/build.gradle +++ b/x-pack/plugin/data-frame/qa/multi-node-tests/build.gradle @@ -4,6 +4,9 @@ apply plugin: 'elasticsearch.rest-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('data-frame'), configuration: 'runtime') } diff --git 
a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java index 84f3e05de5cd1..3a6ab2e5b71d2 100644 --- a/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java +++ b/x-pack/plugin/data-frame/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameIntegTestCase.java @@ -93,11 +93,11 @@ protected StartDataFrameTransformAction.Response startDataFrameTransform(String new StartDataFrameTransformAction.Request(id, false)).actionGet(); } - protected DeleteDataFrameTransformAction.Response deleteDataFrameTransform(String id) { - DeleteDataFrameTransformAction.Response response = client().execute(DeleteDataFrameTransformAction.INSTANCE, + protected AcknowledgedResponse deleteDataFrameTransform(String id) { + AcknowledgedResponse response = client().execute(DeleteDataFrameTransformAction.INSTANCE, new DeleteDataFrameTransformAction.Request(id)) .actionGet(); - if (response.isDeleted()) { + if (response.isAcknowledged()) { transformConfigs.remove(id); } return response; @@ -172,7 +172,13 @@ protected AggregationConfig createAggConfig(AggregatorFactories.Builder aggregat protected PivotConfig createPivotConfig(Map groups, AggregatorFactories.Builder aggregations) throws Exception { - return new PivotConfig(createGroupConfig(groups), createAggConfig(aggregations)); + return createPivotConfig(groups, aggregations, null); + } + + protected PivotConfig createPivotConfig(Map groups, + AggregatorFactories.Builder aggregations, + Integer size) throws Exception { + return new PivotConfig(createGroupConfig(groups), createAggConfig(aggregations), size); } protected DataFrameTransformConfig createTransformConfig(String id, diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/build.gradle b/x-pack/plugin/data-frame/qa/single-node-tests/build.gradle index 11014c764e330..3fbd779f36239 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/build.gradle +++ b/x-pack/plugin/data-frame/qa/single-node-tests/build.gradle @@ -4,6 +4,9 @@ apply plugin: 'elasticsearch.rest-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('data-frame'), configuration: 'runtime') } diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index 71f0dc248b8b6..770eaec7bd141 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -428,6 +428,56 @@ public void testPivotWithBucketScriptAgg() throws Exception { assertEquals(3.878048780, actual.doubleValue(), 0.000001); } + public void testPivotWithGeoCentroidAgg() throws Exception { + String transformId = "geoCentroidPivot"; + String dataFrameIndex = 
"geo_centroid_pivot_reviews"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, dataFrameIndex); + + final Request createDataframeTransformRequest = createRequestWithAuth("PUT", DATAFRAME_ENDPOINT + transformId, + BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS); + + String config = "{" + + " \"source\": {\"index\":\"" + REVIEWS_INDEX_NAME + "\"}," + + " \"dest\": {\"index\":\"" + dataFrameIndex + "\"},"; + + config += " \"pivot\": {" + + " \"group_by\": {" + + " \"reviewer\": {" + + " \"terms\": {" + + " \"field\": \"user_id\"" + + " } } }," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"avg\": {" + + " \"field\": \"stars\"" + + " } }," + + " \"location\": {" + + " \"geo_centroid\": {\"field\": \"location\"}" + + " } } }" + + "}"; + + createDataframeTransformRequest.setJsonEntity(config); + Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); + assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + + startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS); + assertTrue(indexExists(dataFrameIndex)); + + // we expect 27 documents as there shall be 27 user_id's + Map indexStats = getAsMap(dataFrameIndex + "/_stats"); + assertEquals(27, XContentMapValues.extractValue("_all.total.docs.count", indexStats)); + + // get and check some users + Map searchResult = getAsMap(dataFrameIndex + "/_search?q=reviewer:user_4"); + assertEquals(1, XContentMapValues.extractValue("hits.total.value", searchResult)); + Number actual = (Number) ((List) XContentMapValues.extractValue("hits.hits._source.avg_rating", searchResult)).get(0); + assertEquals(3.878048780, actual.doubleValue(), 0.000001); + String actualString = (String) ((List) XContentMapValues.extractValue("hits.hits._source.location", searchResult)).get(0); + String[] latlon = actualString.split(","); + assertEquals((4 + 10), Double.valueOf(latlon[0]), 0.000001); + assertEquals((4 + 15), Double.valueOf(latlon[1]), 0.000001); + } + private void assertOnePivotValue(String query, double expected) throws IOException { Map searchResult = getAsMap(query); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index 85c0ac44a69af..7ffa5391b7a4a 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -21,6 +21,7 @@ import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; +import org.junit.After; import org.junit.AfterClass; import java.io.IOException; @@ -76,6 +77,9 @@ protected void createReviewsIndex() throws IOException { .startObject("stars") .field("type", "integer") .endObject() + .startObject("location") + .field("type", "geo_point") + .endObject() .endObject() .endObject(); } @@ -103,6 +107,7 @@ protected void createReviewsIndex() throws IOException { min = 10 + (i % 49); } int sec = 10 + (i % 49); + String location = (user + 10) + "," + (user + 15); String date_string = "2017-01-" + day + "T" + 
hour + ":" + min + ":" + sec + "Z"; bulk.append("{\"user_id\":\"") @@ -113,7 +118,9 @@ protected void createReviewsIndex() throws IOException { .append(business) .append("\",\"stars\":") .append(stars) - .append(",\"timestamp\":\"") + .append(",\"location\":\"") + .append(location) + .append("\",\"timestamp\":\"") .append(date_string) .append("\"}\n"); @@ -272,16 +279,20 @@ protected static void deleteDataFrameTransform(String transformId) throws IOExce adminClient().performRequest(request); } - @AfterClass - public static void removeIndices() throws Exception { + @After + public void waitForDataFrame() throws Exception { wipeDataFrameTransforms(); waitForPendingDataFrameTasks(); + } + + @AfterClass + public static void removeIndices() throws Exception { // we might have disabled wiping indices, but now its time to get rid of them // note: can not use super.cleanUpCluster() as this method must be static wipeIndices(); } - protected static void wipeDataFrameTransforms() throws IOException, InterruptedException { + public void wipeDataFrameTransforms() throws IOException { List> transformConfigs = getDataFrameTransforms(); for (Map transformConfig : transformConfigs) { String transformId = (String) transformConfig.get("id"); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformProgressIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformProgressIT.java index d338d6949f07b..194d35e8ba636 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformProgressIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameTransformProgressIT.java @@ -130,7 +130,7 @@ public void testGetProgress() throws Exception { AggregatorFactories.Builder aggs = new AggregatorFactories.Builder(); aggs.addAggregator(AggregationBuilders.avg("avg_rating").field("stars")); AggregationConfig aggregationConfig = new AggregationConfig(Collections.emptyMap(), aggs); - PivotConfig pivotConfig = new PivotConfig(histgramGroupConfig, aggregationConfig); + PivotConfig pivotConfig = new PivotConfig(histgramGroupConfig, aggregationConfig, null); DataFrameTransformConfig config = new DataFrameTransformConfig("get_progress_transform", sourceConfig, destConfig, @@ -149,7 +149,7 @@ public void testGetProgress() throws Exception { QueryConfig queryConfig = new QueryConfig(Collections.emptyMap(), QueryBuilders.termQuery("user_id", "user_26")); - pivotConfig = new PivotConfig(histgramGroupConfig, aggregationConfig); + pivotConfig = new PivotConfig(histgramGroupConfig, aggregationConfig, null); sourceConfig = new SourceConfig(new String[]{REVIEWS_INDEX_NAME}, queryConfig); config = new DataFrameTransformConfig("get_progress_transform", sourceConfig, diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java index 24ce173b37567..4f209c5a9f3f4 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java @@ -10,7 
+10,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; import org.junit.Before; @@ -72,7 +72,7 @@ public void testUsage() throws Exception { Request statsExistsRequest = new Request("GET", DataFrameInternalIndex.INDEX_NAME+"/_search?q=" + INDEX_DOC_TYPE.getPreferredName() + ":" + - DataFrameIndexerTransformStats.NAME); + DataFrameTransformStateAndStats.NAME); // Verify that we have our two stats documents assertBusy(() -> { Map hasStatsMap = entityAsMap(client().performRequest(statsExistsRequest)); @@ -100,7 +100,6 @@ public void testUsage() throws Exception { expectedStats.merge(statName, statistic, Integer::sum); } - usageResponse = client().performRequest(new Request("GET", "_xpack/usage")); usageAsMap = entityAsMap(usageResponse); @@ -109,7 +108,8 @@ public void testUsage() throws Exception { assertEquals(1, XContentMapValues.extractValue("data_frame.transforms.started", usageAsMap)); assertEquals(2, XContentMapValues.extractValue("data_frame.transforms.stopped", usageAsMap)); for(String statName : PROVIDED_STATS) { - assertEquals(expectedStats.get(statName), XContentMapValues.extractValue("data_frame.stats."+statName, usageAsMap)); + assertEquals("Incorrect stat " + statName, + expectedStats.get(statName), XContentMapValues.extractValue("data_frame.stats." + statName, usageAsMap)); } } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java index b7e6c235f8e6c..34343e5fe8820 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java @@ -76,10 +76,8 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.function.Supplier; import java.util.function.UnaryOperator; @@ -90,11 +88,7 @@ public class DataFrame extends Plugin implements ActionPlugin, PersistentTaskPlu public static final String NAME = "data_frame"; public static final String TASK_THREAD_POOL_NAME = "data_frame_indexing"; - // list of headers that will be stored when a transform is created - public static final Set HEADER_FILTERS = new HashSet<>( - Arrays.asList("es-security-runas-user", "_xpack_security_authentication")); - - private static final Logger logger = LogManager.getLogger(XPackPlugin.class); + private static final Logger logger = LogManager.getLogger(DataFrame.class); private final boolean enabled; private final Settings settings; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java index 029fe88766df5..82b8a6060e44e 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java @@ -35,6 +35,7 @@ import 
org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransform; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformState; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformTaskState; import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; @@ -176,6 +177,7 @@ static DataFrameIndexerTransformStats parseSearchAggs(SearchResponse searchRespo for(String statName : PROVIDED_STATS) { Aggregation agg = searchResponse.getAggregations().get(statName); + if (agg instanceof NumericMetricsAggregation.SingleValue) { statisticsList.add((long)((NumericMetricsAggregation.SingleValue)agg).value()); } else { @@ -197,14 +199,15 @@ static DataFrameIndexerTransformStats parseSearchAggs(SearchResponse searchRespo static void getStatisticSummations(Client client, ActionListener statsListener) { QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), - DataFrameIndexerTransformStats.NAME))); + DataFrameTransformStateAndStats.NAME))); SearchRequestBuilder requestBuilder = client.prepareSearch(DataFrameInternalIndex.INDEX_NAME) .setSize(0) .setQuery(queryBuilder); + final String path = DataFrameField.STATS_FIELD.getPreferredName() + "."; for(String statName : PROVIDED_STATS) { - requestBuilder.addAggregation(AggregationBuilders.sum(statName).field(statName)); + requestBuilder.addAggregation(AggregationBuilders.sum(statName).field(path + statName)); } ActionListener getStatisticSummationsListener = ActionListener.wrap( @@ -213,6 +216,7 @@ static void getStatisticSummations(Client client, ActionListener { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameNodes.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameNodes.java new file mode 100644 index 0000000000000..1b2c54b331f42 --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameNodes.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.action; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; + +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public final class DataFrameNodes { + + private DataFrameNodes() { + } + + /** + * Get the list of nodes the data frames are executing on + * + * @param dataFrameIds The data frames. 
+ * @param clusterState State + * @return The executor nodes + */ + public static String[] dataFrameTaskNodes(List dataFrameIds, ClusterState clusterState) { + + Set executorNodes = new HashSet<>(); + + PersistentTasksCustomMetaData tasksMetaData = + PersistentTasksCustomMetaData.getPersistentTasksCustomMetaData(clusterState); + + if (tasksMetaData != null) { + Set dataFrameIdsSet = new HashSet<>(dataFrameIds); + + Collection> tasks = + tasksMetaData.findTasks(DataFrameField.TASK_NAME, t -> dataFrameIdsSet.contains(t.getId())); + + for (PersistentTasksCustomMetaData.PersistentTask task : tasks) { + executorNodes.add(task.getExecutorNode()); + } + } + + return executorNodes.toArray(new String[0]); + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java index 2cdc4009e785b..ac40334dfb443 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java @@ -5,93 +5,73 @@ */ package org.elasticsearch.xpack.dataframe.action; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionListenerResponseHandler; -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.discovery.MasterNotDiscoveredException; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.tasks.Task; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.dataframe.action.DeleteDataFrameTransformAction; import org.elasticsearch.xpack.core.dataframe.action.DeleteDataFrameTransformAction.Request; -import org.elasticsearch.xpack.core.dataframe.action.DeleteDataFrameTransformAction.Response; -import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; -import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformTask; -import java.util.List; +import java.io.IOException; -public class TransportDeleteDataFrameTransformAction extends TransportTasksAction { +public class TransportDeleteDataFrameTransformAction extends TransportMasterNodeAction { private final DataFrameTransformsConfigManager transformsConfigManager; @Inject - public TransportDeleteDataFrameTransformAction(TransportService 
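The DataFrameNodes helper added above is how the stats and stop transport actions later in this diff route their task requests only to nodes that are actually running the targeted transforms. A minimal usage sketch, mirroring the calls added below (request, ids and clusterService as in those doExecute methods):

// Expand the requested IDs, then restrict the tasks request to the executor nodes.
request.setExpandedIds(ids);
request.setNodes(DataFrameNodes.dataFrameTaskNodes(ids, clusterService.state()));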
transportService, ActionFilters actionFilters, - ClusterService clusterService, DataFrameTransformsConfigManager transformsConfigManager) { - super(DeleteDataFrameTransformAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, - Response::new, ThreadPool.Names.SAME); + public TransportDeleteDataFrameTransformAction(TransportService transportService, ActionFilters actionFilters, ThreadPool threadPool, + ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver, + DataFrameTransformsConfigManager transformsConfigManager) { + super(DeleteDataFrameTransformAction.NAME, transportService, clusterService, threadPool, actionFilters, + Request::new, indexNameExpressionResolver); this.transformsConfigManager = transformsConfigManager; } @Override - protected Response newResponse(Request request, List tasks, List taskOperationFailures, - List failedNodeExceptions) { - assert tasks.size() + taskOperationFailures.size() == 1; - boolean cancelled = tasks.size() > 0 && tasks.stream().allMatch(Response::isDeleted); + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); + } - return new Response(cancelled, taskOperationFailures, failedNodeExceptions); + protected AcknowledgedResponse read(StreamInput in) throws IOException { + AcknowledgedResponse response = new AcknowledgedResponse(); + response.readFrom(in); + return response; } @Override - protected void taskOperation(Request request, DataFrameTransformTask task, ActionListener listener) { - assert task.getTransformId().equals(request.getId()); - IndexerState state = task.getState().getIndexerState(); - if (state.equals(IndexerState.STOPPED)) { - task.onCancelled(); - transformsConfigManager.deleteTransform(request.getId(), ActionListener.wrap(r -> { - listener.onResponse(new Response(true)); - }, listener::onFailure)); + protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { + PersistentTasksCustomMetaData pTasksMeta = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + if (pTasksMeta != null && pTasksMeta.getTask(request.getId()) != null) { + listener.onFailure(new ElasticsearchStatusException("Cannot delete data frame [" + request.getId() + + "] as the task is running. Stop the task first", RestStatus.CONFLICT)); } else { - listener.onFailure(new IllegalStateException("Could not delete transform [" + request.getId() + "] because " - + "indexer state is [" + state + "]. 
Transform must be [" + IndexerState.STOPPED + "] before deletion.")); + // Task is not running, delete the configuration document + transformsConfigManager.deleteTransform(request.getId(), ActionListener.wrap( + r -> listener.onResponse(new AcknowledgedResponse(r)), + listener::onFailure)); } } @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - final ClusterState state = clusterService.state(); - final DiscoveryNodes nodes = state.nodes(); - if (nodes.isLocalNodeElectedMaster()) { - PersistentTasksCustomMetaData pTasksMeta = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - if (pTasksMeta != null && pTasksMeta.getTask(request.getId()) != null) { - super.doExecute(task, request, listener); - } else { - // we couldn't find the transform in the persistent task CS, but maybe the transform exists in the configuration index, - // if so delete the orphaned document and do not throw (for the normal case we want to stop the task first, - // than delete the configuration document if and only if the data frame transform is in stopped state) - transformsConfigManager.deleteTransform(request.getId(), ActionListener.wrap(r -> { - listener.onResponse(new Response(true)); - return; - }, listener::onFailure)); - } - } else { - // Delegates DeleteTransform to elected master node, so it becomes the coordinating node. - // Non-master nodes may have a stale cluster state that shows transforms which are cancelled - // on the master, which makes testing difficult. - if (nodes.getMasterNode() == null) { - listener.onFailure(new MasterNotDiscoveredException("no known master nodes")); - } else { - transportService.sendRequest(nodes.getMasterNode(), actionName, request, - new ActionListenerResponseHandler<>(listener, Response::new)); - } - } + protected ClusterBlockException checkBlock(Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java index 7ec5beb1131d4..df2d09a875d19 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java @@ -9,51 +9,29 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; -import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; -import 
org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction; import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction.Request; import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction.Response; -import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpointingInfo; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; import org.elasticsearch.xpack.dataframe.checkpoint.DataFrameTransformsCheckpointService; -import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformTask; -import java.io.IOException; -import java.io.InputStream; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; @@ -69,18 +47,16 @@ public class TransportGetDataFrameTransformsStatsAction extends private static final Logger logger = LogManager.getLogger(TransportGetDataFrameTransformsStatsAction.class); - private final Client client; private final DataFrameTransformsConfigManager dataFrameTransformsConfigManager; private final DataFrameTransformsCheckpointService transformsCheckpointService; @Inject public TransportGetDataFrameTransformsStatsAction(TransportService transportService, ActionFilters actionFilters, - ClusterService clusterService, Client client, + ClusterService clusterService, DataFrameTransformsConfigManager dataFrameTransformsConfigManager, DataFrameTransformsCheckpointService transformsCheckpointService) { super(GetDataFrameTransformsStatsAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, Response::new, ThreadPool.Names.SAME); - this.client = client; this.dataFrameTransformsConfigManager = dataFrameTransformsConfigManager; this.transformsCheckpointService = transformsCheckpointService; } @@ -124,6 +100,7 @@ protected void doExecute(Task task, Request request, ActionListener fi dataFrameTransformsConfigManager.expandTransformIds(request.getId(), request.getPageParams(), ActionListener.wrap( ids -> { request.setExpandedIds(ids); + request.setNodes(DataFrameNodes.dataFrameTaskNodes(ids, clusterService.state())); super.doExecute(task, request, ActionListener.wrap( response -> collectStatsForTransformsWithoutTasks(request, response, finalListener), finalListener::onFailure @@ -131,7 +108,6 @@ protected void doExecute(Task task, Request request, ActionListener fi }, e -> { // If the index to search, 
or the individual config is not there, just return empty - logger.error("failed to expand ids", e); if (e instanceof ResourceNotFoundException) { finalListener.onResponse(new Response(Collections.emptyList())); } else { @@ -157,32 +133,14 @@ private void collectStatsForTransformsWithoutTasks(Request request, // Small assurance that we are at least below the max. Terms search has a hard limit of 10k, we should at least be below that. assert transformsWithoutTasks.size() <= Request.MAX_SIZE_RETURN; - ActionListener searchStatsListener = ActionListener.wrap( - searchResponse -> { - List nodeFailures = new ArrayList<>(response.getNodeFailures()); - if (searchResponse.getShardFailures().length > 0) { - for(ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) { - String nodeId = ""; - if (shardSearchFailure.shard() != null) { - nodeId = shardSearchFailure.shard().getNodeId(); - } - nodeFailures.add(new FailedNodeException(nodeId, shardSearchFailure.toString(), shardSearchFailure.getCause())); - } - logger.error("transform statistics document search returned shard failures: {}", - Arrays.toString(searchResponse.getShardFailures())); - } + ActionListener> searchStatsListener = ActionListener.wrap( + stats -> { List allStateAndStats = response.getTransformsStateAndStats(); - for(SearchHit hit : searchResponse.getHits().getHits()) { - BytesReference source = hit.getSourceRef(); - try { - DataFrameIndexerTransformStats stats = parseFromSource(source); - allStateAndStats.add(DataFrameTransformStateAndStats.initialStateAndStats(stats.getTransformId(), stats)); - transformsWithoutTasks.remove(stats.getTransformId()); - } catch (IOException e) { - listener.onFailure(new ElasticsearchParseException("Could not parse data frame transform stats", e)); - return; - } - } + allStateAndStats.addAll(stats); + transformsWithoutTasks.removeAll( + stats.stream().map(DataFrameTransformStateAndStats::getId).collect(Collectors.toSet())); + + // Transforms that have not been started and have no state or stats. 
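+                    // For those, an initial (empty) DataFrameTransformStateAndStats entry is synthesized below,
+                    // so every requested transform still appears in the response.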
transformsWithoutTasks.forEach(transformId -> allStateAndStats.add(DataFrameTransformStateAndStats.initialStateAndStats(transformId))); @@ -190,7 +148,7 @@ private void collectStatsForTransformsWithoutTasks(Request request, // it can easily become arbitrarily ordered based on which transforms don't have a task or stats docs allStateAndStats.sort(Comparator.comparing(DataFrameTransformStateAndStats::getId)); - listener.onResponse(new Response(allStateAndStats, response.getTaskFailures(), nodeFailures)); + listener.onResponse(new Response(allStateAndStats, response.getTaskFailures(), response.getNodeFailures())); }, e -> { if (e instanceof IndexNotFoundException) { @@ -201,26 +159,6 @@ private void collectStatsForTransformsWithoutTasks(Request request, } ); - QueryBuilder builder = QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery() - .filter(QueryBuilders.termsQuery(DataFrameField.ID.getPreferredName(), transformsWithoutTasks)) - .filter(QueryBuilders.termQuery(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), DataFrameIndexerTransformStats.NAME))); - - SearchRequest searchRequest = client.prepareSearch(DataFrameInternalIndex.INDEX_NAME) - .addSort(DataFrameField.ID.getPreferredName(), SortOrder.ASC) - .setQuery(builder) - .request(); - - ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(), - ClientHelper.DATA_FRAME_ORIGIN, - searchRequest, - searchStatsListener, client::search); - } - - private static DataFrameIndexerTransformStats parseFromSource(BytesReference source) throws IOException { - try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { - return DataFrameIndexerTransformStats.fromXContent(parser); - } + dataFrameTransformsConfigManager.getTransformStats(transformsWithoutTasks, searchStatsListener); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java index 0b8ef692cdd8c..997739b2407a7 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java @@ -6,8 +6,6 @@ package org.elasticsearch.xpack.dataframe.action; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.ActionListener; @@ -63,8 +61,6 @@ public class TransportPutDataFrameTransformAction extends TransportMasterNodeAction { - private static final Logger logger = LogManager.getLogger(TransportPutDataFrameTransformAction.class); - private final XPackLicenseState licenseState; private final Client client; private final DataFrameTransformsConfigManager dataFrameTransformsConfigManager; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformTaskAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformTaskAction.java index 9f016b58f3b5f..f8e3a3f1e852f 100644 --- 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformTaskAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformTaskAction.java @@ -59,7 +59,7 @@ protected void doExecute(Task task, StartDataFrameTransformTaskAction.Request re protected void taskOperation(StartDataFrameTransformTaskAction.Request request, DataFrameTransformTask transformTask, ActionListener listener) { if (transformTask.getTransformId().equals(request.getId())) { - transformTask.start(listener); + transformTask.start(null, listener); } else { listener.onFailure(new RuntimeException("ID of data frame transform task [" + transformTask.getTransformId() + "] does not match request's ID [" + request.getId() + "]")); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java index 2092493caaf4c..26f5259c69dc8 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java @@ -5,26 +5,25 @@ */ package org.elasticsearch.xpack.dataframe.action; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.discovery.MasterNotDiscoveredException; +import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.action.util.PageParams; -import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; import org.elasticsearch.xpack.core.dataframe.action.StopDataFrameTransformAction; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformTaskState; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; @@ -35,39 +34,56 @@ import java.util.List; import java.util.Set; -import static org.elasticsearch.ExceptionsHelper.convertToElastic; -import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; - public class TransportStopDataFrameTransformAction extends TransportTasksAction { - private static final TimeValue WAIT_FOR_COMPLETION_POLL = timeValueMillis(100); private final ThreadPool threadPool; private final DataFrameTransformsConfigManager dataFrameTransformsConfigManager; + private final 
PersistentTasksService persistentTasksService; @Inject public TransportStopDataFrameTransformAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, ThreadPool threadPool, + PersistentTasksService persistentTasksService, DataFrameTransformsConfigManager dataFrameTransformsConfigManager) { super(StopDataFrameTransformAction.NAME, clusterService, transportService, actionFilters, StopDataFrameTransformAction.Request::new, StopDataFrameTransformAction.Response::new, StopDataFrameTransformAction.Response::new, ThreadPool.Names.SAME); this.threadPool = threadPool; this.dataFrameTransformsConfigManager = dataFrameTransformsConfigManager; + this.persistentTasksService = persistentTasksService; } @Override protected void doExecute(Task task, StopDataFrameTransformAction.Request request, ActionListener listener) { + final ClusterState state = clusterService.state(); + final DiscoveryNodes nodes = state.nodes(); + if (nodes.isLocalNodeElectedMaster() == false) { + // Delegates stop data frame to elected master node so it becomes the coordinating node. + if (nodes.getMasterNode() == null) { + listener.onFailure(new MasterNotDiscoveredException("no known master node")); + } else { + transportService.sendRequest(nodes.getMasterNode(), actionName, request, + new ActionListenerResponseHandler<>(listener, StopDataFrameTransformAction.Response::new)); + } + } else { + final ActionListener finalListener; + if (request.waitForCompletion()) { + finalListener = waitForStopListener(request, listener); + } else { + finalListener = listener; + } - dataFrameTransformsConfigManager.expandTransformIds(request.getId(), new PageParams(0, 10_000), ActionListener.wrap( - expandedIds -> { - request.setExpandedIds(new HashSet<>(expandedIds)); - request.setNodes(dataframeNodes(expandedIds, clusterService.state())); - super.doExecute(task, request, listener); - }, - listener::onFailure - )); + dataFrameTransformsConfigManager.expandTransformIds(request.getId(), new PageParams(0, 10_000), ActionListener.wrap( + expandedIds -> { + request.setExpandedIds(new HashSet<>(expandedIds)); + request.setNodes(DataFrameNodes.dataFrameTaskNodes(expandedIds, clusterService.state())); + super.doExecute(task, request, finalListener); + }, + listener::onFailure + )); + } } @Override @@ -89,42 +105,9 @@ protected void taskOperation(StopDataFrameTransformAction.Request request, DataF RestStatus.CONFLICT)); return; } - if (request.waitForCompletion() == false) { - transformTask.stop(listener); - } else { - ActionListener blockingListener = ActionListener.wrap(response -> { - if (response.isStopped()) { - // The Task acknowledged that it is stopped/stopping... wait until the status actually - // changes over before returning. 
Switch over to Generic threadpool so - // we don't block the network thread - threadPool.generic().execute(() -> { - try { - long untilInNanos = System.nanoTime() + request.getTimeout().getNanos(); - - while (System.nanoTime() - untilInNanos < 0) { - if (transformTask.isStopped()) { - listener.onResponse(response); - return; - } - Thread.sleep(WAIT_FOR_COMPLETION_POLL.millis()); - } - // ran out of time - listener.onFailure(new ElasticsearchTimeoutException( - DataFrameMessages.getMessage(DataFrameMessages.REST_STOP_TRANSFORM_WAIT_FOR_COMPLETION_TIMEOUT, - request.getTimeout().getStringRep(), request.getId()))); - } catch (InterruptedException e) { - listener.onFailure(new ElasticsearchException(DataFrameMessages.getMessage( - DataFrameMessages.REST_STOP_TRANSFORM_WAIT_FOR_COMPLETION_INTERRUPT, request.getId()), e)); - } - }); - } else { - // Did not acknowledge stop, just return the response - listener.onResponse(response); - } - }, listener::onFailure); - - transformTask.stop(blockingListener); - } + + transformTask.stop(); + listener.onResponse(new StopDataFrameTransformAction.Response(Boolean.TRUE)); } else { listener.onFailure(new RuntimeException("ID of data frame indexer task [" + transformTask.getTransformId() + "] does not match request's ID [" + request.getId() + "]")); @@ -136,48 +119,55 @@ protected StopDataFrameTransformAction.Response newResponse(StopDataFrameTransfo List tasks, List taskOperationFailures, List failedNodeExceptions) { - if (taskOperationFailures.isEmpty() == false) { - throw convertToElastic(taskOperationFailures.get(0).getCause()); - } else if (failedNodeExceptions.isEmpty() == false) { - throw convertToElastic(failedNodeExceptions.get(0)); - } - - // Either the transform doesn't exist (the user didn't create it yet) or was deleted - // after the Stop API executed. 
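Context for the code removed above and added below: the hand-rolled sleep/poll loop is replaced by PersistentTasksService#waitForPersistentTasksCondition, which watches cluster state until the supplied predicate holds or the timeout elapses. A condensed sketch of the new wait, using the same parameters as the waitForDataFrameStopped method added below:

// Condensed sketch of the cluster-state based wait (full version: waitForDataFrameStopped below).
persistentTasksService.waitForPersistentTasksCondition(
    tasks -> tasks == null || persistentTaskIds.stream().allMatch(id -> tasks.getTask(id) == null),
    timeout,
    ActionListener.wrap(
        allStopped -> listener.onResponse(new StopDataFrameTransformAction.Response(Boolean.TRUE)),
        listener::onFailure));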
- // In either case, let the user know - if (tasks.size() == 0) { - if (taskOperationFailures.isEmpty() == false) { - throw convertToElastic(taskOperationFailures.get(0).getCause()); - } else if (failedNodeExceptions.isEmpty() == false) { - throw convertToElastic(failedNodeExceptions.get(0)); - } else { - // This can happen we the actual task in the node no longer exists, or was never started - return new StopDataFrameTransformAction.Response(true); - } + if (taskOperationFailures.isEmpty() == false || failedNodeExceptions.isEmpty() == false) { + return new StopDataFrameTransformAction.Response(taskOperationFailures, failedNodeExceptions, false); } + // if tasks is empty allMatch is 'vacuously satisfied' boolean allStopped = tasks.stream().allMatch(StopDataFrameTransformAction.Response::isStopped); return new StopDataFrameTransformAction.Response(allStopped); } - static String[] dataframeNodes(List dataFrameIds, ClusterState clusterState) { + private ActionListener + waitForStopListener(StopDataFrameTransformAction.Request request, + ActionListener listener) { - Set executorNodes = new HashSet<>(); + return ActionListener.wrap( + response -> { + // Wait until the persistent task is stopped + // Switch over to Generic threadpool so we don't block the network thread + threadPool.generic().execute(() -> + waitForDataFrameStopped(request.getExpandedIds(), request.getTimeout(), listener)); + }, + listener::onFailure + ); + } - PersistentTasksCustomMetaData tasksMetaData = - PersistentTasksCustomMetaData.getPersistentTasksCustomMetaData(clusterState); + private void waitForDataFrameStopped(Collection persistentTaskIds, TimeValue timeout, + ActionListener listener) { + persistentTasksService.waitForPersistentTasksCondition(persistentTasksCustomMetaData -> { - if (tasksMetaData != null) { - Set dataFrameIdsSet = new HashSet<>(dataFrameIds); + if (persistentTasksCustomMetaData == null) { + return true; + } - Collection> tasks = - tasksMetaData.findTasks(DataFrameField.TASK_NAME, t -> dataFrameIdsSet.contains(t.getId())); + for (String persistentTaskId : persistentTaskIds) { + if (persistentTasksCustomMetaData.getTask(persistentTaskId) != null) { + return false; + } + } + return true; - for (PersistentTasksCustomMetaData.PersistentTask task : tasks) { - executorNodes.add(task.getExecutorNode()); + }, timeout, new ActionListener<>() { + @Override + public void onResponse(Boolean result) { + listener.onResponse(new StopDataFrameTransformAction.Response(Boolean.TRUE)); } - } - return executorNodes.toArray(new String[0]); + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java index 17a49d8b7e834..e28f8005448d9 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java @@ -17,6 +17,9 @@ import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformProgress; +import 
org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformState; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; import org.elasticsearch.xpack.core.dataframe.transforms.DestConfig; import org.elasticsearch.xpack.core.dataframe.transforms.SourceConfig; @@ -50,7 +53,7 @@ public final class DataFrameInternalIndex { public static final String RAW = "raw"; // data types - public static final String DOUBLE = "double"; + public static final String FLOAT = "float"; public static final String LONG = "long"; public static final String KEYWORD = "keyword"; @@ -129,7 +132,7 @@ private static XContentBuilder mappings() throws IOException { // add the schema for transform configurations addDataFrameTransformsConfigMappings(builder); // add the schema for transform stats - addDataFrameTransformsStatsMappings(builder); + addDataFrameTransformStateAndStatsMappings(builder); // end type builder.endObject(); // end properties @@ -140,37 +143,76 @@ private static XContentBuilder mappings() throws IOException { } - private static XContentBuilder addDataFrameTransformsStatsMappings(XContentBuilder builder) throws IOException { + private static XContentBuilder addDataFrameTransformStateAndStatsMappings(XContentBuilder builder) throws IOException { return builder - .startObject(DataFrameIndexerTransformStats.NUM_PAGES.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataFrameIndexerTransformStats.NUM_INPUT_DOCUMENTS.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataFrameIndexerTransformStats.NUM_OUTPUT_DOCUMENTS.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataFrameIndexerTransformStats.NUM_INVOCATIONS.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataFrameIndexerTransformStats.INDEX_TIME_IN_MS.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataFrameIndexerTransformStats.SEARCH_TIME_IN_MS.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataFrameIndexerTransformStats.INDEX_TOTAL.getPreferredName()) - .field(TYPE, LONG) - .endObject() - .startObject(DataFrameIndexerTransformStats.SEARCH_TOTAL.getPreferredName()) - .field(TYPE, LONG) + .startObject(DataFrameTransformStateAndStats.STATE_FIELD.getPreferredName()) + .startObject(PROPERTIES) + .startObject(DataFrameTransformState.TASK_STATE.getPreferredName()) + .field(TYPE, KEYWORD) + .endObject() + .startObject(DataFrameTransformState.INDEXER_STATE.getPreferredName()) + .field(TYPE, KEYWORD) + .endObject() + .startObject(DataFrameTransformState.CURRENT_POSITION.getPreferredName()) + .field(ENABLED, false) + .endObject() + .startObject(DataFrameTransformState.CHECKPOINT.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameTransformState.REASON.getPreferredName()) + .field(TYPE, KEYWORD) + .endObject() + .startObject(DataFrameTransformState.PROGRESS.getPreferredName()) + .startObject(PROPERTIES) + .startObject(DataFrameTransformProgress.TOTAL_DOCS.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameTransformProgress.DOCS_REMAINING.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameTransformProgress.PERCENT_COMPLETE) + .field(TYPE, FLOAT) + .endObject() + .endObject() + .endObject() + .endObject() .endObject() - .startObject(DataFrameIndexerTransformStats.SEARCH_FAILURES.getPreferredName()) - .field(TYPE, LONG) + .startObject(DataFrameField.STATS_FIELD.getPreferredName()) + 
.startObject(PROPERTIES) + .startObject(DataFrameIndexerTransformStats.NUM_PAGES.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameIndexerTransformStats.NUM_INPUT_DOCUMENTS.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameIndexerTransformStats.NUM_OUTPUT_DOCUMENTS.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameIndexerTransformStats.NUM_INVOCATIONS.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameIndexerTransformStats.INDEX_TIME_IN_MS.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameIndexerTransformStats.SEARCH_TIME_IN_MS.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameIndexerTransformStats.INDEX_TOTAL.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameIndexerTransformStats.SEARCH_TOTAL.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameIndexerTransformStats.SEARCH_FAILURES.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .startObject(DataFrameIndexerTransformStats.INDEX_FAILURES.getPreferredName()) + .field(TYPE, LONG) + .endObject() + .endObject() .endObject() - .startObject(DataFrameIndexerTransformStats.INDEX_FAILURES.getPreferredName()) - .field(TYPE, LONG) + .startObject(DataFrameTransformStateAndStats.CHECKPOINTING_INFO_FIELD.getPreferredName()) + .field(ENABLED, false) .endObject(); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java index e8c1e012b7b30..ab893545a0d50 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java @@ -44,13 +44,14 @@ import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; -import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpoint; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; @@ -274,13 +275,13 @@ public void deleteTransform(String transformId, ActionListener listener })); } - public void putOrUpdateTransformStats(DataFrameIndexerTransformStats stats, ActionListener listener) { + public void putOrUpdateTransformStats(DataFrameTransformStateAndStats stats, ActionListener listener) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder source = stats.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); IndexRequest indexRequest = new IndexRequest(DataFrameInternalIndex.INDEX_NAME) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .id(DataFrameIndexerTransformStats.documentId(stats.getTransformId())) + .id(DataFrameTransformStateAndStats.documentId(stats.getTransformId())) .source(source); 
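// executeAsyncWithOrigin (from ClientHelper) stashes the data-frame origin in the thread context,
// so this internal index write is authorized as a system action rather than as the calling user.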
executeAsyncWithOrigin(client, DATA_FRAME_ORIGIN, IndexAction.INSTANCE, indexRequest, ActionListener.wrap( @@ -297,8 +298,8 @@ public void putOrUpdateTransformStats(DataFrameIndexerTransformStats stats, Acti } } - public void getTransformStats(String transformId, ActionListener resultListener) { - GetRequest getRequest = new GetRequest(DataFrameInternalIndex.INDEX_NAME, DataFrameIndexerTransformStats.documentId(transformId)); + public void getTransformStats(String transformId, ActionListener resultListener) { + GetRequest getRequest = new GetRequest(DataFrameInternalIndex.INDEX_NAME, DataFrameTransformStateAndStats.documentId(transformId)); executeAsyncWithOrigin(client, DATA_FRAME_ORIGIN, GetAction.INSTANCE, getRequest, ActionListener.wrap(getResponse -> { if (getResponse.isExists() == false) { @@ -310,7 +311,7 @@ public void getTransformStats(String transformId, ActionListener transformIds, ActionListener> listener) { + + QueryBuilder builder = QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery() + .filter(QueryBuilders.termsQuery(DataFrameField.ID.getPreferredName(), transformIds)) + .filter(QueryBuilders.termQuery(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), DataFrameTransformStateAndStats.NAME))); + + SearchRequest searchRequest = client.prepareSearch(DataFrameInternalIndex.INDEX_NAME) + .addSort(DataFrameField.ID.getPreferredName(), SortOrder.ASC) + .setQuery(builder) + .request(); + + executeAsyncWithOrigin(client.threadPool().getThreadContext(), DATA_FRAME_ORIGIN, searchRequest, + ActionListener.wrap( + searchResponse -> { + List stats = new ArrayList<>(); + for (SearchHit hit : searchResponse.getHits().getHits()) { + BytesReference source = hit.getSourceRef(); + try (InputStream stream = source.streamInput(); + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + stats.add(DataFrameTransformStateAndStats.fromXContent(parser)); + } catch (IOException e) { + listener.onFailure( + new ElasticsearchParseException("failed to parse data frame stats from search hit", e)); + return; + } + } + + listener.onResponse(stats); + }, + e -> { + if (e.getClass() == IndexNotFoundException.class) { + listener.onResponse(Collections.emptyList()); + } else { + listener.onFailure(e); + } + } + ), client::search); + } + private void parseTransformLenientlyFromSource(BytesReference source, String transformId, ActionListener transformListener) { try (InputStream stream = source.streamInput(); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/BaseTasksResponseToXContentListener.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/BaseTasksResponseToXContentListener.java new file mode 100644 index 0000000000000..def26a52efb87 --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/BaseTasksResponseToXContentListener.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.rest.action; + +import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestToXContentListener; + + +class BaseTasksResponseToXContentListener extends RestToXContentListener { + + BaseTasksResponseToXContentListener(RestChannel channel) { + super(channel); + } + + @Override + protected RestStatus getStatus(T response) { + if (response.getNodeFailures().size() > 0 || response.getTaskFailures().size() > 0) { + return RestStatus.INTERNAL_SERVER_ERROR; + } + return RestStatus.OK; + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformAction.java index 0efa3ffa2c5fd..125e61b5021e4 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.action.DeleteDataFrameTransformAction; @@ -35,15 +34,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient DeleteDataFrameTransformAction.Request request = new DeleteDataFrameTransformAction.Request(id); return channel -> client.execute(DeleteDataFrameTransformAction.INSTANCE, request, - new RestToXContentListener(channel) { - @Override - protected RestStatus getStatus(DeleteDataFrameTransformAction.Response response) { - if (response.getNodeFailures().size() > 0 || response.getTaskFailures().size() > 0) { - return RestStatus.INTERNAL_SERVER_ERROR; - } - return RestStatus.OK; - } - }); + new RestToXContentListener<>(channel)); } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java index 87cc13edbc329..f2d14f8106958 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.action.GetDataFrameTransformsStatsAction; @@ -33,7 +32,8 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM), 
restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE))); } - return channel -> client.execute(GetDataFrameTransformsStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute(GetDataFrameTransformsStatsAction.INSTANCE, request, + new BaseTasksResponseToXContentListener<>(channel)); } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameTransformAction.java index 1d9b3f29a6133..764aeca4a6480 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameTransformAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.action.StartDataFrameTransformAction; @@ -31,7 +30,8 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient boolean force = restRequest.paramAsBoolean(DataFrameField.FORCE.getPreferredName(), false); StartDataFrameTransformAction.Request request = new StartDataFrameTransformAction.Request(id, force); request.timeout(restRequest.paramAsTime(DataFrameField.TIMEOUT.getPreferredName(), AcknowledgedRequest.DEFAULT_ACK_TIMEOUT)); - return channel -> client.execute(StartDataFrameTransformAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute(StartDataFrameTransformAction.INSTANCE, request, + new BaseTasksResponseToXContentListener<>(channel)); } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java index e93898b905ba1..d34478b9ba941 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.action.StopDataFrameTransformAction; @@ -34,7 +33,8 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient StopDataFrameTransformAction.Request request = new StopDataFrameTransformAction.Request(id, waitForCompletion, force, timeout); - return channel -> client.execute(StopDataFrameTransformAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute(StopDataFrameTransformAction.INSTANCE, request, + new BaseTasksResponseToXContentListener<>(channel)); } @Override diff --git 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java index d0f15197c3cca..9ed8da61d8feb 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java @@ -26,10 +26,10 @@ import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; import org.elasticsearch.xpack.core.dataframe.action.StartDataFrameTransformTaskAction; -import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransform; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformState; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformTaskState; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; @@ -106,46 +106,47 @@ static List verifyIndicesPrimaryShardsAreActive(ClusterState clusterStat protected void nodeOperation(AllocatedPersistentTask task, @Nullable DataFrameTransform params, PersistentTaskState state) { final String transformId = params.getId(); final DataFrameTransformTask buildTask = (DataFrameTransformTask) task; - final SchedulerEngine.Job schedulerJob = new SchedulerEngine.Job(DataFrameTransformTask.SCHEDULE_NAME + "_" + transformId, - next()); - final DataFrameTransformState transformState = (DataFrameTransformState) state; + final DataFrameTransformState transformPTaskState = (DataFrameTransformState) state; final DataFrameTransformTask.ClientDataFrameIndexerBuilder indexerBuilder = - new DataFrameTransformTask.ClientDataFrameIndexerBuilder() + new DataFrameTransformTask.ClientDataFrameIndexerBuilder(transformId) .setAuditor(auditor) .setClient(client) - .setIndexerState(currentIndexerState(transformState)) - .setInitialPosition(transformState == null ? null : transformState.getPosition()) - // If the state is `null` that means this is a "first run". We can safely assume the - // task will attempt to gather the initial progress information - // if we have state, this may indicate the previous execution node crashed, so we should attempt to retrieve - // the progress from state to keep an accurate measurement of our progress - .setProgress(transformState == null ? null : transformState.getProgress()) + .setIndexerState(currentIndexerState(transformPTaskState)) + // If the transform persistent task state is `null` that means this is a "first run". + // If we have state then the task has relocated from another node in which case this + // state is preferred + .setInitialPosition(transformPTaskState == null ? null : transformPTaskState.getPosition()) + .setProgress(transformPTaskState == null ? 
null : transformPTaskState.getProgress()) .setTransformsCheckpointService(dataFrameTransformsCheckpointService) - .setTransformsConfigManager(transformsConfigManager) - .setTransformId(transformId); + .setTransformsConfigManager(transformsConfigManager); ActionListener startTaskListener = ActionListener.wrap( response -> logger.info("Successfully completed and scheduled task in node operation"), failure -> logger.error("Failed to start task ["+ transformId +"] in node operation", failure) ); + Long previousCheckpoint = transformPTaskState != null ? transformPTaskState.getCheckpoint() : null; + // <3> Set the previous stats (if they exist), initialize the indexer, start the task (If it is STOPPED) // Since we don't create the task until `_start` is called, if we see that the task state is stopped, attempt to start // Schedule execution regardless - ActionListener transformStatsActionListener = ActionListener.wrap( - stats -> { - indexerBuilder.setInitialStats(stats); - buildTask.initializeIndexer(indexerBuilder); - scheduleAndStartTask(buildTask, schedulerJob, startTaskListener); + ActionListener transformStatsActionListener = ActionListener.wrap( + stateAndStats -> { + indexerBuilder.setInitialStats(stateAndStats.getTransformStats()); + if (transformPTaskState == null) { // prefer the persistent task state + indexerBuilder.setInitialPosition(stateAndStats.getTransformState().getPosition()); + indexerBuilder.setProgress(stateAndStats.getTransformState().getProgress()); + } + + final Long checkpoint = previousCheckpoint != null ? previousCheckpoint : stateAndStats.getTransformState().getCheckpoint(); + startTask(buildTask, indexerBuilder, checkpoint, startTaskListener); }, error -> { if (error instanceof ResourceNotFoundException == false) { logger.error("Unable to load previously persisted statistics for transform [" + params.getId() + "]", error); } - indexerBuilder.setInitialStats(new DataFrameIndexerTransformStats(transformId)); - buildTask.initializeIndexer(indexerBuilder); - scheduleAndStartTask(buildTask, schedulerJob, startTaskListener); + startTask(buildTask, indexerBuilder, previousCheckpoint, startTaskListener); } ); @@ -218,30 +219,24 @@ private void markAsFailed(DataFrameTransformTask task, String reason) { } } - private void scheduleAndStartTask(DataFrameTransformTask buildTask, - SchedulerEngine.Job schedulerJob, - ActionListener listener) { - // Note that while the task is added to the scheduler here, the internal state will prevent - // it from doing any work until the task is "started" via the StartTransform api - schedulerEngine.register(buildTask); - schedulerEngine.add(schedulerJob); - logger.info("Data frame transform [{}] created.", buildTask.getTransformId()); + private void startTask(DataFrameTransformTask buildTask, + DataFrameTransformTask.ClientDataFrameIndexerBuilder indexerBuilder, + Long previousCheckpoint, + ActionListener listener) { // If we are stopped, and it is an initial run, this means we have never been started, // attempt to start the task + + buildTask.initializeIndexer(indexerBuilder); + // TODO isInitialRun is false after relocation?? if (buildTask.getState().getTaskState().equals(DataFrameTransformTaskState.STOPPED) && buildTask.isInitialRun()) { - buildTask.start(listener); + logger.info("Data frame transform [{}] created.", buildTask.getTransformId()); + buildTask.start(previousCheckpoint, listener); } else { logger.debug("No need to start task. 
Its current state is: {}", buildTask.getState().getIndexerState()); listener.onResponse(new StartDataFrameTransformTaskAction.Response(true)); } } - static SchedulerEngine.Schedule next() { - return (startTime, now) -> { - return now + 1000; // to be fixed, hardcode something - }; - } - @Override protected AllocatedPersistentTask createTask(long id, String type, String action, TaskId parentTaskId, PersistentTasksCustomMetaData.PersistentTask persistentTask, Map headers) { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java index 2020300a0cf77..9df6b5e3ab337 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java @@ -27,12 +27,13 @@ import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.action.StartDataFrameTransformTaskAction; import org.elasticsearch.xpack.core.dataframe.action.StartDataFrameTransformTaskAction.Response; -import org.elasticsearch.xpack.core.dataframe.action.StopDataFrameTransformAction; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransform; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpointingInfo; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformProgress; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformState; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformTaskState; import org.elasticsearch.xpack.core.dataframe.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.indexing.IndexerState; @@ -41,6 +42,7 @@ import org.elasticsearch.xpack.dataframe.checkpoint.DataFrameTransformsCheckpointService; import org.elasticsearch.xpack.dataframe.notifications.DataFrameAuditor; import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; +import org.elasticsearch.xpack.dataframe.transforms.pivot.AggregationResultUtils; import java.util.Arrays; import java.util.Map; @@ -85,7 +87,7 @@ public DataFrameTransformTask(long id, String type, String action, TaskId parent String initialReason = null; long initialGeneration = 0; Map initialPosition = null; - logger.info("[{}] init, got state: [{}]", transform.getId(), state != null); + logger.trace("[{}] init, got state: [{}]", transform.getId(), state != null); if (state != null) { initialTaskState = state.getTaskState(); initialReason = state.getReason(); @@ -181,7 +183,13 @@ boolean isInitialRun() { return getIndexer() != null && getIndexer().initialRun(); } - public synchronized void start(ActionListener listener) { + /** + * Start the background indexer and set the task's state to started + * @param startingCheckpoint Set the current checkpoint to this value. 
If null the + * current checkpoint is not set + * @param listener Started listener + */ + public synchronized void start(Long startingCheckpoint, ActionListener listener) { if (getIndexer() == null) { listener.onFailure(new ElasticsearchException("Task for transform [{}] not fully initialized. Try again later", getTransformId())); @@ -195,6 +203,9 @@ public synchronized void start(ActionListener listener) { } stateReason.set(null); taskState.set(DataFrameTransformTaskState.STARTED); + if (startingCheckpoint != null) { + currentCheckpoint.set(startingCheckpoint); + } final DataFrameTransformState state = new DataFrameTransformState( DataFrameTransformTaskState.STARTED, @@ -208,6 +219,10 @@ public synchronized void start(ActionListener listener) { persistStateToClusterState(state, ActionListener.wrap( task -> { auditor.info(transform.getId(), "Updated state to [" + state.getTaskState() + "]"); + long now = System.currentTimeMillis(); + // kick off the indexer + triggered(new Event(schedulerJobName(), now, now)); + registerWithSchedulerJob(); listener.onResponse(new StartDataFrameTransformTaskAction.Response(true)); }, exc -> { @@ -218,51 +233,17 @@ public synchronized void start(ActionListener listener) { )); } - public synchronized void stop(ActionListener listener) { + public synchronized void stop() { if (getIndexer() == null) { - listener.onFailure(new ElasticsearchException("Task for transform [{}] not fully initialized. Try again later", - getTransformId())); return; } // taskState is initialized as STOPPED and is updated in tandem with the indexerState // Consequently, if it is STOPPED, we consider the whole task STOPPED. if (taskState.get() == DataFrameTransformTaskState.STOPPED) { - listener.onResponse(new StopDataFrameTransformAction.Response(true)); return; } - final IndexerState newState = getIndexer().stop(); - switch (newState) { - case STOPPED: - // Fall through to `STOPPING` as the behavior is the same for both, we should persist for both - case STOPPING: - // update the persistent state to STOPPED. There are two scenarios and both are safe: - // 1. we persist STOPPED now, indexer continues a bit then sees the flag and checkpoints another STOPPED with the more recent - // position. - // 2. we persist STOPPED now, indexer continues a bit but then dies. When/if we resume we'll pick up at last checkpoint, - // overwrite some docs and eventually checkpoint. 
- taskState.set(DataFrameTransformTaskState.STOPPED); - DataFrameTransformState state = new DataFrameTransformState( - DataFrameTransformTaskState.STOPPED, - IndexerState.STOPPED, - getIndexer().getPosition(), - currentCheckpoint.get(), - stateReason.get(), - getIndexer().getProgress()); - persistStateToClusterState(state, ActionListener.wrap( - task -> { - auditor.info(transform.getId(), "Updated state to [" + state.getTaskState() + "]"); - listener.onResponse(new StopDataFrameTransformAction.Response(true)); - }, - exc -> listener.onFailure(new ElasticsearchException( - "Error while updating state for data frame transform [{}] to [{}]", exc, - transform.getId(), - state.getIndexerState())))); - break; - default: - listener.onFailure(new ElasticsearchException("Cannot stop task for data frame transform [{}], because state was [{}]", - transform.getId(), newState)); - break; - } + + getIndexer().stop(); } @Override @@ -272,7 +253,7 @@ public synchronized void triggered(Event event) { return; } // for now no rerun, so only trigger if checkpoint == 0 - if (currentCheckpoint.get() == 0 && event.getJobName().equals(SCHEDULE_NAME + "_" + transform.getId())) { + if (currentCheckpoint.get() == 0 && event.getJobName().equals(schedulerJobName())) { logger.debug("Data frame indexer [{}] schedule has triggered, state: [{}]", event.getJobName(), getIndexer().getState()); getIndexer().maybeTriggerAsyncJob(System.currentTimeMillis()); } @@ -280,18 +261,10 @@ public synchronized void triggered(Event event) { /** * Attempt to gracefully cleanup the data frame transform so it can be terminated. - * This tries to remove the job from the scheduler, and potentially any other - * cleanup operations in the future + * This tries to remove the job from the scheduler and completes the persistent task */ synchronized void shutdown() { - try { - logger.info("Data frame indexer [" + transform.getId() + "] received abort request, stopping indexer."); - schedulerEngine.remove(SCHEDULE_NAME + "_" + transform.getId()); - schedulerEngine.unregister(this); - } catch (Exception e) { - markAsFailed(e); - return; - } + deregisterSchedulerJob(); markAsCompleted(); } @@ -347,6 +320,27 @@ public synchronized void onCancelled() { } } + private void registerWithSchedulerJob() { + schedulerEngine.register(this); + final SchedulerEngine.Job schedulerJob = new SchedulerEngine.Job(schedulerJobName(), next()); + schedulerEngine.add(schedulerJob); + } + + private void deregisterSchedulerJob() { + schedulerEngine.remove(schedulerJobName()); + schedulerEngine.unregister(this); + } + + private String schedulerJobName() { + return DataFrameTransformTask.SCHEDULE_NAME + "_" + getTransformId(); + } + + private SchedulerEngine.Schedule next() { + return (startTime, now) -> { + return now + 1000; // to be fixed, hardcode something + }; + } + synchronized void initializeIndexer(ClientDataFrameIndexerBuilder indexerBuilder) { indexer.set(indexerBuilder.build(this)); } @@ -364,6 +358,11 @@ static class ClientDataFrameIndexerBuilder { private Map initialPosition; private DataFrameTransformProgress progress; + ClientDataFrameIndexerBuilder(String transformId) { + this.transformId = transformId; + this.initialStats = new DataFrameIndexerTransformStats(transformId); + } + ClientDataFrameIndexer build(DataFrameTransformTask parentTask) { return new ClientDataFrameIndexer(this.transformId, this.transformsConfigManager, @@ -555,7 +554,9 @@ protected void doSaveState(IndexerState indexerState, Map positi task -> { // Only persist the stats if something 
has actually changed if (previouslyPersistedStats == null || previouslyPersistedStats.equals(getStats()) == false) { - transformsConfigManager.putOrUpdateTransformStats(getStats(), + transformsConfigManager.putOrUpdateTransformStats( + new DataFrameTransformStateAndStats(transformId, state, getStats(), + DataFrameTransformCheckpointingInfo.EMPTY), // TODO should this be null ActionListener.wrap( r -> { previouslyPersistedStats = getStats(); @@ -612,6 +613,24 @@ protected void onFinish(ActionListener listener) { } } + @Override + protected void onStop() { + auditor.info(transformConfig.getId(), "Indexer has stopped"); + logger.info("Data frame transform [{}] indexer has stopped", transformConfig.getId()); + transformsConfigManager.putOrUpdateTransformStats( + new DataFrameTransformStateAndStats(transformId, transformTask.getState(), getStats(), + DataFrameTransformCheckpointingInfo.EMPTY), // TODO should this be null + ActionListener.wrap( + r -> { + transformTask.shutdown(); + }, + statsExc -> { + transformTask.shutdown(); + logger.error("Updating saving stats of transform [" + transformConfig.getId() + "] failed", statsExc); + } + )); + } + @Override protected void onAbort() { auditor.info(transformConfig.getId(), "Received abort request, stopping indexer"); @@ -636,7 +655,7 @@ protected void createCheckpoint(ActionListener listener) { } private boolean isIrrecoverableFailure(Exception e) { - return e instanceof IndexNotFoundException; + return e instanceof IndexNotFoundException || e instanceof AggregationResultUtils.AggregationExtractionException; } synchronized void handleFailure(Exception e) { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java index b17a65fc4daf1..f8857591b2322 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java @@ -8,10 +8,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.search.aggregations.metrics.GeoCentroid; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.SingleValue; import org.elasticsearch.search.aggregations.metrics.ScriptedMetric; @@ -29,7 +31,7 @@ import static org.elasticsearch.xpack.dataframe.transforms.pivot.SchemaUtil.isNumericType; -final class AggregationResultUtils { +public final class AggregationResultUtils { private static final Logger logger = LogManager.getLogger(AggregationResultUtils.class); /** @@ -77,17 +79,20 @@ public static Stream> extractCompositeAggregationResults(Com // gather the `value` type, otherwise utilize `getValueAsString` so we don't lose formatted outputs. 
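                    // e.g. a metric on a date field keeps its formatted date string instead of the raw epoch-millis value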
if (isNumericType(fieldType) || (aggResultSingleValue.getValueAsString().equals(String.valueOf(aggResultSingleValue.value())))) { - document.put(aggName, aggResultSingleValue.value()); + updateDocument(document, aggName, aggResultSingleValue.value()); } else { - document.put(aggName, aggResultSingleValue.getValueAsString()); + updateDocument(document, aggName, aggResultSingleValue.getValueAsString()); } } else if (aggResult instanceof ScriptedMetric) { - document.put(aggName, ((ScriptedMetric) aggResult).aggregation()); + updateDocument(document, aggName, ((ScriptedMetric) aggResult).aggregation()); + } else if (aggResult instanceof GeoCentroid) { + updateDocument(document, aggName, ((GeoCentroid) aggResult).centroid().toString()); } else { // Execution should never reach this point! // Creating transforms with unsupported aggregations shall not be possible - logger.error("Dataframe Internal Error: unsupported aggregation ["+ aggResult.getName() +"], ignoring"); - assert false; + throw new AggregationExtractionException("unsupported aggregation [{}] with name [{}]", + aggResult.getType(), + aggResult.getName()); } } @@ -97,4 +102,49 @@ public static Stream> extractCompositeAggregationResults(Com }); } + @SuppressWarnings("unchecked") + static void updateDocument(Map document, String fieldName, Object value) { + String[] fieldTokens = fieldName.split("\\."); + if (fieldTokens.length == 1) { + document.put(fieldName, value); + return; + } + Map internalMap = document; + for (int i = 0; i < fieldTokens.length; i++) { + String token = fieldTokens[i]; + if (i == fieldTokens.length - 1) { + if (internalMap.containsKey(token)) { + if (internalMap.get(token) instanceof Map) { + throw new AggregationExtractionException("mixed object types of nested and non-nested fields [{}]", + fieldName); + } else { + throw new AggregationExtractionException("duplicate key value pairs key [{}] old value [{}] duplicate value [{}]", + fieldName, + internalMap.get(token), + value); + } + } + internalMap.put(token, value); + } else { + if (internalMap.containsKey(token)) { + if (internalMap.get(token) instanceof Map) { + internalMap = (Map)internalMap.get(token); + } else { + throw new AggregationExtractionException("mixed object types of nested and non-nested fields [{}]", + fieldName); + } + } else { + Map newMap = new HashMap<>(); + internalMap.put(token, newMap); + internalMap = newMap; + } + } + } + } + + public static class AggregationExtractionException extends ElasticsearchException { + AggregationExtractionException(String msg, Object... 
args) { + super(msg, args); + } + } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java index e7257c463ce7d..615c9b2e8d2e6 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java @@ -35,6 +35,7 @@ enum AggregationType { MAX("max", SOURCE), MIN("min", SOURCE), SUM("sum", SOURCE), + GEO_CENTROID("geo_centroid", "geo_point"), SCRIPTED_METRIC("scripted_metric", DYNAMIC), BUCKET_SCRIPT("bucket_script", DYNAMIC); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java index 0e5231442d18b..8205f2576da68 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java @@ -76,13 +76,15 @@ public void deduceMappings(Client client, SourceConfig sourceConfig, final Actio * the page size, the type of aggregations and the data. As the page size is the number of buckets we return * per page the page size is a multiplier for the costs of aggregating bucket. * - * Initially this returns a default, in future it might inspect the configuration and base the initial size - * on the aggregations used. + * The user may set a maximum in the {@link PivotConfig#getMaxPageSearchSize()}, but if that is not provided, + * the default {@link Pivot#DEFAULT_INITIAL_PAGE_SIZE} is used. + * + * In future we might inspect the configuration and base the initial size on the aggregations used. * * @return the page size */ public int getInitialPageSize() { - return DEFAULT_INITIAL_PAGE_SIZE; + return config.getMaxPageSearchSize() == null ? 
DEFAULT_INITIAL_PAGE_SIZE : config.getMaxPageSearchSize(); } public SearchRequest buildSearchRequest(SourceConfig sourceConfig, Map position, int pageSize) { diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformActionTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameNodesTests.java similarity index 85% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformActionTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameNodesTests.java index ddc7ddd4f1b9c..ba549aa7e8bb4 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformActionTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameNodesTests.java @@ -18,13 +18,12 @@ import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransform; -import java.io.IOException; import java.util.Arrays; import java.util.Collections; import static org.hamcrest.Matchers.hasItemInArray; -public class TransportStopDataFrameTransformActionTests extends ESTestCase { +public class DataFrameNodesTests extends ESTestCase { public void testDataframeNodes() { String dataFrameIdFoo = "df-id-foo"; @@ -49,12 +48,12 @@ public Version getMinimalSupportedVersion() { } @Override - public void writeTo(StreamOutput out) throws IOException { + public void writeTo(StreamOutput out) { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public XContentBuilder toXContent(XContentBuilder builder, Params params) { return null; } }, @@ -64,7 +63,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws .metaData(MetaData.builder().putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())) .build(); - String[] nodes = TransportStopDataFrameTransformAction.dataframeNodes(Arrays.asList(dataFrameIdFoo, dataFrameIdBar), cs); + String[] nodes = DataFrameNodes.dataFrameTaskNodes(Arrays.asList(dataFrameIdFoo, dataFrameIdBar), cs); assertEquals(2, nodes.length); assertThat(nodes, hasItemInArray("node-1")); assertThat(nodes, hasItemInArray("node-2")); @@ -72,7 +71,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public void testDataframeNodes_NoTasks() { ClusterState emptyState = ClusterState.builder(new ClusterName("_name")).build(); - String[] nodes = TransportStopDataFrameTransformAction.dataframeNodes(Collections.singletonList("df-id"), emptyState); + String[] nodes = DataFrameNodes.dataFrameTaskNodes(Collections.singletonList("df-id"), emptyState); assertEquals(0, nodes.length); } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java index 36ae4f3f162a0..9c7af3efa5333 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java @@ -14,12 +14,17 @@ import 
org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformCheckpointTests; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfigTests; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; +import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStatsTests; import org.elasticsearch.xpack.dataframe.DataFrameSingleNodeTestCase; import org.junit.Before; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.Comparator; import java.util.List; +import java.util.stream.Collectors; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -217,4 +222,40 @@ public void testExpandIds() throws Exception { }); } + + public void testStateAndStats() throws InterruptedException { + String transformId = "transform_test_stats_create_read_update"; + + DataFrameTransformStateAndStats stateAndStats = + DataFrameTransformStateAndStatsTests.randomDataFrameTransformStateAndStats(transformId); + + assertAsync(listener -> transformsConfigManager.putOrUpdateTransformStats(stateAndStats, listener), Boolean.TRUE, null, null); + assertAsync(listener -> transformsConfigManager.getTransformStats(transformId, listener), stateAndStats, null, null); + + DataFrameTransformStateAndStats updated = + DataFrameTransformStateAndStatsTests.randomDataFrameTransformStateAndStats(transformId); + assertAsync(listener -> transformsConfigManager.putOrUpdateTransformStats(updated, listener), Boolean.TRUE, null, null); + assertAsync(listener -> transformsConfigManager.getTransformStats(transformId, listener), updated, null, null); + } + + public void testGetStateAndStatsMultiple() throws InterruptedException { + int numStats = randomInt(5); + List expectedStats = new ArrayList<>(); + for (int i=0; i transformsConfigManager.putOrUpdateTransformStats(stat, listener), Boolean.TRUE, null, null); + } + + // remove one of the put stats so we don't retrieve all + if (expectedStats.size() > 1) { + expectedStats.remove(expectedStats.size() -1); + } + List ids = expectedStats.stream().map(DataFrameTransformStateAndStats::getId).collect(Collectors.toList()); + + // get stats will be ordered by id + expectedStats.sort(Comparator.comparing(DataFrameTransformStateAndStats::getId)); + assertAsync(listener -> transformsConfigManager.getTransformStats(ids, listener), expectedStats, null, null); + } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexerTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexerTests.java index f3f3255f07a6d..43198c6edfcf3 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexerTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexerTests.java @@ -23,7 +23,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameIndexerTransformStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; -import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfigTests; +import org.elasticsearch.xpack.core.dataframe.transforms.pivot.AggregationConfigTests; +import 
org.elasticsearch.xpack.core.dataframe.transforms.pivot.GroupConfigTests; +import org.elasticsearch.xpack.core.dataframe.transforms.pivot.PivotConfig; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.dataframe.notifications.DataFrameAuditor; import org.elasticsearch.xpack.dataframe.transforms.pivot.Pivot; @@ -39,7 +41,10 @@ import java.util.function.Consumer; import java.util.function.Function; +import static org.elasticsearch.xpack.core.dataframe.transforms.DestConfigTests.randomDestConfig; +import static org.elasticsearch.xpack.core.dataframe.transforms.SourceConfigTests.randomSourceConfig; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -169,9 +174,15 @@ public void setUpMocks() { } public void testPageSizeAdapt() throws InterruptedException { - DataFrameTransformConfig config = DataFrameTransformConfigTests.randomDataFrameTransformConfig(); + Integer pageSize = randomBoolean() ? null : randomIntBetween(500, 10_000); + DataFrameTransformConfig config = new DataFrameTransformConfig(randomAlphaOfLength(10), + randomSourceConfig(), + randomDestConfig(), + null, + new PivotConfig(GroupConfigTests.randomGroupConfig(), AggregationConfigTests.randomAggregationConfig(), pageSize), + randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000)); AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); - + final long initialPageSize = pageSize == null ? Pivot.DEFAULT_INITIAL_PAGE_SIZE : pageSize; Function searchFunction = searchRequest -> { throw new SearchPhaseExecutionException("query", "Partial shards failure", new ShardSearchFailure[] { new ShardSearchFailure(new CircuitBreakingException("to much memory", 110, 100, Durability.TRANSIENT)) }); @@ -179,9 +190,7 @@ public void testPageSizeAdapt() throws InterruptedException { Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); - Consumer failureConsumer = e -> { - fail("expected circuit breaker exception to be handled"); - }; + Consumer failureConsumer = e -> fail("expected circuit breaker exception to be handled"); final ExecutorService executor = Executors.newFixedThreadPool(1); try { @@ -197,8 +206,8 @@ public void testPageSizeAdapt() throws InterruptedException { latch.countDown(); awaitBusy(() -> indexer.getState() == IndexerState.STOPPED); long pageSizeAfterFirstReduction = indexer.getPageSize(); - assertTrue(Pivot.DEFAULT_INITIAL_PAGE_SIZE > pageSizeAfterFirstReduction); - assertTrue(pageSizeAfterFirstReduction > DataFrameIndexer.MINIMUM_PAGE_SIZE); + assertThat(initialPageSize, greaterThan(pageSizeAfterFirstReduction)); + assertThat(pageSizeAfterFirstReduction, greaterThan((long)DataFrameIndexer.MINIMUM_PAGE_SIZE)); // run indexer a 2nd time final CountDownLatch secondRunLatch = indexer.newLatch(1); @@ -211,8 +220,8 @@ public void testPageSizeAdapt() throws InterruptedException { awaitBusy(() -> indexer.getState() == IndexerState.STOPPED); // assert that page size has been reduced again - assertTrue(pageSizeAfterFirstReduction > indexer.getPageSize()); - assertTrue(pageSizeAfterFirstReduction > DataFrameIndexer.MINIMUM_PAGE_SIZE); + assertThat(pageSizeAfterFirstReduction, greaterThan((long)indexer.getPageSize())); + assertThat(pageSizeAfterFirstReduction, greaterThan((long)DataFrameIndexer.MINIMUM_PAGE_SIZE)); } finally { executor.shutdownNow(); diff --git 
a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java index 7eb4295111324..1a835c9d19b59 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java @@ -66,6 +66,7 @@ import java.util.stream.Collectors; import static java.util.Arrays.asList; +import static org.hamcrest.CoreMatchers.equalTo; public class AggregationResultUtilsTests extends ESTestCase { @@ -736,6 +737,51 @@ aggTypedName, asMap( assertEquals(documentIdsFirstRun, documentIdsSecondRun); } + @SuppressWarnings("unchecked") + public void testUpdateDocument() { + Map document = new HashMap<>(); + + AggregationResultUtils.updateDocument(document, "foo.bar.baz", 1000L); + AggregationResultUtils.updateDocument(document, "foo.bar.baz2", 2000L); + AggregationResultUtils.updateDocument(document, "bar.field1", 1L); + AggregationResultUtils.updateDocument(document, "metric", 10L); + + assertThat(document.get("metric"), equalTo(10L)); + + Map bar = (Map)document.get("bar"); + + assertThat(bar.get("field1"), equalTo(1L)); + + Map foo = (Map)document.get("foo"); + Map foobar = (Map)foo.get("bar"); + + assertThat(foobar.get("baz"), equalTo(1000L)); + assertThat(foobar.get("baz2"), equalTo(2000L)); + } + + public void testUpdateDocumentWithDuplicate() { + Map document = new HashMap<>(); + + AggregationResultUtils.updateDocument(document, "foo.bar.baz", 1000L); + AggregationResultUtils.AggregationExtractionException exception = + expectThrows(AggregationResultUtils.AggregationExtractionException.class, + () -> AggregationResultUtils.updateDocument(document, "foo.bar.baz", 2000L)); + assertThat(exception.getMessage(), + equalTo("duplicate key value pairs key [foo.bar.baz] old value [1000] duplicate value [2000]")); + } + + public void testUpdateDocumentWithObjectAndNotObject() { + Map document = new HashMap<>(); + + AggregationResultUtils.updateDocument(document, "foo.bar.baz", 1000L); + AggregationResultUtils.AggregationExtractionException exception = + expectThrows(AggregationResultUtils.AggregationExtractionException.class, + () -> AggregationResultUtils.updateDocument(document, "foo.bar", 2000L)); + assertThat(exception.getMessage(), + equalTo("mixed object types of nested and non-nested fields [foo.bar]")); + } + + private void executeTest(GroupConfig groups, Collection aggregationBuilders, Collection pipelineAggregationBuilders, diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationsTests.java index 5fb8463ae5412..8443699430a2a 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationsTests.java @@ -38,11 +38,15 @@ public void testResolveTargetMapping() { assertEquals("double", Aggregations.resolveTargetMapping("sum", "double")); assertEquals("half_float", Aggregations.resolveTargetMapping("sum", "half_float")); + // geo_centroid + assertEquals("geo_point", 
Aggregations.resolveTargetMapping("geo_centroid", "geo_point")); + assertEquals("geo_point", Aggregations.resolveTargetMapping("geo_centroid", null)); + // scripted_metric assertEquals("_dynamic", Aggregations.resolveTargetMapping("scripted_metric", null)); assertEquals("_dynamic", Aggregations.resolveTargetMapping("scripted_metric", "int")); - // scripted_metric + // bucket_script assertEquals("_dynamic", Aggregations.resolveTargetMapping("bucket_script", null)); assertEquals("_dynamic", Aggregations.resolveTargetMapping("bucket_script", "int")); } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java index 4c434cdbee7fd..20ea84502ed82 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java @@ -107,6 +107,16 @@ public void testValidateNonExistingIndex() throws Exception { assertInvalidTransform(client, source, pivot); } + public void testInitialPageSize() throws Exception { + int expectedPageSize = 1000; + + Pivot pivot = new Pivot(new PivotConfig(GroupConfigTests.randomGroupConfig(), getValidAggregationConfig(), expectedPageSize)); + assertThat(pivot.getInitialPageSize(), equalTo(expectedPageSize)); + + pivot = new Pivot(new PivotConfig(GroupConfigTests.randomGroupConfig(), getValidAggregationConfig(), null)); + assertThat(pivot.getInitialPageSize(), equalTo(Pivot.DEFAULT_INITIAL_PAGE_SIZE)); + } + public void testSearchFailure() throws Exception { // test a failure during the search operation, transform creation fails if // search has failures although they might just be temporary @@ -177,11 +187,11 @@ protected void } private PivotConfig getValidPivotConfig() throws IOException { - return new PivotConfig(GroupConfigTests.randomGroupConfig(), getValidAggregationConfig()); + return new PivotConfig(GroupConfigTests.randomGroupConfig(), getValidAggregationConfig(), null); } private PivotConfig getValidPivotConfig(AggregationConfig aggregationConfig) throws IOException { - return new PivotConfig(GroupConfigTests.randomGroupConfig(), aggregationConfig); + return new PivotConfig(GroupConfigTests.randomGroupConfig(), aggregationConfig, null); } private AggregationConfig getValidAggregationConfig() throws IOException { diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index b0f5a556ac627..57ded2d069d30 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import java.util.List; @@ -19,8 +18,7 @@ public class IndexDeprecationChecksTests extends ESTestCase { public void testOldIndicesCheck() { - Version createdWith = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, - VersionUtils.getPreviousVersion(Version.V_7_0_0)); + Version 
createdWith = Version.fromString("1.0.0"); IndexMetaData indexMetaData = IndexMetaData.builder("test") .settings(settings(createdWith)) .numberOfShards(1) diff --git a/x-pack/plugin/graph/build.gradle b/x-pack/plugin/graph/build.gradle index e7b0b44fd659b..286f8ac0a8917 100644 --- a/x-pack/plugin/graph/build.gradle +++ b/x-pack/plugin/graph/build.gradle @@ -13,6 +13,9 @@ dependencies { // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } } // add all sub-projects of the qa sub-project diff --git a/x-pack/plugin/ilm/build.gradle b/x-pack/plugin/ilm/build.gradle index e6962e3c3bf72..2b80ddc04207b 100644 --- a/x-pack/plugin/ilm/build.gradle +++ b/x-pack/plugin/ilm/build.gradle @@ -16,6 +16,9 @@ dependencies { // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } } // add all sub-projects of the qa sub-project diff --git a/x-pack/plugin/ilm/qa/rest/build.gradle b/x-pack/plugin/ilm/qa/rest/build.gradle index c69a3dfce2143..75ba234e55569 100644 --- a/x-pack/plugin/ilm/qa/rest/build.gradle +++ b/x-pack/plugin/ilm/qa/rest/build.gradle @@ -4,6 +4,9 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('ilm'), configuration: 'runtime') } diff --git a/x-pack/plugin/logstash/build.gradle b/x-pack/plugin/logstash/build.gradle index 476d3f17cad41..b799dbd4ceb95 100644 --- a/x-pack/plugin/logstash/build.gradle +++ b/x-pack/plugin/logstash/build.gradle @@ -13,6 +13,9 @@ dependencies { // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } } integTest.enabled = false diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 8c17e4b1fc4c6..c09ced75bdf06 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -12,7 +12,7 @@ esplugin { repositories { ivy { - name "prelert-artifacts" + name "ml-cpp" url "https://prelert-artifacts.s3.amazonaws.com" metadataSources { // no repository metadata, look directly for the artifact @@ -53,6 +53,9 @@ dependencies { compileOnly project(path: xpackModule('core'), configuration: 'default') compileOnly "org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${versions.elasticsearch}" testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } // This should not be here testCompile project(path: 
xpackModule('security'), configuration: 'testArtifacts') diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index bc0b0ca5b7b03..93dc8c4fec131 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -5,6 +5,9 @@ dependencies { // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackProject('plugin').path, configuration: 'testArtifacts') } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle index 22fd7837628b9..e6fd8412c948b 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -5,6 +5,9 @@ dependencies { // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('ml'), configuration: 'runtime') testCompile project(path: xpackModule('ml'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java index 03860ea9ae044..98d089e544b4e 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java @@ -124,7 +124,7 @@ public void testTooManyByFields() throws Exception { // Assert we haven't violated the limit too much GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0); ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); - assertThat(modelSizeStats.getModelBytes(), lessThan(31500000L)); + assertThat(modelSizeStats.getModelBytes(), lessThan(35000000L)); assertThat(modelSizeStats.getModelBytes(), greaterThan(25000000L)); assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } @@ -173,7 +173,7 @@ public void testTooManyByAndOverFields() throws Exception { // Assert we haven't violated the limit too much GetJobsStatsAction.Response.JobStats jobStats = getJobStats(job.getId()).get(0); ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); - assertThat(modelSizeStats.getModelBytes(), lessThan(31500000L)); + assertThat(modelSizeStats.getModelBytes(), lessThan(33000000L)); assertThat(modelSizeStats.getModelBytes(), greaterThan(24000000L)); assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } @@ -223,7 +223,7 @@ public void testManyDistinctOverFields() throws Exception { // Assert we haven't violated the limit too much GetJobsStatsAction.Response.JobStats 
jobStats = getJobStats(job.getId()).get(0); ModelSizeStats modelSizeStats = jobStats.getModelSizeStats(); - assertThat(modelSizeStats.getModelBytes(), lessThan(116000000L)); + assertThat(modelSizeStats.getModelBytes(), lessThan(117000000L)); assertThat(modelSizeStats.getModelBytes(), greaterThan(90000000L)); assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/SetUpgradeModeIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/SetUpgradeModeIT.java index 57c9245e2c5b3..f97c27e4ccca4 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/SetUpgradeModeIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/SetUpgradeModeIT.java @@ -25,6 +25,7 @@ import java.util.Collections; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.core.ml.MlTasks.AWAITING_UPGRADE; import static org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase.createDatafeed; import static org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase.createScheduledJob; import static org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase.getDataCounts; @@ -33,7 +34,6 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.isEmptyString; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -88,12 +88,12 @@ public void testEnableUpgradeMode() throws Exception { GetJobsStatsAction.Response.JobStats jobStats = getJobStats(jobId).get(0); assertThat(jobStats.getState(), equalTo(JobState.OPENED)); - assertThat(jobStats.getAssignmentExplanation(), equalTo(MlTasks.AWAITING_UPGRADE.getExplanation())); + assertThat(jobStats.getAssignmentExplanation(), equalTo(AWAITING_UPGRADE.getExplanation())); assertThat(jobStats.getNode(), is(nullValue())); GetDatafeedsStatsAction.Response.DatafeedStats datafeedStats = getDatafeedStats(datafeedId); assertThat(datafeedStats.getDatafeedState(), equalTo(DatafeedState.STARTED)); - assertThat(datafeedStats.getAssignmentExplanation(), equalTo(MlTasks.AWAITING_UPGRADE.getExplanation())); + assertThat(datafeedStats.getAssignmentExplanation(), equalTo(AWAITING_UPGRADE.getExplanation())); assertThat(datafeedStats.getNode(), is(nullValue())); Job.Builder job = createScheduledJob("job-should-not-open"); @@ -126,13 +126,11 @@ public void testEnableUpgradeMode() throws Exception { jobStats = getJobStats(jobId).get(0); assertThat(jobStats.getState(), equalTo(JobState.OPENED)); - assertThat(jobStats.getAssignmentExplanation(), isEmptyString()); - assertThat(jobStats.getNode(), is(not(nullValue()))); + assertThat(jobStats.getAssignmentExplanation(), not(equalTo(AWAITING_UPGRADE.getExplanation()))); datafeedStats = getDatafeedStats(datafeedId); assertThat(datafeedStats.getDatafeedState(), equalTo(DatafeedState.STARTED)); - assertThat(datafeedStats.getAssignmentExplanation(), isEmptyString()); - assertThat(datafeedStats.getNode(), is(not(nullValue()))); + assertThat(datafeedStats.getAssignmentExplanation(), not(equalTo(AWAITING_UPGRADE.getExplanation()))); } private void startRealtime(String jobId) throws Exception { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 714b2712367a4..de945b9bc6c3d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -300,7 +300,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu public static final Setting MIN_DISK_SPACE_OFF_HEAP = Setting.byteSizeSetting("xpack.ml.min_disk_space_off_heap", new ByteSizeValue(5, ByteSizeUnit.GB), Setting.Property.NodeScope); - private static final Logger logger = LogManager.getLogger(XPackPlugin.class); + private static final Logger logger = LogManager.getLogger(MachineLearning.class); private final Settings settings; private final Environment env; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java index d1673dd3c914c..1d230d93792fc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlConfigMigrator.java @@ -40,7 +40,6 @@ import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; @@ -513,18 +512,7 @@ public static Job updateJobForMigration(Job job) { String version = job.getJobVersion() != null ? job.getJobVersion().toString() : null; custom.put(MIGRATED_FROM_VERSION, version); builder.setCustomSettings(custom); - // Increase the model memory limit for 6.1 - 6.3 jobs Version jobVersion = job.getJobVersion(); - if (jobVersion != null && jobVersion.onOrAfter(Version.V_6_1_0) && jobVersion.before(Version.V_6_3_0)) { - // Increase model memory limit if < 512MB - if (job.getAnalysisLimits() != null && job.getAnalysisLimits().getModelMemoryLimit() != null && - job.getAnalysisLimits().getModelMemoryLimit() < 512L) { - long updatedModelMemoryLimit = (long) (job.getAnalysisLimits().getModelMemoryLimit() * 1.3); - AnalysisLimits limits = new AnalysisLimits(updatedModelMemoryLimit, - job.getAnalysisLimits().getCategorizationExamplesLimit()); - builder.setAnalysisLimits(limits); - } - } // Pre v5.5 (ml beta) jobs do not have a version. // These jobs cannot be opened, we rely on the missing version // to indicate this. 
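The logger change in the MachineLearning.java hunk above follows the usual log4j2 convention of keying a static logger to its owning class, so log output is attributed to MachineLearning rather than to XPackPlugin. A minimal sketch of that corrected pattern, shown here in isolation (only the logger declaration; the rest of the plugin class is omitted and assumed):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class MachineLearning {
        // Key the logger to the owning class so its log lines are attributed correctly.
        private static final Logger logger = LogManager.getLogger(MachineLearning.class);
    }
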
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java index 14e434099f843..a81ee5f86e982 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java @@ -125,7 +125,7 @@ private void getForecastRequestStats(String jobId, String forecastId, ActionList } static void validate(Job job, ForecastJobAction.Request request) { - if (job.getJobVersion() == null || job.getJobVersion().before(Version.V_6_1_0)) { + if (job.getJobVersion() == null || job.getJobVersion().before(Version.fromString("6.1.0"))) { throw ExceptionsHelper.badRequestException( "Cannot run forecast because jobs created prior to version 6.1 are not supported"); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java index b476e3e465463..36e5e91b4326b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java @@ -76,6 +76,12 @@ static TextLogFileStructureFinder makeTextLogFileStructureFinder(List ex } // Don't add the last message, as it might be partial and mess up subsequent pattern finding + if (sampleMessages.isEmpty()) { + throw new IllegalArgumentException("Failed to create more than one message from the sample lines provided. (The " + + "last is discarded in case the sample is incomplete.) 
If your sample does contain multiple messages the " + + "problem is probably that the primary timestamp format has been incorrectly detected, so try overriding it."); + } + FileStructure.Builder structureBuilder = new FileStructure.Builder(FileStructure.Format.SEMI_STRUCTURED_TEXT) .setCharset(charsetName) .setHasByteOrderMarker(hasByteOrderMarker) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java index 29d141717ccc8..4516686b65202 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessor.java @@ -294,8 +294,14 @@ private void notifyModelMemoryStatusChange(Context context, ModelSizeStats model if (memoryStatus == ModelSizeStats.MemoryStatus.SOFT_LIMIT) { auditor.warning(context.jobId, Messages.getMessage(Messages.JOB_AUDIT_MEMORY_STATUS_SOFT_LIMIT)); } else if (memoryStatus == ModelSizeStats.MemoryStatus.HARD_LIMIT) { - auditor.error(context.jobId, Messages.getMessage(Messages.JOB_AUDIT_MEMORY_STATUS_HARD_LIMIT, + if (modelSizeStats.getModelBytesMemoryLimit() == null || modelSizeStats.getModelBytesExceeded() == null) { + auditor.error(context.jobId, Messages.getMessage(Messages.JOB_AUDIT_MEMORY_STATUS_HARD_LIMIT_PRE_7_2, new ByteSizeValue(modelSizeStats.getModelBytes(), ByteSizeUnit.BYTES).toString())); + } else { + auditor.error(context.jobId, Messages.getMessage(Messages.JOB_AUDIT_MEMORY_STATUS_HARD_LIMIT, + new ByteSizeValue(modelSizeStats.getModelBytesMemoryLimit(), ByteSizeUnit.BYTES).toString(), + new ByteSizeValue(modelSizeStats.getModelBytesExceeded(), ByteSizeUnit.BYTES).toString())); + } } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/results/AutodetectResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/results/AutodetectResult.java index f3afa98b55a46..b831f1b0aee62 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/results/AutodetectResult.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/results/AutodetectResult.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.ml.job.results; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -133,19 +132,14 @@ public AutodetectResult(StreamInput in) throws IOException { this.flushAcknowledgement = null; } - if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - if (in.readBoolean()) { - this.forecast = new Forecast(in); - } else { - this.forecast = null; - } - if (in.readBoolean()) { - this.forecastRequestStats = new ForecastRequestStats(in); - } else { - this.forecastRequestStats = null; - } + if (in.readBoolean()) { + this.forecast = new Forecast(in); } else { this.forecast = null; + } + if (in.readBoolean()) { + this.forecastRequestStats = new ForecastRequestStats(in); + } else { this.forecastRequestStats = null; } } @@ -161,11 +155,8 @@ public void writeTo(StreamOutput out) throws IOException { writeNullable(modelPlot, out); writeNullable(categoryDefinition, out); writeNullable(flushAcknowledgement, out); - - if (out.getVersion().onOrAfter(Version.V_6_1_0)) { - writeNullable(forecast, out); - 
writeNullable(forecastRequestStats, out); - } + writeNullable(forecast, out); + writeNullable(forecastRequestStats, out); } private static void writeNullable(Writeable writeable, StreamOutput out) throws IOException { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java index 287bd22f91f92..9e0692672349d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/license/MachineLearningLicensingTests.java @@ -8,17 +8,13 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.License.OperationMode; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.xpack.core.TestXPackTransportClient; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.DeleteDatafeedAction; @@ -34,7 +30,6 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; -import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; import org.junit.Before; @@ -61,63 +56,50 @@ public void testMachineLearningPutJobActionRestricted() { License.OperationMode mode = randomInvalidLicenseType(); enableLicensing(mode); assertMLAllowed(false); + // test that license restricted apis do not work - Settings settings = internalCluster().transportClient().settings(); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture listener = PlainActionFuture. 
newFuture(); - new MachineLearningClient(client).putJob(new PutJobAction.Request(createJob(jobId)), listener); + ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> { + PlainActionFuture listener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putJob(new PutJobAction.Request(createJob(jobId)), listener); listener.actionGet(); - fail("put job action should not be enabled!"); - } catch (ElasticsearchSecurityException e) { - assertThat(e.status(), is(RestStatus.FORBIDDEN)); - assertThat(e.getMessage(), containsString("non-compliant")); - assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackField.MACHINE_LEARNING)); - } + }); + assertThat(e.status(), is(RestStatus.FORBIDDEN)); + assertThat(e.getMessage(), containsString("non-compliant")); + assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackField.MACHINE_LEARNING)); // Pick a license that does allow machine learning mode = randomValidLicenseType(); enableLicensing(mode); assertMLAllowed(true); // test that license restricted apis do now work - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putJob(new PutJobAction.Request(createJob(jobId)), listener); - PutJobAction.Response response = listener.actionGet(); - assertNotNull(response); - } + PlainActionFuture listener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putJob(new PutJobAction.Request(createJob(jobId)), listener); + PutJobAction.Response response = listener.actionGet(); + assertNotNull(response); } public void testMachineLearningOpenJobActionRestricted() throws Exception { String jobId = "testmachinelearningopenjobactionrestricted"; assertMLAllowed(true); // test that license restricted apis do now work - Settings settings = internalCluster().transportClient().settings(); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture putJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); - PutJobAction.Response response = putJobListener.actionGet(); - assertNotNull(response); - } + PlainActionFuture putJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); + PutJobAction.Response response = putJobListener.actionGet(); + assertNotNull(response); // Pick a license that does not allow machine learning License.OperationMode mode = randomInvalidLicenseType(); enableLicensing(mode); assertMLAllowed(false); // test that license restricted apis do not work - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); + ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> { PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).openJob(new OpenJobAction.Request(jobId), listener); + 
new MachineLearningClient(client()).openJob(new OpenJobAction.Request(jobId), listener); listener.actionGet(); - fail("open job action should not be enabled!"); - } catch (ElasticsearchSecurityException e) { - assertThat(e.status(), is(RestStatus.FORBIDDEN)); - assertThat(e.getMessage(), containsString("non-compliant")); - assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackField.MACHINE_LEARNING)); - } + }); + assertThat(e.status(), is(RestStatus.FORBIDDEN)); + assertThat(e.getMessage(), containsString("non-compliant")); + assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackField.MACHINE_LEARNING)); // Pick a license that does allow machine learning mode = randomValidLicenseType(); @@ -131,13 +113,10 @@ public void testMachineLearningOpenJobActionRestricted() throws Exception { }); // test that license restricted apis do now work - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).openJob(new OpenJobAction.Request(jobId), listener); - AcknowledgedResponse response = listener.actionGet(); - assertNotNull(response); - } + PlainActionFuture listener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).openJob(new OpenJobAction.Request(jobId), listener); + AcknowledgedResponse response2 = listener.actionGet(); + assertNotNull(response2); } public void testMachineLearningPutDatafeedActionRestricted() throws Exception { @@ -145,46 +124,36 @@ public void testMachineLearningPutDatafeedActionRestricted() throws Exception { String datafeedId = jobId + "-datafeed"; assertMLAllowed(true); // test that license restricted apis do now work - Settings settings = internalCluster().transportClient().settings(); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture putJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); - PutJobAction.Response putJobResponse = putJobListener.actionGet(); - assertNotNull(putJobResponse); - } + PlainActionFuture putJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); + PutJobAction.Response putJobResponse = putJobListener.actionGet(); + assertNotNull(putJobResponse); // Pick a license that does not allow machine learning License.OperationMode mode = randomInvalidLicenseType(); enableLicensing(mode); assertMLAllowed(false); // test that license restricted apis do not work - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); + ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> { PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putDatafeed( - new PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, Collections.singletonList(jobId))), listener); + new MachineLearningClient(client()).putDatafeed( + new 
PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, Collections.singletonList(jobId))), listener); listener.actionGet(); - fail("put datafeed action should not be enabled!"); - } catch (ElasticsearchSecurityException e) { - assertThat(e.status(), is(RestStatus.FORBIDDEN)); - assertThat(e.getMessage(), containsString("non-compliant")); - assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackField.MACHINE_LEARNING)); - } + }); + assertThat(e.status(), is(RestStatus.FORBIDDEN)); + assertThat(e.getMessage(), containsString("non-compliant")); + assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackField.MACHINE_LEARNING)); // Pick a license that does allow machine learning mode = randomValidLicenseType(); enableLicensing(mode); assertMLAllowed(true); // test that license restricted apis do now work - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putDatafeed( - new PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, Collections.singletonList(jobId))), listener); - PutDatafeedAction.Response response = listener.actionGet(); - assertNotNull(response); - } + PlainActionFuture listener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putDatafeed( + new PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, Collections.singletonList(jobId))), listener); + PutDatafeedAction.Response response = listener.actionGet(); + assertNotNull(response); } public void testAutoCloseJobWithDatafeed() throws Exception { @@ -194,31 +163,29 @@ public void testAutoCloseJobWithDatafeed() throws Exception { String datafeedIndex = jobId + "-data"; prepareCreate(datafeedIndex).addMapping("type", "{\"type\":{\"properties\":{\"time\":{\"type\":\"date\"}}}}", XContentType.JSON).get(); - Settings settings = internalCluster().transportClient().settings(); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - // put job - PlainActionFuture putJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); - PutJobAction.Response putJobResponse = putJobListener.actionGet(); - assertNotNull(putJobResponse); - // put datafeed - PlainActionFuture putDatafeedListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putDatafeed( - new PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, - Collections.singletonList(datafeedIndex))), putDatafeedListener); - PutDatafeedAction.Response putDatafeedResponse = putDatafeedListener.actionGet(); - assertNotNull(putDatafeedResponse); - // open job - PlainActionFuture openJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).openJob(new OpenJobAction.Request(jobId), openJobListener); - AcknowledgedResponse openJobResponse = openJobListener.actionGet(); - assertNotNull(openJobResponse); - // start datafeed - PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).startDatafeed(new StartDatafeedAction.Request(datafeedId, 0L), listener); - listener.actionGet(); - } + + // put job + 
PlainActionFuture putJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); + PutJobAction.Response putJobResponse = putJobListener.actionGet(); + assertNotNull(putJobResponse); + // put datafeed + PlainActionFuture putDatafeedListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putDatafeed( + new PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, + Collections.singletonList(datafeedIndex))), putDatafeedListener); + PutDatafeedAction.Response putDatafeedResponse = putDatafeedListener.actionGet(); + assertNotNull(putDatafeedResponse); + // open job + PlainActionFuture openJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).openJob(new OpenJobAction.Request(jobId), openJobListener); + AcknowledgedResponse openJobResponse = openJobListener.actionGet(); + assertNotNull(openJobResponse); + // start datafeed + PlainActionFuture listener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).startDatafeed(new StartDatafeedAction.Request(datafeedId, 0L), listener); + listener.actionGet(); + if (randomBoolean()) { enableLicensing(randomInvalidLicenseType()); @@ -245,18 +212,15 @@ public void testAutoCloseJobWithDatafeed() throws Exception { enableLicensing(randomValidLicenseType()); assertMLAllowed(true); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - // open job - PlainActionFuture openJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).openJob(new OpenJobAction.Request(jobId), openJobListener); - AcknowledgedResponse openJobResponse = openJobListener.actionGet(); - assertNotNull(openJobResponse); - // start datafeed - PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).startDatafeed(new StartDatafeedAction.Request(datafeedId, 0L), listener); - listener.actionGet(); - } + // open job + PlainActionFuture openJobListener2 = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).openJob(new OpenJobAction.Request(jobId), openJobListener2); + AcknowledgedResponse openJobResponse3 = openJobListener2.actionGet(); + assertNotNull(openJobResponse3); + // start datafeed + PlainActionFuture listener2 = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).startDatafeed(new StartDatafeedAction.Request(datafeedId, 0L), listener2); + listener2.actionGet(); assertBusy(() -> { JobState jobState = getJobStats(jobId).getState(); @@ -299,24 +263,20 @@ public void testMachineLearningStartDatafeedActionRestricted() throws Exception prepareCreate(datafeedIndex).addMapping("type", "{\"type\":{\"properties\":{\"time\":{\"type\":\"date\"}}}}", XContentType.JSON).get(); // test that license restricted apis do now work - Settings settings = internalCluster().transportClient().settings(); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture putJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); - PutJobAction.Response putJobResponse = putJobListener.actionGet(); - 
assertNotNull(putJobResponse); - PlainActionFuture putDatafeedListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putDatafeed( - new PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, - Collections.singletonList(datafeedIndex))), putDatafeedListener); - PutDatafeedAction.Response putDatafeedResponse = putDatafeedListener.actionGet(); - assertNotNull(putDatafeedResponse); - PlainActionFuture openJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).openJob(new OpenJobAction.Request(jobId), openJobListener); - AcknowledgedResponse openJobResponse = openJobListener.actionGet(); - assertNotNull(openJobResponse); - } + PlainActionFuture putJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); + PutJobAction.Response putJobResponse = putJobListener.actionGet(); + assertNotNull(putJobResponse); + PlainActionFuture putDatafeedListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putDatafeed( + new PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, + Collections.singletonList(datafeedIndex))), putDatafeedListener); + PutDatafeedAction.Response putDatafeedResponse = putDatafeedListener.actionGet(); + assertNotNull(putDatafeedResponse); + PlainActionFuture openJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).openJob(new OpenJobAction.Request(jobId), openJobListener); + AcknowledgedResponse openJobResponse = openJobListener.actionGet(); + assertNotNull(openJobResponse); // Pick a license that does not allow machine learning License.OperationMode mode = randomInvalidLicenseType(); @@ -333,36 +293,30 @@ public void testMachineLearningStartDatafeedActionRestricted() throws Exception }); // test that license restricted apis do not work - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); + ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> { PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).startDatafeed(new StartDatafeedAction.Request(datafeedId, 0L), listener); + new MachineLearningClient(client()).startDatafeed(new StartDatafeedAction.Request(datafeedId, 0L), listener); listener.actionGet(); - fail("start datafeed action should not be enabled!"); - } catch (ElasticsearchSecurityException e) { - assertThat(e.status(), is(RestStatus.FORBIDDEN)); - assertThat(e.getMessage(), containsString("non-compliant")); - assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackField.MACHINE_LEARNING)); - } + }); + assertThat(e.status(), is(RestStatus.FORBIDDEN)); + assertThat(e.getMessage(), containsString("non-compliant")); + assertThat(e.getMetadata(LicenseUtils.EXPIRED_FEATURE_METADATA), hasItem(XPackField.MACHINE_LEARNING)); // Pick a license that does allow machine learning mode = randomValidLicenseType(); enableLicensing(mode); assertMLAllowed(true); // test that license restricted apis do now work - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - // re-open job now that the license is valid again - PlainActionFuture openJobListener = 
PlainActionFuture.newFuture(); - new MachineLearningClient(client).openJob(new OpenJobAction.Request(jobId), openJobListener); - AcknowledgedResponse openJobResponse = openJobListener.actionGet(); - assertNotNull(openJobResponse); - - PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).startDatafeed(new StartDatafeedAction.Request(datafeedId, 0L), listener); - AcknowledgedResponse response = listener.actionGet(); - assertNotNull(response); - } + // re-open job now that the license is valid again + PlainActionFuture openJobListener2 = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).openJob(new OpenJobAction.Request(jobId), openJobListener2); + AcknowledgedResponse openJobResponse3 = openJobListener2.actionGet(); + assertNotNull(openJobResponse3); + + PlainActionFuture listener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).startDatafeed(new StartDatafeedAction.Request(datafeedId, 0L), listener); + AcknowledgedResponse response = listener.actionGet(); + assertNotNull(response); } public void testMachineLearningStopDatafeedActionNotRestricted() throws Exception { @@ -373,29 +327,25 @@ public void testMachineLearningStopDatafeedActionNotRestricted() throws Exceptio prepareCreate(datafeedIndex).addMapping("type", "{\"type\":{\"properties\":{\"time\":{\"type\":\"date\"}}}}", XContentType.JSON).get(); // test that license restricted apis do now work - Settings settings = internalCluster().transportClient().settings(); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture putJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); - PutJobAction.Response putJobResponse = putJobListener.actionGet(); - assertNotNull(putJobResponse); - PlainActionFuture putDatafeedListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putDatafeed( - new PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, - Collections.singletonList(datafeedIndex))), putDatafeedListener); - PutDatafeedAction.Response putDatafeedResponse = putDatafeedListener.actionGet(); - assertNotNull(putDatafeedResponse); - PlainActionFuture openJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).openJob(new OpenJobAction.Request(jobId), openJobListener); - AcknowledgedResponse openJobResponse = openJobListener.actionGet(); - assertNotNull(openJobResponse); - PlainActionFuture startDatafeedListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).startDatafeed( - new StartDatafeedAction.Request(datafeedId, 0L), startDatafeedListener); - AcknowledgedResponse startDatafeedResponse = startDatafeedListener.actionGet(); - assertNotNull(startDatafeedResponse); - } + PlainActionFuture putJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); + PutJobAction.Response putJobResponse = putJobListener.actionGet(); + assertNotNull(putJobResponse); + PlainActionFuture putDatafeedListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putDatafeed( + new PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, + Collections.singletonList(datafeedIndex))), putDatafeedListener); + 
PutDatafeedAction.Response putDatafeedResponse = putDatafeedListener.actionGet(); + assertNotNull(putDatafeedResponse); + PlainActionFuture openJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).openJob(new OpenJobAction.Request(jobId), openJobListener); + AcknowledgedResponse openJobResponse = openJobListener.actionGet(); + assertNotNull(openJobResponse); + PlainActionFuture startDatafeedListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).startDatafeed( + new StartDatafeedAction.Request(datafeedId, 0L), startDatafeedListener); + AcknowledgedResponse startDatafeedResponse = startDatafeedListener.actionGet(); + assertNotNull(startDatafeedResponse); boolean invalidLicense = randomBoolean(); if (invalidLicense) { @@ -404,30 +354,27 @@ public void testMachineLearningStopDatafeedActionNotRestricted() throws Exceptio enableLicensing(randomValidLicenseType()); } - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).stopDatafeed(new StopDatafeedAction.Request(datafeedId), listener); - if (invalidLicense) { - // the stop datafeed due to invalid license happens async, so check if the datafeed turns into stopped state: - assertBusy(() -> { - GetDatafeedsStatsAction.Response response = - new MachineLearningClient(client) - .getDatafeedsStats(new GetDatafeedsStatsAction.Request(datafeedId)).actionGet(); - assertEquals(DatafeedState.STOPPED, response.getResponse().results().get(0).getDatafeedState()); - }); - } else { - listener.actionGet(); - } - - if (invalidLicense) { - // the close due to invalid license happens async, so check if the job turns into closed state: - assertBusy(() -> { - GetJobsStatsAction.Response response = - new MachineLearningClient(client).getJobsStats(new GetJobsStatsAction.Request(jobId)).actionGet(); - assertEquals(JobState.CLOSED, response.getResponse().results().get(0).getState()); - }); - } + PlainActionFuture listener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).stopDatafeed(new StopDatafeedAction.Request(datafeedId), listener); + if (invalidLicense) { + // the stop datafeed due to invalid license happens async, so check if the datafeed turns into stopped state: + assertBusy(() -> { + GetDatafeedsStatsAction.Response response = + new MachineLearningClient(client()) + .getDatafeedsStats(new GetDatafeedsStatsAction.Request(datafeedId)).actionGet(); + assertEquals(DatafeedState.STOPPED, response.getResponse().results().get(0).getDatafeedState()); + }); + } else { + listener.actionGet(); + } + + if (invalidLicense) { + // the close due to invalid license happens async, so check if the job turns into closed state: + assertBusy(() -> { + GetJobsStatsAction.Response response = + new MachineLearningClient(client()).getJobsStats(new GetJobsStatsAction.Request(jobId)).actionGet(); + assertEquals(JobState.CLOSED, response.getResponse().results().get(0).getState()); + }); } } @@ -435,18 +382,14 @@ public void testMachineLearningCloseJobActionNotRestricted() throws Exception { String jobId = "testmachinelearningclosejobactionnotrestricted"; assertMLAllowed(true); // test that license restricted apis do now work - Settings settings = internalCluster().transportClient().settings(); - try (TransportClient client = new 
TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture putJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); - PutJobAction.Response putJobResponse = putJobListener.actionGet(); - assertNotNull(putJobResponse); - PlainActionFuture openJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).openJob(new OpenJobAction.Request(jobId), openJobListener); - AcknowledgedResponse openJobResponse = openJobListener.actionGet(); - assertNotNull(openJobResponse); - } + PlainActionFuture putJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); + PutJobAction.Response putJobResponse = putJobListener.actionGet(); + assertNotNull(putJobResponse); + PlainActionFuture openJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).openJob(new OpenJobAction.Request(jobId), openJobListener); + AcknowledgedResponse openJobResponse = openJobListener.actionGet(); + assertNotNull(openJobResponse); boolean invalidLicense = randomBoolean(); if (invalidLicense) { @@ -455,22 +398,19 @@ public void testMachineLearningCloseJobActionNotRestricted() throws Exception { enableLicensing(randomValidLicenseType()); } - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture listener = PlainActionFuture.newFuture(); - CloseJobAction.Request request = new CloseJobAction.Request(jobId); - request.setCloseTimeout(TimeValue.timeValueSeconds(20)); - if (invalidLicense) { - // the close due to invalid license happens async, so check if the job turns into closed state: - assertBusy(() -> { - GetJobsStatsAction.Response response = - new MachineLearningClient(client).getJobsStats(new GetJobsStatsAction.Request(jobId)).actionGet(); - assertEquals(JobState.CLOSED, response.getResponse().results().get(0).getState()); - }); - } else { - new MachineLearningClient(client).closeJob(request, listener); - listener.actionGet(); - } + PlainActionFuture listener = PlainActionFuture.newFuture(); + CloseJobAction.Request request = new CloseJobAction.Request(jobId); + request.setCloseTimeout(TimeValue.timeValueSeconds(20)); + if (invalidLicense) { + // the close due to invalid license happens async, so check if the job turns into closed state: + assertBusy(() -> { + GetJobsStatsAction.Response response = + new MachineLearningClient(client()).getJobsStats(new GetJobsStatsAction.Request(jobId)).actionGet(); + assertEquals(JobState.CLOSED, response.getResponse().results().get(0).getState()); + }); + } else { + new MachineLearningClient(client()).closeJob(request, listener); + listener.actionGet(); } } @@ -478,25 +418,18 @@ public void testMachineLearningDeleteJobActionNotRestricted() throws Exception { String jobId = "testmachinelearningclosejobactionnotrestricted"; assertMLAllowed(true); // test that license restricted apis do now work - Settings settings = internalCluster().transportClient().settings(); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - 
client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture putJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); - PutJobAction.Response putJobResponse = putJobListener.actionGet(); - assertNotNull(putJobResponse); - } + PlainActionFuture putJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); + PutJobAction.Response putJobResponse = putJobListener.actionGet(); + assertNotNull(putJobResponse); // Pick a random license License.OperationMode mode = randomLicenseType(); enableLicensing(mode); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).deleteJob(new DeleteJobAction.Request(jobId), listener); - listener.actionGet(); - } + PlainActionFuture listener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).deleteJob(new DeleteJobAction.Request(jobId), listener); + listener.actionGet(); } public void testMachineLearningDeleteDatafeedActionNotRestricted() throws Exception { @@ -504,31 +437,24 @@ public void testMachineLearningDeleteDatafeedActionNotRestricted() throws Except String datafeedId = jobId + "-datafeed"; assertMLAllowed(true); // test that license restricted apis do now work - Settings settings = internalCluster().transportClient().settings(); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture putJobListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); - PutJobAction.Response putJobResponse = putJobListener.actionGet(); - assertNotNull(putJobResponse); - PlainActionFuture putDatafeedListener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).putDatafeed( - new PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, - Collections.singletonList(jobId))), putDatafeedListener); - PutDatafeedAction.Response putDatafeedResponse = putDatafeedListener.actionGet(); - assertNotNull(putDatafeedResponse); - } + PlainActionFuture putJobListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putJob(new PutJobAction.Request(createJob(jobId)), putJobListener); + PutJobAction.Response putJobResponse = putJobListener.actionGet(); + assertNotNull(putJobResponse); + PlainActionFuture putDatafeedListener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).putDatafeed( + new PutDatafeedAction.Request(createDatafeed(datafeedId, jobId, + Collections.singletonList(jobId))), putDatafeedListener); + PutDatafeedAction.Response putDatafeedResponse = putDatafeedListener.actionGet(); + assertNotNull(putDatafeedResponse); // Pick a random license License.OperationMode mode = randomLicenseType(); enableLicensing(mode); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateMachineLearning.class)) { - 
client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PlainActionFuture listener = PlainActionFuture.newFuture(); - new MachineLearningClient(client).deleteDatafeed(new DeleteDatafeedAction.Request(datafeedId), listener); - listener.actionGet(); - } + PlainActionFuture listener = PlainActionFuture.newFuture(); + new MachineLearningClient(client()).deleteDatafeed(new DeleteDatafeedAction.Request(datafeedId), listener); + listener.actionGet(); } private static OperationMode randomInvalidLicenseType() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java index df447d7ec6c35..2de1f79c64aa6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.xcontent.ToXContent; @@ -29,11 +28,11 @@ import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.XPackFeatureSet.Usage; import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.MachineLearningFeatureSetUsage; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.action.GetDatafeedsStatsAction; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; -import org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; @@ -295,21 +294,6 @@ public void testNodeCount() throws Exception { source = new XContentSource(builder); } assertThat(source.getValue("node_count"), equalTo(nodeCount)); - - BytesStreamOutput oldOut = new BytesStreamOutput(); - oldOut.setVersion(Version.V_6_0_0); - usage.writeTo(oldOut); - StreamInput oldInput = oldOut.bytes().streamInput(); - oldInput.setVersion(Version.V_6_0_0); - XPackFeatureSet.Usage oldSerializedUsage = new MachineLearningFeatureSetUsage(oldInput); - - XContentSource oldSource; - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - oldSerializedUsage.toXContent(builder, ToXContent.EMPTY_PARAMS); - oldSource = new XContentSource(builder); - } - - assertNull(oldSource.getValue("node_count")); } public void testUsageGivenMlMetadataNotInstalled() throws Exception { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportForecastJobActionRequestTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportForecastJobActionRequestTests.java index be115af6bcd72..e60e86cc54960 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportForecastJobActionRequestTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportForecastJobActionRequestTests.java @@ -23,7 +23,7 @@ public class 
TransportForecastJobActionRequestTests extends ESTestCase { public void testValidate_jobVersionCannonBeBefore61() { Job.Builder jobBuilder = createTestJob("forecast-it-test-job-version"); - jobBuilder.setJobVersion(Version.V_6_0_1); + jobBuilder.setJobVersion(Version.fromString("6.0.1")); ForecastJobAction.Request request = new ForecastJobAction.Request(); Exception e = expectThrows(ElasticsearchStatusException.class, () -> TransportForecastJobAction.validate(jobBuilder.build(), request)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index 040ed5e1d0ed4..92d7bbcc49e54 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; @@ -370,11 +371,14 @@ public void testSelectLeastLoadedMlNode_noNodesMatchingModelSnapshotMinVersion() Map nodeAttr = new HashMap<>(); nodeAttr.put(MachineLearning.MAX_OPEN_JOBS_NODE_ATTR, "10"); nodeAttr.put(MachineLearning.MACHINE_MEMORY_NODE_ATTR, "1000000000"); + Version node1Version = VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion(Version.CURRENT)); + Version node2Version = randomValueOtherThan(node1Version, + () -> VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion(Version.CURRENT))); DiscoveryNodes nodes = DiscoveryNodes.builder() .add(new DiscoveryNode("_node_name1", "_node_id1", new TransportAddress(InetAddress.getLoopbackAddress(), 9300), - nodeAttr, Collections.emptySet(), Version.V_6_2_0)) + nodeAttr, Collections.emptySet(), node1Version)) .add(new DiscoveryNode("_node_name2", "_node_id2", new TransportAddress(InetAddress.getLoopbackAddress(), 9301), - nodeAttr, Collections.emptySet(), Version.V_6_1_0)) + nodeAttr, Collections.emptySet(), node2Version)) .build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -386,7 +390,7 @@ public void testSelectLeastLoadedMlNode_noNodesMatchingModelSnapshotMinVersion() Job job = BaseMlIntegTestCase.createFareQuoteJob("job_with_incompatible_model_snapshot") .setModelSnapshotId("incompatible_snapshot") - .setModelSnapshotMinVersion(Version.V_6_3_0) + .setModelSnapshotMinVersion(Version.CURRENT) .build(new Date()); cs.nodes(nodes); metaData.putCustom(PersistentTasksCustomMetaData.TYPE, tasks); @@ -394,7 +398,7 @@ public void testSelectLeastLoadedMlNode_noNodesMatchingModelSnapshotMinVersion() Assignment result = TransportOpenJobAction.selectLeastLoadedMlNode("job_with_incompatible_model_snapshot", job, cs.build(), 10, 2, 30, memoryTracker, isMemoryTrackerRecentlyRefreshed, logger); assertThat(result.getExplanation(), containsString( - "because the job's model snapshot requires a node of version [6.3.0] or higher")); + "because the job's model snapshot requires a node of version [" + Version.CURRENT + "] or higher")); assertNull(result.getExecutorNode()); } diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java index 7ed5518c65077..6cf4d61cf176c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java @@ -232,6 +232,27 @@ public void testCreateConfigsGivenElasticsearchLogAndImpossibleGrokPatternOverri "\\[%{JAVACLASS:class} *\\] %{JAVALOGMESSAGE:message}] does not match sample messages", e.getMessage()); } + public void testErrorOnIncorrectMessageFormation() { + + // This sample causes problems because the (very weird) primary timestamp format + // is not detected but a secondary format that only occurs in one line is detected + String sample = "Day 21 Month 1 Year 2019 11:04 INFO [localhost] - starting\n" + + "Day 21 Month 1 Year 2019 11:04 INFO [localhost] - startup date [Mon Jan 21 11:04:19 CET 2019]\n" + + "Day 21 Month 1 Year 2019 11:04 DEBUG [localhost] - details\n" + + "Day 21 Month 1 Year 2019 11:04 DEBUG [localhost] - more details\n" + + "Day 21 Month 1 Year 2019 11:04 WARN [localhost] - something went wrong\n"; + + String charset = randomFrom(POSSIBLE_CHARSETS); + Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> factory.createFromSample(explanation, sample, charset, hasByteOrderMarker, FileStructureOverrides.EMPTY_OVERRIDES, + NOOP_TIMEOUT_CHECKER)); + + assertEquals("Failed to create more than one message from the sample lines provided. (The last is discarded in " + + "case the sample is incomplete.) 
If your sample does contain multiple messages the problem is probably that " + + "the primary timestamp format has been incorrectly detected, so try overriding it.", e.getMessage()); + } + public void testCreateMultiLineMessageStartRegexGivenNoPrefaces() { for (TimestampFormatFinder.CandidateTimestampFormat candidateTimestampFormat : TimestampFormatFinder.ORDERED_CANDIDATE_FORMATS) { String simpleDateRegex = candidateTimestampFormat.simplePattern.pattern(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java index 86f408a42d43b..40a1d3f969157 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutoDetectResultProcessorTests.java @@ -301,7 +301,8 @@ public void testProcessResult_modelSizeStatsWithMemoryStatusChanges() { // Now with hard_limit modelSizeStats = new ModelSizeStats.Builder(JOB_ID) .setMemoryStatus(ModelSizeStats.MemoryStatus.HARD_LIMIT) - .setModelBytes(new ByteSizeValue(512, ByteSizeUnit.MB).getBytes()) + .setModelBytesMemoryLimit(new ByteSizeValue(512, ByteSizeUnit.MB).getBytes()) + .setModelBytesExceeded(new ByteSizeValue(1, ByteSizeUnit.KB).getBytes()) .build(); when(result.getModelSizeStats()).thenReturn(modelSizeStats); processorUnderTest.processResult(context, result); @@ -311,9 +312,9 @@ public void testProcessResult_modelSizeStatsWithMemoryStatusChanges() { when(result.getModelSizeStats()).thenReturn(modelSizeStats); processorUnderTest.processResult(context, result); - // We should have only fired to notifications: one for soft_limit and one for hard_limit + // We should have only fired two notifications: one for soft_limit and one for hard_limit verify(auditor).warning(JOB_ID, Messages.getMessage(Messages.JOB_AUDIT_MEMORY_STATUS_SOFT_LIMIT)); - verify(auditor).error(JOB_ID, Messages.getMessage(Messages.JOB_AUDIT_MEMORY_STATUS_HARD_LIMIT, "512mb")); + verify(auditor).error(JOB_ID, Messages.getMessage(Messages.JOB_AUDIT_MEMORY_STATUS_HARD_LIMIT, "512mb", "1kb")); verifyNoMoreInteractions(auditor); } diff --git a/x-pack/plugin/monitoring/build.gradle b/x-pack/plugin/monitoring/build.gradle index b2e0c930e0d61..cb6395b18a4c2 100644 --- a/x-pack/plugin/monitoring/build.gradle +++ b/x-pack/plugin/monitoring/build.gradle @@ -13,6 +13,9 @@ dependencies { // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } // monitoring deps compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index e03f07740095a..94b205b3d85d1 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -233,7 +233,8 @@ public void testToXContent() throws IOException { final List usages = singletonList(new MonitoringFeatureSetUsage(false, true, false, null)); final NodeInfo mockNodeInfo = mock(NodeInfo.class); - when(mockNodeInfo.getVersion()).thenReturn(Version.V_6_0_0_alpha2); + Version mockNodeVersion = Version.CURRENT.minimumIndexCompatibilityVersion(); + when(mockNodeInfo.getVersion()).thenReturn(mockNodeVersion); when(mockNodeInfo.getNode()).thenReturn(discoveryNode); final TransportInfo mockTransportInfo = mock(TransportInfo.class); @@ -446,7 +447,7 @@ public void testToXContent() throws IOException { + "\"ingest\":0" + "}," + "\"versions\":[" - + "\"6.0.0-alpha2\"" + + "\"" + mockNodeVersion + "\"" + "]," + "\"os\":{" + "\"available_processors\":32," diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryMonitoringDocTests.java index 49fdd9ad244ac..08e04725c2e0b 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryMonitoringDocTests.java @@ -89,7 +89,7 @@ public void testToXContent() throws IOException { new TransportAddress(TransportAddress.META_ADDRESS, 9301), singletonMap("attr", "value_1"), singleton(DiscoveryNode.Role.DATA), - Version.V_6_0_0_alpha1); + Version.CURRENT.minimumIndexCompatibilityVersion()); final ShardId shardId = new ShardId("_index_a", "_uuid_a", 0); final RecoverySource source = RecoverySource.PeerRecoverySource.INSTANCE; diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java index ae363cd3c8258..2621a43ccee4d 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java @@ -5,9 +5,11 @@ */ package org.elasticsearch.xpack.monitoring.exporter.http; +import com.sun.net.httpserver.HttpsServer; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -15,6 +17,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ssl.TestsSSLService; import org.elasticsearch.xpack.core.ssl.VerificationMode; import org.elasticsearch.xpack.monitoring.exporter.Exporter; @@ -28,6 +31,9 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; +import 
java.util.List; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -98,6 +104,7 @@ private MockWebServer buildWebServer() throws IOException { .put("xpack.transport.security.ssl.certificate", cert) .put("xpack.transport.security.ssl.key", key) .put("xpack.transport.security.ssl.key_passphrase", "testnode") + .putList("xpack.transport.security.ssl.supported_protocols", getProtocols()) .put(globalSettings) .build(); @@ -185,4 +192,22 @@ private void clearTransientSettings(String... names) { updateSettings.transientSettings(builder.build()); client().admin().cluster().updateSettings(updateSettings).actionGet(); } + + + /** + * The {@link HttpsServer} in the JDK has issues with TLSv1.3 when running in a JDK prior to + * 12.0.1 so we pin to TLSv1.2 when running on an earlier JDK + */ + private static List getProtocols() { + if (JavaVersion.current().compareTo(JavaVersion.parse("12")) < 0) { + return List.of("TLSv1.2"); + } else { + JavaVersion full = + AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + if (full.compareTo(JavaVersion.parse("12.0.1")) < 0) { + return List.of("TLSv1.2"); + } + } + return XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS; + } } diff --git a/x-pack/plugin/rollup/build.gradle b/x-pack/plugin/rollup/build.gradle index d159f3334b998..ae53f78dad2ff 100644 --- a/x-pack/plugin/rollup/build.gradle +++ b/x-pack/plugin/rollup/build.gradle @@ -19,6 +19,9 @@ dependencies { // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } } integTest.enabled = false diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index e900d76c84913..4a8d007e3b898 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; @@ -61,9 +62,9 @@ public class RollupResponseTranslator { * Verifies a live-only search response. 
Essentially just checks for failure then returns * the response since we have no work to do */ - public static SearchResponse verifyResponse(MultiSearchResponse.Item normalResponse) { + public static SearchResponse verifyResponse(MultiSearchResponse.Item normalResponse) throws Exception { if (normalResponse.isFailure()) { - throw new RuntimeException(normalResponse.getFailureMessage(), normalResponse.getFailure()); + throw normalResponse.getFailure(); } return normalResponse.getResponse(); } @@ -77,16 +78,30 @@ public static SearchResponse verifyResponse(MultiSearchResponse.Item normalRespo * on the translation conventions */ public static SearchResponse translateResponse(MultiSearchResponse.Item[] rolledMsearch, - InternalAggregation.ReduceContext reduceContext) { + InternalAggregation.ReduceContext reduceContext) throws Exception { + + assert rolledMsearch.length > 0; + List responses = new ArrayList<>(); + for (MultiSearchResponse.Item item : rolledMsearch) { + if (item.isFailure()) { + Exception e = item.getFailure(); + + // If an index was deleted after execution, give a hint to the user that this is a transient error + if (e instanceof IndexNotFoundException) { + throw new ResourceNotFoundException("Index [" + ((IndexNotFoundException) e).getIndex().getName() + + "] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again."); + } - List responses = Arrays.stream(rolledMsearch) - .map(item -> { - if (item.isFailure()) { - throw new RuntimeException(item.getFailureMessage(), item.getFailure()); - } - return item.getResponse(); - }).collect(Collectors.toList()); + // Otherwise just throw + throw e; + } + // No error, add to responses + responses.add(item.getResponse()); + } + + assert responses.size() > 0; return doCombineResponse(null, responses, reduceContext); } @@ -187,48 +202,45 @@ public static SearchResponse translateResponse(MultiSearchResponse.Item[] rolled * @param msearchResponses The responses from the msearch, where the first response is the live-index response */ public static SearchResponse combineResponses(MultiSearchResponse.Item[] msearchResponses, - InternalAggregation.ReduceContext reduceContext) { - boolean liveMissing = false; + InternalAggregation.ReduceContext reduceContext) throws Exception { + assert msearchResponses.length >= 2; - // The live response is always first - MultiSearchResponse.Item liveResponse = msearchResponses[0]; - if (liveResponse.isFailure()) { - Exception e = liveResponse.getFailure(); - // If we have a rollup response we can tolerate a missing live response - if (e instanceof IndexNotFoundException) { - logger.warn("\"Live\" index not found during rollup search.", e); - liveMissing = true; + boolean first = true; + SearchResponse liveResponse = null; + List rolledResponses = new ArrayList<>(); + for (MultiSearchResponse.Item item : msearchResponses) { + if (item.isFailure()) { + Exception e = item.getFailure(); + + // If an index was deleted after execution, give a hint to the user that this is a transient error + if (e instanceof IndexNotFoundException) { + throw new ResourceNotFoundException("Index [" + ((IndexNotFoundException) e).getIndex() + "] was not found, " + + "likely because it was deleted while the request was in-flight. 
Rollup does not support partial search results, " + + "please try the request again.", e); + } + + // Otherwise just throw + throw e; + } + + // No error, add to responses + if (first) { + liveResponse = item.getResponse(); } else { - throw new RuntimeException(liveResponse.getFailureMessage(), liveResponse.getFailure()); + rolledResponses.add(item.getResponse()); } + first = false; } - List rolledResponses = Arrays.stream(msearchResponses) - .skip(1) - .map(item -> { - if (item.isFailure()) { - Exception e = item.getFailure(); - // If we have a normal response we can tolerate a missing rollup response, although it theoretically - // should be handled by a different code path (verifyResponse) - if (e instanceof IndexNotFoundException) { - logger.warn("Rollup index not found during rollup search.", e); - } else { - throw new RuntimeException(item.getFailureMessage(), item.getFailure()); - } - return null; - } else { - return item.getResponse(); - } - }).filter(Objects::nonNull).collect(Collectors.toList()); - // If we only have a live index left, process it directly - if (rolledResponses.isEmpty() && liveMissing == false) { - return verifyResponse(liveResponse); - } else if (rolledResponses.isEmpty() && liveMissing) { - throw new RuntimeException("No indices (live or rollup) found during rollup search"); + // If we only have a live index left, just return it directly. We know it can't be an error already + if (rolledResponses.isEmpty() && liveResponse != null) { + return liveResponse; + } else if (rolledResponses.isEmpty()) { + throw new ResourceNotFoundException("No indices (live or rollup) found during rollup search"); } - return doCombineResponse(liveResponse.getResponse(), rolledResponses, reduceContext); + return doCombineResponse(liveResponse, rolledResponses, reduceContext); } private static SearchResponse doCombineResponse(SearchResponse liveResponse, List rolledResponses, diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 414a0d08ef35a..2a1308353d6ad 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -111,7 +111,7 @@ protected void doExecute(Task task, SearchRequest request, ActionListener listener) { try { - // this is needed to exclude buckets that can still receive new documents. + // this is needed to exclude buckets that can still receive new documents DateHistogramGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHistogram(); - long rounded = dateHisto.createRounding().round(now); - if (dateHisto.getDelay() != null) { - // if the job has a delay we filter all documents that appear before it. - maxBoundary = rounded - TimeValue.parseTimeValue(dateHisto.getDelay().toString(), "").millis(); - } else { - maxBoundary = rounded; - } + // if the job has a delay we filter all documents that appear before it + long delay = dateHisto.getDelay() != null ? 
+ TimeValue.parseTimeValue(dateHisto.getDelay().toString(), "").millis() : 0; + maxBoundary = dateHisto.createRounding().round(now - delay); listener.onResponse(null); } catch (Exception e) { listener.onFailure(e); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index 84f0862183c44..25fe2f51b2f22 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.CheckedConsumer; @@ -110,14 +111,13 @@ public void testLiveFailure() { public void testRollupFailure() { MultiSearchResponse.Item[] failure = new MultiSearchResponse.Item[]{ - new MultiSearchResponse.Item(null, new IndexNotFoundException("live missing")), new MultiSearchResponse.Item(null, new RuntimeException("rollup failure"))}; BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); Exception e = expectThrows(RuntimeException.class, - () -> RollupResponseTranslator.combineResponses(failure, + () -> RollupResponseTranslator.translateResponse(failure, new InternalAggregation.ReduceContext(bigArrays, scriptService, true))); assertThat(e.getMessage(), equalTo("rollup failure")); } @@ -130,13 +130,14 @@ public void testLiveMissingRollupMissing() { BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); - Exception e = expectThrows(RuntimeException.class, + ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> RollupResponseTranslator.combineResponses(failure, new InternalAggregation.ReduceContext(bigArrays, scriptService, true))); - assertThat(e.getMessage(), equalTo("No indices (live or rollup) found during rollup search")); + assertThat(e.getMessage(), equalTo("Index [[foo]] was not found, likely because it was deleted while the request was in-flight. 
" + + "Rollup does not support partial search results, please try the request again.")); } - public void testMissingLiveIndex() { + public void testMissingLiveIndex() throws Exception { SearchResponse responseWithout = mock(SearchResponse.class); when(responseWithout.getTook()).thenReturn(new TimeValue(100)); List aggTree = new ArrayList<>(1); @@ -175,16 +176,13 @@ public void testMissingLiveIndex() { BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); - SearchResponse response = RollupResponseTranslator.combineResponses(msearch, - new InternalAggregation.ReduceContext(bigArrays, scriptService, true)); - assertNotNull(response); - Aggregations responseAggs = response.getAggregations(); - assertNotNull(responseAggs); - Avg avg = responseAggs.get("foo"); - assertThat(avg.getValue(), equalTo(5.0)); + ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> RollupResponseTranslator.combineResponses(msearch, + new InternalAggregation.ReduceContext(bigArrays, scriptService, true))); + assertThat(e.getMessage(), equalTo("Index [[foo]] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again.")); } - public void testRolledMissingAggs() { + public void testRolledMissingAggs() throws Exception { SearchResponse responseWithout = mock(SearchResponse.class); when(responseWithout.getTook()).thenReturn(new TimeValue(100)); @@ -192,13 +190,12 @@ public void testRolledMissingAggs() { when(responseWithout.getAggregations()).thenReturn(mockAggsWithout); MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{ - new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")), new MultiSearchResponse.Item(responseWithout, null)}; BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); - SearchResponse response = RollupResponseTranslator.combineResponses(msearch, + SearchResponse response = RollupResponseTranslator.translateResponse(msearch, new InternalAggregation.ReduceContext(bigArrays, scriptService, true)); assertNotNull(response); Aggregations responseAggs = response.getAggregations(); @@ -215,12 +212,13 @@ public void testMissingRolledIndex() { BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); ScriptService scriptService = mock(ScriptService.class); - SearchResponse finalResponse = RollupResponseTranslator.combineResponses(msearch, - new InternalAggregation.ReduceContext(bigArrays, scriptService, true)); - assertThat(finalResponse, equalTo(response)); + ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> RollupResponseTranslator.combineResponses(msearch, + new InternalAggregation.ReduceContext(bigArrays, scriptService, true))); + assertThat(e.getMessage(), equalTo("Index [[foo]] was not found, likely because it was deleted while the request was in-flight. 
" + + "Rollup does not support partial search results, please try the request again.")); } - public void testVerifyNormal() { + public void testVerifyNormal() throws Exception { SearchResponse response = mock(SearchResponse.class); MultiSearchResponse.Item item = new MultiSearchResponse.Item(response, null); @@ -235,7 +233,7 @@ public void testVerifyMissingNormal() { assertThat(e.getMessage(), equalTo("no such index [foo]")); } - public void testTranslateRollup() { + public void testTranslateRollup() throws Exception { SearchResponse response = mock(SearchResponse.class); when(response.getTook()).thenReturn(new TimeValue(100)); List aggTree = new ArrayList<>(1); @@ -286,9 +284,10 @@ public void testTranslateMissingRollup() { ScriptService scriptService = mock(ScriptService.class); InternalAggregation.ReduceContext context = new InternalAggregation.ReduceContext(bigArrays, scriptService, true); - Exception e = expectThrows(RuntimeException.class, + ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> RollupResponseTranslator.translateResponse(new MultiSearchResponse.Item[]{missing}, context)); - assertThat(e.getMessage(), equalTo("no such index [foo]")); + assertThat(e.getMessage(), equalTo("Index [foo] was not found, likely because it was deleted while the request was in-flight. " + + "Rollup does not support partial search results, please try the request again.")); } public void testMissingFilter() { @@ -351,7 +350,7 @@ public void testMatchingNameNotFilter() { equalTo("Expected [filter_foo] to be a FilterAggregation, but was [InternalMax]")); } - public void testSimpleReduction() { + public void testSimpleReduction() throws Exception { SearchResponse protoResponse = mock(SearchResponse.class); when(protoResponse.getTook()).thenReturn(new TimeValue(100)); List protoAggTree = new ArrayList<>(1); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java index b1ae36c538fec..00f2fbd3171d4 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java @@ -594,7 +594,7 @@ public void testMatchingIndexInMetadata() throws IOException { assertThat(result.getJobCaps().size(), equalTo(1)); } - public void testLiveOnlyProcess() { + public void testLiveOnlyProcess() throws Exception { String[] indices = new String[]{"foo"}; IndexMetaData indexMetaData = mock(IndexMetaData.class); ImmutableOpenMap.Builder meta = ImmutableOpenMap.builder(1); @@ -611,7 +611,7 @@ public void testLiveOnlyProcess() { assertThat(r, equalTo(response)); } - public void testRollupOnly() throws IOException { + public void testRollupOnly() throws Exception { String[] indices = new String[]{"foo"}; String jobName = randomAlphaOfLength(5); @@ -711,7 +711,7 @@ public void testEmptyMsearch() { assertThat(e.getMessage(), equalTo("MSearch response was empty, cannot unroll RollupSearch results")); } - public void testBoth() throws IOException { + public void testBoth() throws Exception { String[] indices = new String[]{"foo", "bar"}; String jobName = randomAlphaOfLength(5); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 
7346cf6f855e1..1fe01a8246267 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -326,6 +326,58 @@ public void testSimpleDateHistoWithDelay() throws Exception { }); } + public void testSimpleDateHistoWithOverlappingDelay() throws Exception { + String rollupIndex = randomAlphaOfLengthBetween(5, 10); + String field = "the_histo"; + DateHistogramGroupConfig dateHistoConfig = + new FixedInterval(field, new DateHistogramInterval("1h"), new DateHistogramInterval("15m"), null); + RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.emptyList()); + final List> dataset = new ArrayList<>(); + long now = asLong("2015-04-01T10:30:00.000Z"); + dataset.addAll( + Arrays.asList( + asMap("the_histo", now - TimeValue.timeValueMinutes(135).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(120).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(105).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(90).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(75).getMillis()), + asMap("the_histo", now - TimeValue.timeValueHours(1).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(45).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(30).getMillis()), + asMap("the_histo", now - TimeValue.timeValueMinutes(15).getMillis()), + asMap("the_histo", now) + ) + ); + final Rounding rounding = dateHistoConfig.createRounding(); + executeTestCase(dataset, job, now, (resp) -> { + assertThat(resp.size(), equalTo(2)); + IndexRequest request = resp.get(0); + assertThat(request.index(), equalTo(rollupIndex)); + assertThat(request.sourceAsMap(), equalTo( + asMap( + "_rollup.version", 2, + "the_histo.date_histogram.timestamp", rounding.round(now - TimeValue.timeValueHours(2).getMillis()), + "the_histo.date_histogram.interval", "1h", + "the_histo.date_histogram._count", 3, + "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), + "_rollup.id", job.getId() + ) + )); + request = resp.get(1); + assertThat(request.index(), equalTo(rollupIndex)); + assertThat(request.sourceAsMap(), equalTo( + asMap( + "_rollup.version", 2, + "the_histo.date_histogram.timestamp", rounding.round(now - TimeValue.timeValueHours(1).getMillis()), + "the_histo.date_histogram.interval", "1h", + "the_histo.date_histogram._count", 4, + "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), + "_rollup.id", job.getId() + ) + )); + }); + } + public void testSimpleDateHistoWithTimeZone() throws Exception { final List> dataset = new ArrayList<>(); long now = asLong("2015-04-01T10:00:00.000Z"); diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 27d0db1a48a86..cc5833d20e440 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -22,6 +22,9 @@ dependencies { testCompile project(path: xpackModule('sql:sql-action')) testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } compile 'com.unboundid:unboundid-ldapsdk:4.0.8' compileOnly 'org.bouncycastle:bcprov-jdk15on:1.59' @@ -322,4 +325,3 @@ gradle.projectsEvaluated { .findAll { it.path.startsWith(project.path + ":qa") } .each { check.dependsOn it.check } } - diff --git 
a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle index 00321c77808cd..29f278b95defa 100644 --- a/x-pack/plugin/security/cli/build.gradle +++ b/x-pack/plugin/security/cli/build.gradle @@ -13,6 +13,9 @@ dependencies { testImplementation 'com.google.jimfs:jimfs:1.1' testCompile "org.elasticsearch.test:framework:${version}" testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } } dependencyLicenses { diff --git a/x-pack/plugin/security/qa/basic-enable-security/build.gradle b/x-pack/plugin/security/qa/basic-enable-security/build.gradle new file mode 100644 index 0000000000000..a21e3c68d3fc4 --- /dev/null +++ b/x-pack/plugin/security/qa/basic-enable-security/build.gradle @@ -0,0 +1,68 @@ +import org.elasticsearch.gradle.test.RestIntegTestTask + +apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.rest-test' + +dependencies { + // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here + testCompile project(path: xpackModule('core'), configuration: 'default') + testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') +} + +task integTestNoSecurity(type: RestIntegTestTask) { + description = "Run tests against a cluster that doesn't have security" +} +tasks.getByName("integTestNoSecurityRunner").configure { + systemProperty 'tests.has_security', 'false' +} +check.dependsOn(integTestNoSecurity) + +task integTestSecurity(type: RestIntegTestTask) { + dependsOn integTestNoSecurity + description = "Run tests against a cluster that has security" +} +tasks.getByName("integTestSecurityRunner").configure { + systemProperty 'tests.has_security', 'true' +} +check.dependsOn(integTestSecurity) + +configure(extensions.findByName("integTestNoSecurityCluster")) { + clusterName = "enable-security-on-basic" + numNodes = 2 + + setting 'xpack.ilm.enabled', 'false' + setting 'xpack.ml.enabled', 'false' + setting 'xpack.license.self_generated.type', 'basic' + setting 'xpack.security.enabled', 'false' +} + +Task noSecurityTest = tasks.findByName("integTestNoSecurity") +configure(extensions.findByName("integTestSecurityCluster")) { + clusterName = "basic-license" + numNodes = 2 + dataDir = { nodeNum -> noSecurityTest.nodes[nodeNum].dataDir } + + setting 'xpack.ilm.enabled', 'false' + setting 'xpack.ml.enabled', 'false' + setting 'xpack.license.self_generated.type', 'basic' + setting 'xpack.security.enabled', 'true' + setting 'xpack.security.authc.anonymous.roles', 'anonymous' + setting 'xpack.security.transport.ssl.enabled', 'true' + setting 'xpack.security.transport.ssl.certificate', 'transport.crt' + setting 'xpack.security.transport.ssl.key', 'transport.key' + setting 'xpack.security.transport.ssl.key_passphrase', 'transport-password' + setting 'xpack.security.transport.ssl.certificate_authorities', 'ca.crt' + + extraConfigFile 'transport.key', project.projectDir.toPath().resolve('src/test/resources/ssl/transport.key').toFile() + extraConfigFile 'transport.crt', project.projectDir.toPath().resolve('src/test/resources/ssl/transport.crt').toFile() + extraConfigFile 'ca.crt', project.projectDir.toPath().resolve('src/test/resources/ssl/ca.crt').toFile() + + setupCommand 'setupAdminUser', + 'bin/elasticsearch-users', 'useradd', 'admin_user', '-p', 'admin-password', 
'-r', 'superuser' + setupCommand 'setupTestUser' , + 'bin/elasticsearch-users', 'useradd', 'security_test_user', '-p', 'security-test-password', '-r', 'security_test_role' + extraConfigFile 'roles.yml', project.projectDir.toPath().resolve('src/test/resources/roles.yml').toFile() +} + +integTest.enabled = false diff --git a/x-pack/plugin/security/qa/basic-enable-security/src/test/java/org/elasticsearch/xpack/security/EnableSecurityOnBasicLicenseIT.java b/x-pack/plugin/security/qa/basic-enable-security/src/test/java/org/elasticsearch/xpack/security/EnableSecurityOnBasicLicenseIT.java new file mode 100644 index 0000000000000..fa64a89f2f633 --- /dev/null +++ b/x-pack/plugin/security/qa/basic-enable-security/src/test/java/org/elasticsearch/xpack/security/EnableSecurityOnBasicLicenseIT.java @@ -0,0 +1,156 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.yaml.ObjectPath; +import org.elasticsearch.xpack.security.authc.InternalRealms; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Locale; +import java.util.Map; + +import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class EnableSecurityOnBasicLicenseIT extends ESRestTestCase { + + private static boolean securityEnabled; + + @BeforeClass + public static void checkTestMode() { + final String hasSecurity = System.getProperty("tests.has_security"); + securityEnabled = Booleans.parseBoolean(hasSecurity); + } + + @Override + protected Settings restAdminSettings() { + String token = basicAuthHeaderValue("admin_user", new SecureString("admin-password".toCharArray())); + return Settings.builder() + .put(ThreadContext.PREFIX + ".Authorization", token) + .build(); + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("security_test_user", new SecureString("security-test-password".toCharArray())); + return Settings.builder() + .put(ThreadContext.PREFIX + ".Authorization", token) + .build(); + } + + @Override + protected boolean preserveClusterUponCompletion() { + // If this is the first run (security not yet enabled), then don't clean up afterwards because we want to test restart with data + return securityEnabled == false; + } + + public void testSecuritySetup() throws Exception { + logger.info("Security status: {}", securityEnabled); + logger.info("Cluster:\n{}", getClusterInfo()); + logger.info("Indices:\n{}", getIndices()); + checkBasicLicenseType(); + + checkSecurityStatus(securityEnabled); + if (securityEnabled) { + checkAuthentication(); + } + + 
checkAllowedWrite("index_allowed"); + // Security runs second, and should see the doc from the first (non-security) run + final int expectedIndexCount = securityEnabled ? 2 : 1; + checkIndexCount("index_allowed", expectedIndexCount); + + final String otherIndex = "index_" + randomAlphaOfLengthBetween(2, 6).toLowerCase(Locale.ROOT); + if (securityEnabled) { + checkDeniedWrite(otherIndex); + } else { + checkAllowedWrite(otherIndex); + } + } + + private String getClusterInfo() throws IOException { + Map info = getAsMap("/"); + assertThat(info, notNullValue()); + return info.toString(); + } + + private String getIndices() throws IOException { + final Request request = new Request("GET", "/_cat/indices"); + Response response = client().performRequest(request); + return EntityUtils.toString(response.getEntity()); + } + + private void checkBasicLicenseType() throws IOException { + Map license = getAsMap("/_license"); + assertThat(license, notNullValue()); + assertThat(ObjectPath.evaluate(license, "license.type"), equalTo("basic")); + } + + private void checkSecurityStatus(boolean expectEnabled) throws IOException { + Map usage = getAsMap("/_xpack/usage"); + assertThat(usage, notNullValue()); + assertThat(ObjectPath.evaluate(usage, "security.available"), equalTo(true)); + assertThat(ObjectPath.evaluate(usage, "security.enabled"), equalTo(expectEnabled)); + if (expectEnabled) { + for (String realm : Arrays.asList("file", "native")) { + assertThat(ObjectPath.evaluate(usage, "security.realms." + realm + ".available"), equalTo(true)); + assertThat(ObjectPath.evaluate(usage, "security.realms." + realm + ".enabled"), equalTo(true)); + } + for (String realm : InternalRealms.getConfigurableRealmsTypes()) { + if (realm.equals("file") == false && realm.equals("native") == false) { + assertThat(ObjectPath.evaluate(usage, "security.realms." + realm + ".available"), equalTo(false)); + assertThat(ObjectPath.evaluate(usage, "security.realms." 
+ realm + ".enabled"), equalTo(false)); + } + } + } + } + + private void checkAuthentication() throws IOException { + final Map auth = getAsMap("/_security/_authenticate"); + // From file realm, configured in build.gradle + assertThat(ObjectPath.evaluate(auth, "username"), equalTo("security_test_user")); + assertThat(ObjectPath.evaluate(auth, "roles"), contains("security_test_role")); + } + + private void checkAllowedWrite(String indexName) throws IOException { + final Request request = new Request("POST", "/" + indexName + "/_doc"); + request.setJsonEntity("{ \"key\" : \"value\" }"); + Response response = client().performRequest(request); + final Map result = entityAsMap(response); + assertThat(ObjectPath.evaluate(result, "_index"), equalTo(indexName)); + assertThat(ObjectPath.evaluate(result, "result"), equalTo("created")); + } + + private void checkDeniedWrite(String indexName) { + final Request request = new Request("POST", "/" + indexName + "/_doc"); + request.setJsonEntity("{ \"key\" : \"value\" }"); + ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(request)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(403)); + assertThat(e.getMessage(), containsString("unauthorized for user [security_test_user]")); + } + + private void checkIndexCount(String indexName, int expectedCount) throws IOException { + final Request request = new Request("POST", "/" + indexName + "/_refresh"); + adminClient().performRequest(request); + + final Map result = getAsMap("/" + indexName + "/_count"); + assertThat(ObjectPath.evaluate(result, "count"), equalTo(expectedCount)); + } +} diff --git a/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/roles.yml b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/roles.yml new file mode 100644 index 0000000000000..eb6c3ec45786b --- /dev/null +++ b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/roles.yml @@ -0,0 +1,14 @@ +# A basic role that is used to test security +security_test_role: + cluster: + - monitor + - "cluster:admin/xpack/license/*" + indices: + - names: [ "index_allowed" ] + privileges: [ "read", "write", "create_index" ] + - names: [ "*" ] + privileges: [ "monitor" ] + +anonymous: + cluster: + - monitor \ No newline at end of file diff --git a/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/README.asciidoc b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/README.asciidoc new file mode 100644 index 0000000000000..b3729f42d17b0 --- /dev/null +++ b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/README.asciidoc @@ -0,0 +1,30 @@ += Keystore Details + +This document details the steps used to create the certificate and keystore files in this directory. 
+ +== Instructions on generating certificates +The certificates in this directory have been generated using elasticsearch-certutil (7.0.0 SNAPSHOT) + +[source,shell] +----------------------------------------------------------------------------------------------------------- +elasticsearch-certutil ca --pem --out=ca.zip --pass="ca-password" --days=3500 +unzip ca.zip +mv ca/ca.* ./ + +rm ca.zip +rmdir ca +----------------------------------------------------------------------------------------------------------- + +[source,shell] +----------------------------------------------------------------------------------------------------------- +elasticsearch-certutil cert --pem --name=transport --out=transport.zip --pass="transport-password" --days=3500 \ + --ca-cert=ca.crt --ca-key=ca.key --ca-pass="ca-password" \ + --dns=localhost --dns=localhost.localdomain --dns=localhost4 --dns=localhost4.localdomain4 --dns=localhost6 --dns=localhost6.localdomain6 \ + --ip=127.0.0.1 --ip=0:0:0:0:0:0:0:1 + +unzip transport.zip +mv transport/transport.* ./ + +rm transport.zip +rmdir transport +----------------------------------------------------------------------------------------------------------- diff --git a/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/ca.crt b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/ca.crt new file mode 100644 index 0000000000000..95068217a612a --- /dev/null +++ b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/ca.crt @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIVAL0RCyWTbBDd2ntuWoqRwW0IE9+9MA0GCSqGSIb3DQEB +CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu +ZXJhdGVkIENBMB4XDTE5MDQzMDAzNTQwN1oXDTI4MTEyODAzNTQwN1owNDEyMDAG +A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDA4VwADiyl+Xl15D27gtpS +TXZfHt40MUx12FY0MEd3A3hU+Fp4PaLE2ejECx04yrq8Rfc0Yltux/Fc5zE98XM8 +dY4j0QN/e6C/f0mrBI0KaJ25nv0MWFvoqS/D3vWvDFLUP1a3OZICWWoBDG+zCHe5 +Aq0qwge+FU9IUc7G2WPJeUp4e0+EzLxFInls3rTX1xkyq8Q6PT3gi0RZKvHqIudL +DAXDVEGWNxEX9KwQ1nMtRkDZICx/W665kZiBD4XC3WuEkYlDL1ISVw3cmsbYdhb4 +IusIK5zNERi4ewTgDDxic8TbRpkQW189/M3IglrQipH5ixfF6oNSyoRVAa3KZqj5 +AgMBAAGjUzBRMB0GA1UdDgQWBBRI4mOaeunbu60GfjWTpHcvhb6/YTAfBgNVHSME +GDAWgBRI4mOaeunbu60GfjWTpHcvhb6/YTAPBgNVHRMBAf8EBTADAQH/MA0GCSqG +SIb3DQEBCwUAA4IBAQCUOXddlGoU+Ni85D0cRjYYxyx8a5Rwngp+kztttT/5l3Ch +5JMZyl/xcaTryh37BG3+NuqKR1zHtcLpq/+xaCrwBQ8glJofF+1n9w4zBL9nrH5c +O5NgG7+u/sfB+xdqMVdoBBqfm1Roq7O1T/kBXis1+5ZtBlj+7WIKeWWTZGLTrHV+ +MW5RDOmMoLkqT5qzpR9Yf7UChPVrvKGs4Kd+fYJeb0R5W6mvZQ6/FrsLwAWLC2Q1 +rW1u4zIkO0ih5qd52dl/73u7SWqzWxPy1ynwqJefD4AA0uaJYtMlXHK2vYjutHvY +K7301gzc5fueqo1YMmPgsjjsj+ErR1t0ve7faOBy +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/ca.key b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/ca.key new file mode 100644 index 0000000000000..a6de1f9958d32 --- /dev/null +++ b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/ca.key @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,0F6B57727499DA47 + +OmK77UnFtk/zNEbNTxNJz73D2XWFDWLyHCDZPEXkX55vch/pXkkfVbWbPBFv35nA +LKni0j802Qnc1D4V3BUSmVWHk9SfjI5nlcDkSELbgCOpuZkf6Bmk8FgLfV42BFxn +lAiY+oBB4VV+rxA+HUV6CiWWrTgSjkvFyXCBZzcTEPdF2ifWerjsWKOjQZJtmvMX +J5DhYCCp1/n4R/OQpYxQiOqJdUxbKx4k0h139ySK2PggdL17w1a7AuQnHwJO3+ic +1IntPKD/ZhpAPPzq8A5R5jZyvrSj9Dgv94PXAQ5xTZWnZd2nuJtbkrYJ47pBR3Re 
+R2aZdF/N8ljG1TYHuJXdiL3A80Y3AS00TFNgSAZKSz5Ktt6zI2EAZu9xdHd8EfUm +m3qJmfce9P9cCBzo7DLGHwRMfu9hEFWN9dRD8KWNcB+ahQ1/jItzi25yZM6vD6+S +ZVUzegybeYlMwPks3YObX9IdUSwAd9F76SVwHCsziKQW4RfETaShG/oRNqq04nqA +E//KUl5bfTuv8jumyMlg6iiqIDQAUvzI74mWe2lIy6rglm2rR39SN4NxSrnTwoz4 +KAf+kHWJVyxFqEYs+dqboRWpRfQac3+iYoIlZFob/nRhNyKnccTkHtjh7+1C8CXI +sYXhuJZLCoiXh990M9t1ct0hqfWLNALlEsJesfRG8/fvi+LZd9i3fyCjrM+z96/G +/2zQzdga4bOs3ZEBluYFYkhHRJw1rAF3LTcWYvjP0gjZYVQki7AsLb0me1selS6O +P1bXaLaSUvMsAVO0wOtHMXAoBgEybP4+OonLiMScjdQZ2KRQ8L8OwzuGt0yguPRy +7wQv4NrH8LQu+X7tlQox28kascZUNHxORbh9M/wWx/2htw88uXWb5vxbDe30Rras +mTg0Gxky/88ZWvYxr7PlhBRrrfkJQ9sF/RyygUFhpQaXTwspkpF+MZv+1X6ROHqR +OueSa606FrptZ5n4RRPjq0hVZQgWKMAlIxNSum+gFn/Z7Q9I6gKrGFxjkD65L1kK +BbvbHAomiTyphrMtBRP52VqsFr4NxCWzxr/ZSlwaxTEid2vYg3zm7ls4dHYjUiNR +cs/JZJTkXn2aVaILSQkr9/I0eOOH9t/APSXHY8urQuYsDdmOOL7J2tlh3w1ivP8A +vVeomdUr2jgn53pBzbaLlTfsZ9+UneuLcztLfqN+BydQq1bKWvn2j3GvUkmhE//M ++fpo+uGlslMLh8rjtRH1y9rtCKhLgIxLO4U/ZJksFcJAqF3mR+Xxkrf82LUrAg8x +Oj++3QhOJE7f+vKog8b0gGrySSwzII2Ar7KiJDVJaZpmbbXToBlcC7xoksN3Ra0E +15WxKBSRqb7gi2+ml02rwtFMzq93H05Uoa9mG8uf1QH8t/+o6fniFx5N5kKWmPMy +shXjaYg7NzEBAkxI4VO41faMxEj/CUV0klQDPbnAsTCrcYu7CS2lml3e0zVf6RB8 +plXee99DiWpHZTRoGzpInK3DpnGRP1Frgl1KyhT+HayFZeYSMHfVSFPk3CKKmtEp +r+J/SrpGnEx0NKK3f+MxflZfnMIvgjGxkHdgSaDpz9iTjveq176Bq1GmNLALotOq +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/transport.crt b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/transport.crt new file mode 100644 index 0000000000000..8ffb02e3d5794 --- /dev/null +++ b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/transport.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIVAOSHUsKiRx+ekWEEmfI2Q2q3B5hoMA0GCSqGSIb3DQEB +CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu +ZXJhdGVkIENBMB4XDTE5MDQzMDAzNTU0NloXDTI4MTEyODAzNTU0NlowFDESMBAG +A1UEAxMJdHJhbnNwb3J0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +wBaoGJ9vv9yFxCOg24CsVfwSThOPnea8oujexGZYDgKkCdtcVn03tlyomjOra/dL +PJ0zOvUyktTxv022VQNhkJ/PO+w/NKpHBHaAVZE0o2zvUf8xQqXoHw0S6rAhurs5 +50r8QRkh1Z3ky3uOcFs0pXYCR/2ZVmQNSBhqmhUSK5y0VURot1MtPMw1SeqyabZQ +upDTJ6um/zk2LalfChKJ3vGQGEW7AGfv10eIWSmqQx6rLWAGO4MDelbZhUUr5iFc +D4fW0/MNUXJHTBO5Dyq6n63Wsm0jTYK72bSVw8LZS+uabQCtcHtKUZh38uUEUCjp +MDVY7YmDv0i8qx/MvWasbwIDAQABo4HgMIHdMB0GA1UdDgQWBBQwoESvk9jbbTax +/+c5MCAFEvWW5TAfBgNVHSMEGDAWgBRI4mOaeunbu60GfjWTpHcvhb6/YTCBjwYD +VR0RBIGHMIGEgglsb2NhbGhvc3SCF2xvY2FsaG9zdDYubG9jYWxkb21haW42hwR/ +AAABhxAAAAAAAAAAAAAAAAAAAAABggpsb2NhbGhvc3Q0ggpsb2NhbGhvc3Q2ghVs +b2NhbGhvc3QubG9jYWxkb21haW6CF2xvY2FsaG9zdDQubG9jYWxkb21haW40MAkG +A1UdEwQCMAAwDQYJKoZIhvcNAQELBQADggEBAIQ8/PLfsZ1eKOWW74a4h/Uh5eh8 +u9Led1v+U9tszmULN8JoYSEgyql6zy2pJOuIVLwI9cUvrcypUSDL53NmWhTGAjEL +jbww/G1cngBh5cBzAPq3lRL2lwc8j3ZZ16I1eNyWastvBDdtANlDArCUamZoboBm +HE/jrssC9DOQhxAraiitH3YqjquqztEp1zIuqRI0qYTDFNPzyfyXIyCFIT+3eVI5 +22MqjFL+9IDuoET+VU1i22LhF32TEPotz2hfZTFddql0V1IOJQuVkDkQGFvaJMFy +Xw7d4orV3sxzQYd7muCoaao7g/F675KqpZiiVHqKxTOLafF/MPcfLhH6xZk= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/transport.key b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/transport.key new file mode 100644 index 0000000000000..f540e17202492 --- /dev/null +++ b/x-pack/plugin/security/qa/basic-enable-security/src/test/resources/ssl/transport.key @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,0B9EFA0829A750FB + 
+NCrPD7gkQ4Jr5/xIiohWILW3nO/WmNjApqOIc5g/wX/xJpk/554f8zCZ8dUD0D2E +ZW+z7Yj8GWKB0E6+hQZ+3ZUHLYASYSpSDVjg8UaaCxQyoVcUhshahFprqlzgU/An +Er8TbrGvhH0VmNlcQhaImqCOk41Hf8gjrxrtoLKbk3DfTk/Uuv4Jlsz4X+oSBVZN +fezIN70IZvGLKu7O3T9DeVLV1bLL6hNGIXnYe+FzLomMck2UoFv6uGS1VyFIGNf0 +ly80NGgdWTGxzLmiiGCgm5gbqbIehMsei1CC3jZIcfgfGyp4NVvF4HxFxZLTR3kY +YqzBWta/PoY6XXOlLFZupYt/YMt9hU6It9xdudPyNxwSuFXU66Fc08Ljj151iyhv +Ggf88jo9xSVvKOlqqHN6dY/xo9CfzTyuldG4jsKVHgGosSGghksjZ+PpHc7Mo5aP +S/UofhQgApJgU30TQPiQuJ+my/h9CiJyIgP7HnZtltwxg1k3dj+LxlpRKvjTOfuc +epOFmPeIdPkrQDir0j9+h+yoMgeqoT2unUYXw/qx5SVQxB5ckajLmJkUJPej9U3O +wASqNcWCTBEkGt102RU8o6lywdzBvfTB7gegR6oDvRfaxHOiUrRT/IwgszRfIdoC +fZa7Pb9pUuR3oY4uduDYgIKnxJhhQF2ERVXsfQeyxdiHEXvRnBFoAhoDjO8rWv07 +xiFPVMCAqXPImmdI34QezuzV2MUIVlKyeovbf+Kjv/Uat3zTj5FbmyVHcmPXpTY7 +t5iTQG+nQwz6UGcM5lF40EWrRdCzHEXNszwEY3Oz8D5rgBa6kxHYjcG9rzbTGlk2 +gsKdKA0am0hnCCJdTxbK5AkDcCWn/eclw0RPpbhFv5anvHTJ5WAWE7ZaACRuSfvy +UbNRGiWo4cNcR7+PGgV5184zjwJOql1mz+I79tlpxtK/FazP61WAYKOeEx1paKXX +syq+WDWgoZu/RzKDyTu10NUgq9J/IXDBn8/JjOVPCmPhMMLxNdoUhMfO4Ij9+3Jv +mH6ZaU6E+NZuc5N4Ivws42PwNY9FoyuLLgMBbezjhepQrDveHUK5v0weWqEapZ7Z +4KkFAeK7pjuItn5Of+233cp9Y68G8NrwMLQzI23kebNJwwzUMf3DnUJCXiy3PvrF +WpA0Q6/FspJgG3x2AXKo2QsHxydW+4w4pkawS9TCl0E03D7V6Gf17/HOxPDSH972 ++Yzzv8IkaOw5g+paeX9+tHjDFaxuvKiFyn/J7xYZAAQUoa2uQu440RakE73qLO34 +wtWdRzvIYitwLNJSfSojQDNoXuv8eyI/hP573cs6pmbheKXG1XKsWfpfj8sI7OkH +AdjRyeToSKbZ8yCn2vp0jyaRocOucu5oo7c0v+IocWOgdw+913EToJ6G3ck1heVR +b/U04VqKkXowO1YK7xDBAalMxyWq40spIKCC8HBBlng3vfUKqF46q9bMpesXnwPr +/00JfDVhFbqkJbqB8UYpjs9MN+vV5A7lsYbObom4pV25FSnwNSyxK0bhWGfZgutI +pjeQDkvHNG606AsqLz6SmIJP/GBBSMwvT3PGMPOO5XcayKeK3cbOQYJ0Yh7Muoqe +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/security/qa/tls-basic/build.gradle b/x-pack/plugin/security/qa/tls-basic/build.gradle index 9f5ef26f6e6a6..6487475f7c8c3 100644 --- a/x-pack/plugin/security/qa/tls-basic/build.gradle +++ b/x-pack/plugin/security/qa/tls-basic/build.gradle @@ -8,6 +8,9 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } } forbiddenPatterns { @@ -45,4 +48,3 @@ integTestCluster { return http.wait(5000) } } - diff --git a/x-pack/plugin/security/src/main/bin/elasticsearch-migrate b/x-pack/plugin/security/src/main/bin/elasticsearch-migrate deleted file mode 100755 index 183722d9c9364..0000000000000 --- a/x-pack/plugin/security/src/main/bin/elasticsearch-migrate +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash - -# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -# or more contributor license agreements. Licensed under the Elastic License; -# you may not use this file except in compliance with the Elastic License. - -ES_MAIN_CLASS=org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool \ - ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \ - "`dirname "$0"`"/elasticsearch-cli \ - "$@" diff --git a/x-pack/plugin/security/src/main/bin/elasticsearch-migrate.bat b/x-pack/plugin/security/src/main/bin/elasticsearch-migrate.bat deleted file mode 100644 index a50bc1a384ed0..0000000000000 --- a/x-pack/plugin/security/src/main/bin/elasticsearch-migrate.bat +++ /dev/null @@ -1,19 +0,0 @@ -@echo off - -rem Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one -rem or more contributor license agreements. Licensed under the Elastic License; -rem you may not use this file except in compliance with the Elastic License. - -setlocal enabledelayedexpansion -setlocal enableextensions - -set ES_MAIN_CLASS=org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool -set ES_ADDITIONAL_SOURCES=x-pack-env;x-pack-security-env -call "%~dp0elasticsearch-cli.bat" ^ - %%* ^ - || goto exit - -endlocal -endlocal -:exit -exit /b %ERRORLEVEL% diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index a36a004c7f413..a6218522fb7e5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -258,8 +258,8 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_FORMAT_SETTING; import static org.elasticsearch.xpack.core.XPackSettings.API_KEY_SERVICE_ENABLED_SETTING; import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED; -import static org.elasticsearch.xpack.security.support.SecurityIndexManager.INTERNAL_MAIN_INDEX_FORMAT; import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.SECURITY_MAIN_ALIAS; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.INTERNAL_MAIN_INDEX_FORMAT; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_MAIN_TEMPLATE_7; public class Security extends Plugin implements ActionPlugin, IngestPlugin, NetworkPlugin, ClusterPlugin, @@ -1002,7 +1002,7 @@ public Function> getFieldFilter() { public BiConsumer getJoinValidator() { if (enabled) { return new ValidateTLSOnJoin(XPackSettings.TRANSPORT_SSL_ENABLED.get(settings), - DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings)) + DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings), settings) .andThen(new ValidateUpgradedSecurityIndex()) .andThen(new ValidateLicenseForFIPS(XPackSettings.FIPS_MODE_ENABLED.get(settings))); } @@ -1012,18 +1012,21 @@ public BiConsumer getJoinValidator() { static final class ValidateTLSOnJoin implements BiConsumer { private final boolean isTLSEnabled; private final String discoveryType; + private final Settings settings; - ValidateTLSOnJoin(boolean isTLSEnabled, String discoveryType) { + ValidateTLSOnJoin(boolean isTLSEnabled, String discoveryType, Settings settings) { this.isTLSEnabled = isTLSEnabled; this.discoveryType = discoveryType; + this.settings = settings; } @Override public void accept(DiscoveryNode node, ClusterState state) { License license = LicenseService.getLicense(state.metaData()); - if (license != null && license.isProductionLicense() && - isTLSEnabled == false && "single-node".equals(discoveryType) == false) { - throw new IllegalStateException("TLS setup is required for license type [" + license.operationMode().name() + "]"); + if (isTLSEnabled == false && "single-node".equals(discoveryType) == false + && XPackLicenseState.isTransportTlsRequired(license, settings)) { + throw new IllegalStateException("Transport TLS ([" + XPackSettings.TRANSPORT_SSL_ENABLED.getKey() + + "]) is required for license type [" + license.operationMode().description() + "] when security is enabled"); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java index 1b4aff064a0c3..4bab16cf92115 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectAuthenticateAction.java @@ -7,6 +7,8 @@ import com.nimbusds.oauth2.sdk.id.State; import com.nimbusds.openid.connect.sdk.Nonce; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -36,6 +38,7 @@ public class TransportOpenIdConnectAuthenticateAction private final ThreadPool threadPool; private final AuthenticationService authenticationService; private final TokenService tokenService; + private static final Logger logger = LogManager.getLogger(TransportOpenIdConnectAuthenticateAction.class); @Inject public TransportOpenIdConnectAuthenticateAction(ThreadPool threadPool, TransportService transportService, @@ -67,9 +70,8 @@ protected void doExecute(Task task, OpenIdConnectAuthenticateRequest request, .get(OpenIdConnectRealm.CONTEXT_TOKEN_DATA); tokenService.createOAuth2Tokens(authentication, originatingAuthentication, tokenMetadata, true, ActionListener.wrap(tuple -> { - final String tokenString = tokenService.getAccessTokenAsString(tuple.v1()); final TimeValue expiresIn = tokenService.getExpirationDelay(); - listener.onResponse(new OpenIdConnectAuthenticateResponse(authentication.getUser().principal(), tokenString, + listener.onResponse(new OpenIdConnectAuthenticateResponse(authentication.getUser().principal(), tuple.v1(), tuple.v2(), expiresIn)); }, listener::onFailure)); }, e -> { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java index 6b61742eed262..96eec7e8fd6c7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java @@ -63,10 +63,9 @@ protected void doExecute(Task task, SamlAuthenticateRequest request, ActionListe final Map tokenMeta = (Map) result.getMetadata().get(SamlRealm.CONTEXT_TOKEN_DATA); tokenService.createOAuth2Tokens(authentication, originatingAuthentication, tokenMeta, true, ActionListener.wrap(tuple -> { - final String tokenString = tokenService.getAccessTokenAsString(tuple.v1()); final TimeValue expiresIn = tokenService.getExpirationDelay(); listener.onResponse( - new SamlAuthenticateResponse(authentication.getUser().principal(), tokenString, tuple.v2(), expiresIn)); + new SamlAuthenticateResponse(authentication.getUser().principal(), tuple.v1(), tuple.v2(), expiresIn)); }, listener::onFailure)); }, e -> { logger.debug(() -> new ParameterizedMessage("SamlToken [{}] could not be authenticated", saml), e); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java index 
4b648d5ed4bc0..65456ccd2af51 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java @@ -88,9 +88,8 @@ private void createToken(CreateTokenRequest request, Authentication authenticati boolean includeRefreshToken, ActionListener listener) { tokenService.createOAuth2Tokens(authentication, originatingAuth, Collections.emptyMap(), includeRefreshToken, ActionListener.wrap(tuple -> { - final String tokenStr = tokenService.getAccessTokenAsString(tuple.v1()); final String scope = getResponseScopeValue(request.getScope()); - final CreateTokenResponse response = new CreateTokenResponse(tokenStr, tokenService.getExpirationDelay(), scope, + final CreateTokenResponse response = new CreateTokenResponse(tuple.v1(), tokenService.getExpirationDelay(), scope, tuple.v2()); listener.onResponse(response); }, listener::onFailure)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java index 71aeb64bc4276..5c161d889cfb1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java @@ -31,11 +31,9 @@ public TransportRefreshTokenAction(TransportService transportService, ActionFilt @Override protected void doExecute(Task task, CreateTokenRequest request, ActionListener listener) { tokenService.refreshToken(request.getRefreshToken(), ActionListener.wrap(tuple -> { - final String tokenStr = tokenService.getAccessTokenAsString(tuple.v1()); final String scope = getResponseScopeValue(request.getScope()); - final CreateTokenResponse response = - new CreateTokenResponse(tokenStr, tokenService.getExpirationDelay(), scope, tuple.v2()); + new CreateTokenResponse(tuple.v1(), tokenService.getExpirationDelay(), scope, tuple.v2()); listener.onResponse(response); }, listener::onFailure)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 6f96c9bf7dd88..a8f68870556e6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -86,6 +86,7 @@ import org.elasticsearch.xpack.core.security.authc.Authentication.AuthenticationType; import org.elasticsearch.xpack.core.security.authc.KeyAndTimestamp; import org.elasticsearch.xpack.core.security.authc.TokenMetaData; +import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -157,11 +158,12 @@ public final class TokenService { * Cheat Sheet and the * NIST Digital Identity Guidelines */ - private static final int ITERATIONS = 100000; + static final int TOKEN_SERVICE_KEY_ITERATIONS = 100000; + static final int TOKENS_ENCRYPTION_KEY_ITERATIONS = 1024; private static final String KDF_ALGORITHM = "PBKDF2withHMACSHA512"; - private static final int SALT_BYTES = 32; 
+ static final int SALT_BYTES = 32; private static final int KEY_BYTES = 64; - private static final int IV_BYTES = 12; + static final int IV_BYTES = 12; private static final int VERSION_BYTES = 4; private static final String ENCRYPTION_CIPHER = "AES/GCM/NoPadding"; private static final String EXPIRED_TOKEN_WWW_AUTH_VALUE = "Bearer realm=\"" + XPackField.SECURITY + @@ -179,14 +181,18 @@ public final class TokenService { TimeValue.MINUS_ONE, Property.NodeScope); static final String TOKEN_DOC_TYPE = "token"; + private static final int HASHED_TOKEN_LENGTH = 44; + // UUIDs are 16 bytes encoded base64 without padding, therefore the length is (16 / 3) * 4 + ((16 % 3) * 8 + 5) / 6 chars + private static final int TOKEN_LENGTH = 22; private static final String TOKEN_DOC_ID_PREFIX = TOKEN_DOC_TYPE + "_"; - static final int MINIMUM_BYTES = VERSION_BYTES + SALT_BYTES + IV_BYTES + 1; + static final int LEGACY_MINIMUM_BYTES = VERSION_BYTES + SALT_BYTES + IV_BYTES + 1; + static final int MINIMUM_BYTES = VERSION_BYTES + TOKEN_LENGTH + 1; + static final int LEGACY_MINIMUM_BASE64_BYTES = Double.valueOf(Math.ceil((4 * LEGACY_MINIMUM_BYTES) / 3)).intValue(); static final int MINIMUM_BASE64_BYTES = Double.valueOf(Math.ceil((4 * MINIMUM_BYTES) / 3)).intValue(); + static final Version VERSION_HASHED_TOKENS = Version.V_7_2_0; static final Version VERSION_TOKENS_INDEX_INTRODUCED = Version.V_7_2_0; static final Version VERSION_ACCESS_TOKENS_AS_UUIDS = Version.V_7_2_0; static final Version VERSION_MULTIPLE_CONCURRENT_REFRESHES = Version.V_7_2_0; - // UUIDs are 16 bytes encoded base64 without padding, therefore the length is (16 / 3) * 4 + ((16 % 3) * 8 + 5) / 6 chars - private static final int TOKEN_ID_LENGTH = 22; private static final Logger logger = LogManager.getLogger(TokenService.class); private final SecureRandom secureRandom = new SecureRandom(); @@ -235,31 +241,71 @@ public TokenService(Settings settings, Clock clock, Client client, XPackLicenseS } /** - * Creates an access token and optionally a refresh token as well, based on the provided authentication and metadata with an - * auto-generated token document id. The created tokens are stored in the security index. + * Creates an access token and optionally a refresh token as well, based on the provided authentication and metadata with + * auto-generated values. The created tokens are stored in the security index for versions up to + * {@link #VERSION_TOKENS_INDEX_INTRODUCED} and to a specific security tokens index for later versions. */ - public void createOAuth2Tokens(Authentication authentication, Authentication originatingClientAuth, - Map metadata, boolean includeRefreshToken, - ActionListener> listener) { + public void createOAuth2Tokens(Authentication authentication, Authentication originatingClientAuth, Map metadata, + boolean includeRefreshToken, ActionListener> listener) { // the created token is compatible with the oldest node version in the cluster final Version tokenVersion = getTokenVersionCompatibility(); // tokens moved to a separate index in newer versions final SecurityIndexManager tokensIndex = getTokensIndexForVersion(tokenVersion); // the id of the created tokens ought be unguessable - final String userTokenId = UUIDs.randomBase64UUID(); - createOAuth2Tokens(userTokenId, tokenVersion, tokensIndex, authentication, originatingClientAuth, metadata, includeRefreshToken, - listener); + final String accessToken = UUIDs.randomBase64UUID(); + final String refreshToken = includeRefreshToken ? 
UUIDs.randomBase64UUID() : null; + createOAuth2Tokens(accessToken, refreshToken, tokenVersion, tokensIndex, authentication, originatingClientAuth, metadata, listener); } /** - * Create an access token and optionally a refresh token as well, based on the provided authentication and metadata, with the given - * token document id. The created tokens are be stored in the security index. + * Creates an access token and optionally a refresh token as well from predefined values, based on the provided authentication and + * metadata. The created tokens are stored in the security index for versions up to {@link #VERSION_TOKENS_INDEX_INTRODUCED} and to a + * specific security tokens index for later versions. + */ + //public for testing + public void createOAuth2Tokens(String accessToken, String refreshToken, Authentication authentication, + Authentication originatingClientAuth, + Map metadata, ActionListener> listener) { + // the created token is compatible with the oldest node version in the cluster + final Version tokenVersion = getTokenVersionCompatibility(); + // tokens moved to a separate index in newer versions + final SecurityIndexManager tokensIndex = getTokensIndexForVersion(tokenVersion); + createOAuth2Tokens(accessToken, refreshToken, tokenVersion, tokensIndex, authentication, originatingClientAuth, metadata, listener); + } + + /** + * Create an access token and optionally a refresh token as well from predefined values, based on the provided authentication and + * metadata. + * + * @param accessToken The predefined seed value for the access token. This will then be + *
<ul> + * <li>Encrypted before stored for versions before {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li> + * <li>Hashed before stored for versions after {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li> + * <li>Stored in the security index for versions up to {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li> + * <li>Stored in a specific security tokens index for versions after {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li> + * <li>Prepended with a version ID and encoded with Base64 before returned to the caller of the APIs</li> + * </ul> + * @param refreshToken The predefined seed value for the refresh token. This will then be + * <ul> + * <li>Hashed before stored for versions after {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li> + * <li>Stored in the security index for versions up to {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li> + * <li>Stored in a specific security tokens index for versions after {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li> + * <li>Prepended with a version ID and encoded with Base64 before returned to the caller of the APIs for + * versions after {@link #VERSION_TOKENS_INDEX_INTRODUCED}</li> + * </ul>
+ * @param tokenVersion The version of the nodes with which these tokens will be compatible. + * @param tokensIndex The security tokens index + * @param authentication The authentication object representing the user for which the tokens are created + * @param originatingClientAuth The authentication object representing the client that called the related API + * @param metadata A map with metadata to be stored in the token document + * @param listener The listener to call upon completion with a {@link Tuple} containing the + * serialized access token and serialized refresh token as these will be returned to the client */ - private void createOAuth2Tokens(String userTokenId, Version tokenVersion, SecurityIndexManager tokensIndex, + private void createOAuth2Tokens(String accessToken, String refreshToken, Version tokenVersion, SecurityIndexManager tokensIndex, Authentication authentication, Authentication originatingClientAuth, Map metadata, - boolean includeRefreshToken, ActionListener> listener) { - assert userTokenId.length() == TOKEN_ID_LENGTH : "We assume token ids have a fixed length for nodes of a certain version." - + " When changing the token length, be careful that the inferences about its length still hold."; + ActionListener> listener) { + assert accessToken.length() == TOKEN_LENGTH : "We assume token ids have a fixed length for nodes of a certain version." + + " When changing the token length, be careful that the inferences about its length still hold."; ensureEnabled(); if (authentication == null) { listener.onFailure(traceLog("create token", new IllegalArgumentException("authentication must be provided"))); @@ -269,10 +315,19 @@ private void createOAuth2Tokens(String userTokenId, Version tokenVersion, Securi } else { final Authentication tokenAuth = new Authentication(authentication.getUser(), authentication.getAuthenticatedBy(), authentication.getLookedUpBy(), tokenVersion, AuthenticationType.TOKEN, authentication.getMetadata()); - final UserToken userToken = new UserToken(userTokenId, tokenVersion, tokenAuth, getExpirationTime(), metadata); - final String plainRefreshToken = includeRefreshToken ? UUIDs.randomBase64UUID() : null; - final BytesReference tokenDocument = createTokenDocument(userToken, plainRefreshToken, originatingClientAuth); - final String documentId = getTokenDocumentId(userToken); + final String storedAccessToken; + final String storedRefreshToken; + if (tokenVersion.onOrAfter(VERSION_HASHED_TOKENS)) { + storedAccessToken = hashTokenString(accessToken); + storedRefreshToken = (null == refreshToken) ? 
null : hashTokenString(refreshToken); + } else { + storedAccessToken = accessToken; + storedRefreshToken = refreshToken; + } + final UserToken userToken = new UserToken(storedAccessToken, tokenVersion, tokenAuth, getExpirationTime(), metadata); + final BytesReference tokenDocument = createTokenDocument(userToken, storedRefreshToken, originatingClientAuth); + final String documentId = getTokenDocumentId(storedAccessToken); + final IndexRequest indexTokenRequest = client.prepareIndex(tokensIndex.aliasName(), SINGLE_MAPPING_NAME, documentId) .setOpType(OpType.CREATE) .setSource(tokenDocument, XContentType.JSON) @@ -283,15 +338,17 @@ private void createOAuth2Tokens(String userTokenId, Version tokenVersion, Securi () -> executeAsyncWithOrigin(client, SECURITY_ORIGIN, IndexAction.INSTANCE, indexTokenRequest, ActionListener.wrap(indexResponse -> { if (indexResponse.getResult() == Result.CREATED) { + final String versionedAccessToken = prependVersionAndEncodeAccessToken(tokenVersion, accessToken); if (tokenVersion.onOrAfter(VERSION_TOKENS_INDEX_INTRODUCED)) { - final String versionedRefreshToken = plainRefreshToken != null - ? prependVersionAndEncode(tokenVersion, plainRefreshToken) - : null; - listener.onResponse(new Tuple<>(userToken, versionedRefreshToken)); + final String versionedRefreshToken = refreshToken != null + ? prependVersionAndEncodeRefreshToken(tokenVersion, refreshToken) + : null; + listener.onResponse(new Tuple<>(versionedAccessToken, versionedRefreshToken)); } else { - // prior versions are not version-prepended, as nodes on those versions don't expect it. + // prior versions of the refresh token are not version-prepended, as nodes on those + // versions don't expect it. // Such nodes might exist in a mixed cluster during a rolling upgrade. - listener.onResponse(new Tuple<>(userToken, plainRefreshToken)); + listener.onResponse(new Tuple<>(versionedAccessToken, refreshToken)); } } else { listener.onFailure(traceLog("create token", @@ -301,6 +358,15 @@ private void createOAuth2Tokens(String userTokenId, Version tokenVersion, Securi } } + /** + * Hashes an access or refresh token String so that it can safely be persisted in the index. We don't salt + * the values as these are v4 UUIDs that have enough entropy by themselves. 
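
(Reviewer note: a minimal standalone sketch of what the hashTokenString helper that follows boils down to, assuming Hasher.SHA256 amounts to a plain unsalted SHA-256 digest encoded with standard Base64, which is what the 44-character HASHED_TOKEN_LENGTH constant above corresponds to; illustrative only, not the Hasher implementation.)

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.util.Base64;

    final class TokenHashSketch {
        // Hash a token seed (a 22-char unpadded Base64 UUID) the way the service stores it:
        // no salt, because v4 UUIDs already carry enough entropy on their own.
        static String hashTokenString(String token) throws NoSuchAlgorithmException {
            byte[] digest = MessageDigest.getInstance("SHA-256").digest(token.getBytes(StandardCharsets.UTF_8));
            return Base64.getEncoder().encodeToString(digest); // 32-byte digest -> 44 Base64 chars
        }
    }
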
+ */ + // public for testing + public static String hashTokenString(String accessTokenString) { + return new String(Hasher.SHA256.hash(new SecureString(accessTokenString.toCharArray()))); + } + /** * Looks in the context to see if the request provided a header with a user token and if so the * token is validated, which might include authenticated decryption and verification that the token @@ -406,13 +472,24 @@ void decodeToken(String token, ActionListener listener) { final Version version = Version.readVersion(in); in.setVersion(version); if (version.onOrAfter(VERSION_ACCESS_TOKENS_AS_UUIDS)) { - // The token was created in a > VERSION_ACCESS_TOKENS_UUIDS cluster so it contains the tokenId as a String - String usedTokenId = in.readString(); - getUserTokenFromId(usedTokenId, version, listener); + // The token was created in a > VERSION_ACCESS_TOKENS_UUIDS cluster + if (in.available() < MINIMUM_BYTES) { + logger.debug("invalid token, smaller than [{}] bytes", MINIMUM_BYTES); + listener.onResponse(null); + return; + } + final String accessToken = in.readString(); + // TODO Remove this conditional after backporting to 7.x + if (version.onOrAfter(VERSION_HASHED_TOKENS)) { + final String userTokenId = hashTokenString(accessToken); + getUserTokenFromId(userTokenId, version, listener); + } else { + getUserTokenFromId(accessToken, version, listener); + } } else { // The token was created in a < VERSION_ACCESS_TOKENS_UUIDS cluster so we need to decrypt it to get the tokenId - if (in.available() < MINIMUM_BASE64_BYTES) { - logger.debug("invalid token, smaller than [{}] bytes", MINIMUM_BASE64_BYTES); + if (in.available() < LEGACY_MINIMUM_BYTES) { + logger.debug("invalid token, smaller than [{}] bytes", LEGACY_MINIMUM_BYTES); listener.onResponse(null); return; } @@ -709,8 +786,12 @@ private void indexInvalidation(Collection tokenIds, SecurityIndexManager /** * Called by the transport action in order to start the process of refreshing a token. + * + * @param refreshToken The refresh token as provided by the client + * @param listener The listener to call upon completion with a {@link Tuple} containing the + * serialized access token and serialized refresh token as these will be returned to the client */ - public void refreshToken(String refreshToken, ActionListener> listener) { + public void refreshToken(String refreshToken, ActionListener> listener) { ensureEnabled(); final Instant refreshRequested = clock.instant(); final Iterator backoff = DEFAULT_BACKOFF.iterator(); @@ -718,36 +799,49 @@ public void refreshToken(String refreshToken, ActionListener { final Authentication clientAuth = Authentication.readFromContext(client.threadPool().getThreadContext()); - innerRefresh(tokenDocHit.getId(), tokenDocHit.getSourceAsMap(), tokenDocHit.getSeqNo(), tokenDocHit.getPrimaryTerm(), - clientAuth, backoff, refreshRequested, listener); + innerRefresh(refreshToken, tokenDocHit.getId(), tokenDocHit.getSourceAsMap(), tokenDocHit.getSeqNo(), + tokenDocHit.getPrimaryTerm(), + clientAuth, backoff, refreshRequested, listener); }, listener::onFailure)); } /** - * Inferes the format and version of the passed in {@code refreshToken}. Delegates the actual search of the token document to + * Infers the format and version of the passed in {@code refreshToken}. Delegates the actual search of the token document to * {@code #findTokenFromRefreshToken(String, SecurityIndexManager, Iterator, ActionListener)} . 
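
(Reviewer note: as a reading aid for the hunk that follows, a hedged sketch of the three token shapes this change has to tell apart; the constants mirror the ones defined earlier in the file, while the class and method names here are illustrative only.)

    final class TokenShapeSketch {
        static final int TOKEN_LENGTH = 22;         // unpadded Base64 of a 16-byte UUID seed
        static final int HASHED_TOKEN_LENGTH = 44;  // padded Base64 of its SHA-256 digest

        // Rough classification mirroring the length-based dispatch in findTokenFromRefreshToken.
        static String classify(String refreshToken) {
            if (refreshToken.length() == TOKEN_LENGTH) {
                return "legacy unversioned UUID, looked up in the main security index";
            } else if (refreshToken.length() == HASHED_TOKEN_LENGTH) {
                return "already-hashed value as stored in the dedicated tokens index";
            } else {
                return "client-facing token: Base64(version id + UUID seed), unpacked before lookup";
            }
        }
    }
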
*/ private void findTokenFromRefreshToken(String refreshToken, Iterator backoff, ActionListener listener) { - if (refreshToken.length() == TOKEN_ID_LENGTH) { + if (refreshToken.length() == TOKEN_LENGTH) { // first check if token has the old format before the new version-prepended one logger.debug("Assuming an unversioned refresh token [{}], generated for node versions" - + " prior to the introduction of the version-header format.", refreshToken); + + " prior to the introduction of the version-header format.", refreshToken); findTokenFromRefreshToken(refreshToken, securityMainIndex, backoff, listener); } else { - try { - final Tuple versionAndRefreshTokenTuple = unpackVersionAndPayload(refreshToken); - final Version refreshTokenVersion = versionAndRefreshTokenTuple.v1(); - final String unencodedRefreshToken = versionAndRefreshTokenTuple.v2(); - if (false == refreshTokenVersion.onOrAfter(VERSION_TOKENS_INDEX_INTRODUCED) - || unencodedRefreshToken.length() != TOKEN_ID_LENGTH) { - logger.debug("Decoded refresh token [{}] with version [{}] is invalid.", unencodedRefreshToken, refreshTokenVersion); + if (refreshToken.length() == HASHED_TOKEN_LENGTH) { + logger.debug("Assuming a hashed refresh token [{}] retrieved from the tokens index", refreshToken); + findTokenFromRefreshToken(refreshToken, securityTokensIndex, backoff, listener); + } else { + logger.debug("Assuming a refresh token [{}] provided from a client", refreshToken); + try { + final Tuple versionAndRefreshTokenTuple = unpackVersionAndPayload(refreshToken); + final Version refreshTokenVersion = versionAndRefreshTokenTuple.v1(); + final String unencodedRefreshToken = versionAndRefreshTokenTuple.v2(); + if (refreshTokenVersion.before(VERSION_TOKENS_INDEX_INTRODUCED) || unencodedRefreshToken.length() != TOKEN_LENGTH) { + logger.debug("Decoded refresh token [{}] with version [{}] is invalid.", unencodedRefreshToken, + refreshTokenVersion); + listener.onFailure(malformedTokenException()); + } else { + // TODO Remove this conditional after backporting to 7.x + if (refreshTokenVersion.onOrAfter(VERSION_HASHED_TOKENS)) { + final String hashedRefreshToken = hashTokenString(unencodedRefreshToken); + findTokenFromRefreshToken(hashedRefreshToken, securityTokensIndex, backoff, listener); + } else { + findTokenFromRefreshToken(unencodedRefreshToken, securityTokensIndex, backoff, listener); + } + } + } catch (IOException e) { + logger.debug(() -> new ParameterizedMessage("Could not decode refresh token [{}].", refreshToken), e); listener.onFailure(malformedTokenException()); - } else { - findTokenFromRefreshToken(unencodedRefreshToken, securityTokensIndex, backoff, listener); } - } catch (IOException e) { - logger.debug("Could not decode refresh token [" + refreshToken + "].", e); - listener.onFailure(malformedTokenException()); } } } @@ -763,7 +857,7 @@ private void findTokenFromRefreshToken(String refreshToken, SecurityIndexManager final Consumer maybeRetryOnFailure = ex -> { if (backoff.hasNext()) { final TimeValue backofTimeValue = backoff.next(); - logger.debug("retrying after [" + backofTimeValue + "] back off"); + logger.debug("retrying after [{}] back off", backofTimeValue); final Runnable retryWithContextRunnable = client.threadPool().getThreadContext() .preserveContext(() -> findTokenFromRefreshToken(refreshToken, tokensIndexManager, backoff, listener)); client.threadPool().schedule(retryWithContextRunnable, backofTimeValue, GENERIC); @@ -821,13 +915,14 @@ private void findTokenFromRefreshToken(String refreshToken, SecurityIndexManager * 
supersedes this one. The new document that contains the new access token and refresh token is created and finally the new access * token and refresh token are returned to the listener. */ - private void innerRefresh(String tokenDocId, Map source, long seqNo, long primaryTerm, Authentication clientAuth, - Iterator backoff, Instant refreshRequested, ActionListener> listener) { + private void innerRefresh(String refreshToken, String tokenDocId, Map source, long seqNo, long primaryTerm, + Authentication clientAuth, Iterator backoff, Instant refreshRequested, + ActionListener> listener) { logger.debug("Attempting to refresh token stored in token document [{}]", tokenDocId); final Consumer onFailure = ex -> listener.onFailure(traceLog("refresh token", tokenDocId, ex)); final Tuple> checkRefreshResult; try { - checkRefreshResult = checkTokenDocumentForRefresh(clock.instant(), clientAuth, source); + checkRefreshResult = checkTokenDocumentForRefresh(refreshRequested, clientAuth, source); } catch (DateTimeException | IllegalStateException e) { onFailure.accept(new ElasticsearchSecurityException("invalid token document", e)); return; @@ -838,23 +933,29 @@ private void innerRefresh(String tokenDocId, Map source, long se } final RefreshTokenStatus refreshTokenStatus = checkRefreshResult.v1(); if (refreshTokenStatus.isRefreshed()) { - logger.debug("Token document [{}] was recently refreshed, when a new token document [{}] was generated. Reusing that result.", - tokenDocId, refreshTokenStatus.getSupersededBy()); - getSupersedingTokenDocAsyncWithRetry(refreshTokenStatus, backoff, listener); + logger.debug("Token document [{}] was recently refreshed, when a new token document was generated. Reusing that result.", + tokenDocId); + decryptAndReturnSupersedingTokens(refreshToken, refreshTokenStatus, listener); } else { - final String newUserTokenId = UUIDs.randomBase64UUID(); + final String newAccessTokenString = UUIDs.randomBase64UUID(); + final String newRefreshTokenString = UUIDs.randomBase64UUID(); final Version newTokenVersion = getTokenVersionCompatibility(); final Map updateMap = new HashMap<>(); updateMap.put("refreshed", true); - updateMap.put("refresh_time", clock.instant().toEpochMilli()); - if (newTokenVersion.onOrAfter(VERSION_TOKENS_INDEX_INTRODUCED)) { - // the superseding token document reference is formated as "|"; - // for now, only the ".security-tokens|" is a valid reference format - updateMap.put("superseded_by", securityTokensIndex.aliasName() + "|" + getTokenDocumentId(newUserTokenId)); - } else { - // preservers the format of the reference (without the alias prefix) - // so that old nodes in a mixed cluster can still understand it - updateMap.put("superseded_by", getTokenDocumentId(newUserTokenId)); + if (newTokenVersion.onOrAfter(VERSION_MULTIPLE_CONCURRENT_REFRESHES)) { + updateMap.put("refresh_time", clock.instant().toEpochMilli()); + try { + final byte[] iv = getRandomBytes(IV_BYTES); + final byte[] salt = getRandomBytes(SALT_BYTES); + String encryptedAccessAndRefreshToken = encryptSupersedingTokens(newAccessTokenString, + newRefreshTokenString, refreshToken, iv, salt); + updateMap.put("superseding.encrypted_tokens", encryptedAccessAndRefreshToken); + updateMap.put("superseding.encryption_iv", Base64.getEncoder().encodeToString(iv)); + updateMap.put("superseding.encryption_salt", Base64.getEncoder().encodeToString(salt)); + } catch (GeneralSecurityException e) { + logger.warn("could not encrypt access token and refresh token string", e); + 
onFailure.accept(invalidGrantException("could not refresh the requested token")); + } } assert seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO : "expected an assigned sequence number"; assert primaryTerm != SequenceNumbers.UNASSIGNED_PRIMARY_TERM : "expected an assigned primary term"; @@ -875,14 +976,15 @@ private void innerRefresh(String tokenDocId, Map source, long se updateResponse.getGetResult().sourceAsMap())); final Tuple parsedTokens = parseTokensFromDocument(source, null); final UserToken toRefreshUserToken = parsedTokens.v1(); - createOAuth2Tokens(newUserTokenId, newTokenVersion, getTokensIndexForVersion(newTokenVersion), - toRefreshUserToken.getAuthentication(), clientAuth, toRefreshUserToken.getMetadata(), true, listener); + createOAuth2Tokens(newAccessTokenString, newRefreshTokenString, newTokenVersion, + getTokensIndexForVersion(newTokenVersion), toRefreshUserToken.getAuthentication(), clientAuth, + toRefreshUserToken.getMetadata(), listener); } else if (backoff.hasNext()) { logger.info("failed to update the original token document [{}], the update result was [{}]. Retrying", tokenDocId, updateResponse.getResult()); final Runnable retryWithContextRunnable = client.threadPool().getThreadContext() - .preserveContext(() -> innerRefresh(tokenDocId, source, seqNo, primaryTerm, clientAuth, backoff, - refreshRequested, listener)); + .preserveContext(() -> innerRefresh(refreshToken, tokenDocId, source, seqNo, primaryTerm, clientAuth, + backoff, refreshRequested, listener)); client.threadPool().schedule(retryWithContextRunnable, backoff.next(), GENERIC); } else { logger.info("failed to update the original token document [{}] after all retries, the update result was [{}]. ", @@ -898,8 +1000,8 @@ private void innerRefresh(String tokenDocId, Map source, long se @Override public void onResponse(GetResponse response) { if (response.isExists()) { - innerRefresh(tokenDocId, response.getSource(), response.getSeqNo(), response.getPrimaryTerm(), - clientAuth, backoff, refreshRequested, listener); + innerRefresh(refreshToken, tokenDocId, response.getSource(), response.getSeqNo(), + response.getPrimaryTerm(), clientAuth, backoff, refreshRequested, listener); } else { logger.warn("could not find token document [{}] for refresh", tokenDocId); onFailure.accept(invalidGrantException("could not refresh the requested token")); @@ -927,8 +1029,8 @@ public void onFailure(Exception e) { if (backoff.hasNext()) { logger.debug("failed to update the original token document [{}], retrying", tokenDocId); final Runnable retryWithContextRunnable = client.threadPool().getThreadContext() - .preserveContext(() -> innerRefresh(tokenDocId, source, seqNo, primaryTerm, clientAuth, backoff, - refreshRequested, listener)); + .preserveContext(() -> innerRefresh(refreshToken, tokenDocId, source, seqNo, primaryTerm, + clientAuth, backoff, refreshRequested, listener)); client.threadPool().schedule(retryWithContextRunnable, backoff.next(), GENERIC); } else { logger.warn("failed to update the original token document [{}], after all retries", tokenDocId); @@ -941,72 +1043,47 @@ public void onFailure(Exception e) { } } - private void getSupersedingTokenDocAsyncWithRetry(RefreshTokenStatus refreshTokenStatus, Iterator backoff, - ActionListener> listener) { - final Consumer onFailure = ex -> listener - .onFailure(traceLog("get superseding token", refreshTokenStatus.getSupersededBy(), ex)); - getSupersedingTokenDocAsync(refreshTokenStatus, new ActionListener() { - private final Consumer maybeRetryOnFailure = ex -> { - if 
(backoff.hasNext()) { - final TimeValue backofTimeValue = backoff.next(); - logger.debug("retrying after [" + backofTimeValue + "] back off"); - final Runnable retryWithContextRunnable = client.threadPool().getThreadContext() - .preserveContext(() -> getSupersedingTokenDocAsync(refreshTokenStatus, this)); - client.threadPool().schedule(retryWithContextRunnable, backofTimeValue, GENERIC); - } else { - logger.warn("back off retries exhausted"); - onFailure.accept(ex); - } - }; - - @Override - public void onResponse(GetResponse response) { - if (response.isExists()) { - logger.debug("found superseding token document [{}] in index [{}] by following the [{}] reference", response.getId(), - response.getIndex(), refreshTokenStatus.getSupersededBy()); - final Tuple parsedTokens; - try { - parsedTokens = parseTokensFromDocument(response.getSource(), null); - } catch (IllegalStateException | DateTimeException e) { - logger.error("unable to decode existing user token", e); - listener.onFailure(new ElasticsearchSecurityException("could not refresh the requested token", e)); - return; - } - listener.onResponse(parsedTokens); - } else { - // We retry this since the creation of the superseding token document might already be in flight but not - // yet completed, triggered by a refresh request that came a few milliseconds ago - logger.info("could not find superseding token document from [{}] reference, retrying", - refreshTokenStatus.getSupersededBy()); - maybeRetryOnFailure.accept(invalidGrantException("could not refresh the requested token")); - } - } - - @Override - public void onFailure(Exception e) { - if (isShardNotAvailableException(e)) { - logger.info("could not find superseding token document from reference [{}], retrying", - refreshTokenStatus.getSupersededBy()); - maybeRetryOnFailure.accept(invalidGrantException("could not refresh the requested token")); - } else { - logger.warn("could not find superseding token document from reference [{}]", refreshTokenStatus.getSupersededBy()); - onFailure.accept(invalidGrantException("could not refresh the requested token")); - } + /** + * Decrypts the values of the superseding access token and the refresh token, using a key derived from the superseded refresh token. It + * encodes the version and serializes the tokens before calling the listener, in the same manner as {@link #createOAuth2Tokens } does. 
+ * + * @param refreshToken The refresh token that the user sent in the request, used to derive the decryption key + * @param refreshTokenStatus The {@link RefreshTokenStatus} containing information about the superseding tokens as retrieved from the + * index + * @param listener The listener to call upon completion with a {@link Tuple} containing the + * serialized access token and serialized refresh token as these will be returned to the client + */ + void decryptAndReturnSupersedingTokens(String refreshToken, RefreshTokenStatus refreshTokenStatus, + ActionListener> listener) { + final byte[] iv = Base64.getDecoder().decode(refreshTokenStatus.getIv()); + final byte[] salt = Base64.getDecoder().decode(refreshTokenStatus.getSalt()); + final byte[] encryptedSupersedingTokens = Base64.getDecoder().decode(refreshTokenStatus.getSupersedingTokens()); + try { + Cipher cipher = getDecryptionCipher(iv, refreshToken, salt); + final String supersedingTokens = new String(cipher.doFinal(encryptedSupersedingTokens), StandardCharsets.UTF_8); + final String[] decryptedTokens = supersedingTokens.split("\\|"); + if (decryptedTokens.length != 2) { + logger.warn("Decrypted tokens string is not correctly formatted"); + listener.onFailure(invalidGrantException("could not refresh the requested token")); } - }); + listener.onResponse(new Tuple<>(prependVersionAndEncodeAccessToken(refreshTokenStatus.getVersion(), decryptedTokens[0]), + prependVersionAndEncodeRefreshToken(refreshTokenStatus.getVersion(), decryptedTokens[1]))); + } catch (GeneralSecurityException | IOException e) { + logger.warn("Could not get stored superseding token values", e); + listener.onFailure(invalidGrantException("could not refresh the requested token")); + } } - private void getSupersedingTokenDocAsync(RefreshTokenStatus refreshTokenStatus, ActionListener listener) { - final String supersedingDocReference = refreshTokenStatus.getSupersededBy(); - if (supersedingDocReference.startsWith(securityTokensIndex.aliasName() + "|")) { - // superseding token doc is stored on the new tokens index, irrespective of where the superseded token doc resides - final String supersedingDocId = supersedingDocReference.substring(securityTokensIndex.aliasName().length() + 1); - getTokenDocAsync(supersedingDocId, securityTokensIndex, listener); - } else { - assert false == supersedingDocReference - .contains("|") : "The superseding doc reference appears to contain an alias name but should not"; - getTokenDocAsync(supersedingDocReference, securityMainIndex, listener); - } + /* + * Encrypts the values of the superseding access token and the refresh token, using a key derived from the superseded refresh token. 
+ * The tokens are concatenated to a string separated with `|` before encryption so that we only perform one encryption operation + * and that we only need to store one field + */ + String encryptSupersedingTokens(String supersedingAccessToken, String supersedingRefreshToken, + String refreshToken, byte[] iv, byte[] salt) throws GeneralSecurityException { + Cipher cipher = getEncryptionCipher(iv, refreshToken, salt); + final String supersedingTokens = supersedingAccessToken + "|" + supersedingRefreshToken; + return Base64.getEncoder().encodeToString(cipher.doFinal(supersedingTokens.getBytes(StandardCharsets.UTF_8))); } private void getTokenDocAsync(String tokenDocId, SecurityIndexManager tokensIndex, ActionListener listener) { @@ -1016,7 +1093,7 @@ private void getTokenDocAsync(String tokenDocId, SecurityIndexManager tokensInde () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest, listener, client::get)); } - private Version getTokenVersionCompatibility() { + Version getTokenVersionCompatibility() { // newly minted tokens are compatible with the min node version in the cluster return clusterService.state().nodes().getMinNodeVersion(); } @@ -1029,13 +1106,13 @@ public static Boolean isTokenServiceEnabled(Settings settings) { * A refresh token has a fixed maximum lifetime of {@code ExpiredTokenRemover#MAXIMUM_TOKEN_LIFETIME_HOURS} hours. This checks if the * token document represents a valid token wrt this time interval. */ - private static Optional checkTokenDocumentExpired(Instant now, Map source) { - final Long creationEpochMilli = (Long) source.get("creation_time"); + private static Optional checkTokenDocumentExpired(Instant refreshRequested, Map src) { + final Long creationEpochMilli = (Long) src.get("creation_time"); if (creationEpochMilli == null) { throw new IllegalStateException("token document is missing creation time value"); } else { final Instant creationTime = Instant.ofEpochMilli(creationEpochMilli); - if (now.isAfter(creationTime.plus(ExpiredTokenRemover.MAXIMUM_TOKEN_LIFETIME_HOURS, ChronoUnit.HOURS))) { + if (refreshRequested.isAfter(creationTime.plus(ExpiredTokenRemover.MAXIMUM_TOKEN_LIFETIME_HOURS, ChronoUnit.HOURS))) { return Optional.of(invalidGrantException("token document has expired")); } else { return Optional.empty(); @@ -1048,17 +1125,17 @@ private static Optional checkTokenDocumentExpire * parsed {@code RefreshTokenStatus} together with an {@code Optional} validation exception that encapsulates the various logic about * when and by who a token can be refreshed. 
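
(Reviewer note: a self-contained sketch of the round trip behind the new superseding.* fields handled by the encrypt/decrypt pair above, assuming an AES key already derived from the superseded refresh token; the derivation itself is sketched after the cipher helpers further below. Class and method names are illustrative, and the real code additionally authenticates the salt via updateAAD.)

    import javax.crypto.Cipher;
    import javax.crypto.SecretKey;
    import javax.crypto.spec.GCMParameterSpec;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    final class SupersedingTokensSketch {
        // One AES/GCM operation over "access|refresh", so only a single encrypted field is stored.
        static String encrypt(SecretKey key, byte[] iv, String access, String refresh) throws Exception {
            Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
            cipher.init(Cipher.ENCRYPT_MODE, key, new GCMParameterSpec(128, iv));
            byte[] ciphertext = cipher.doFinal((access + "|" + refresh).getBytes(StandardCharsets.UTF_8));
            return Base64.getEncoder().encodeToString(ciphertext);
        }

        // Decrypt and split; anything other than exactly two parts is treated as a failed refresh.
        static String[] decrypt(SecretKey key, byte[] iv, String encrypted) throws Exception {
            Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
            cipher.init(Cipher.DECRYPT_MODE, key, new GCMParameterSpec(128, iv));
            byte[] plaintext = cipher.doFinal(Base64.getDecoder().decode(encrypted));
            return new String(plaintext, StandardCharsets.UTF_8).split("\\|");
        }
    }
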
*/ - private static Tuple> checkTokenDocumentForRefresh(Instant now, - Authentication clientAuth, Map source) throws IllegalStateException, DateTimeException { + private static Tuple> checkTokenDocumentForRefresh( + Instant refreshRequested, Authentication clientAuth, Map source) throws IllegalStateException, DateTimeException { final RefreshTokenStatus refreshTokenStatus = RefreshTokenStatus.fromSourceMap(getRefreshTokenSourceMap(source)); final UserToken userToken = UserToken.fromSourceMap(getUserTokenSourceMap(source)); refreshTokenStatus.setVersion(userToken.getVersion()); - final ElasticsearchSecurityException validationException = checkTokenDocumentExpired(now, source).orElseGet(() -> { + final ElasticsearchSecurityException validationException = checkTokenDocumentExpired(refreshRequested, source).orElseGet(() -> { if (refreshTokenStatus.isInvalidated()) { return invalidGrantException("token has been invalidated"); } else { return checkClientCanRefresh(refreshTokenStatus, clientAuth) - .orElse(checkMultipleRefreshes(now, refreshTokenStatus).orElse(null)); + .orElse(checkMultipleRefreshes(refreshRequested, refreshTokenStatus).orElse(null)); } }); return new Tuple<>(refreshTokenStatus, Optional.ofNullable(validationException)); @@ -1111,13 +1188,14 @@ private static Map getUserTokenSourceMap(Map sou * @return An {@code Optional} containing the exception in case this refresh token cannot be reused, or an empty Optional if * refreshing is allowed. */ - private static Optional checkMultipleRefreshes(Instant now, RefreshTokenStatus refreshTokenStatus) { + private static Optional checkMultipleRefreshes(Instant refreshRequested, + RefreshTokenStatus refreshTokenStatus) { if (refreshTokenStatus.isRefreshed()) { if (refreshTokenStatus.getVersion().onOrAfter(VERSION_MULTIPLE_CONCURRENT_REFRESHES)) { - if (now.isAfter(refreshTokenStatus.getRefreshInstant().plus(30L, ChronoUnit.SECONDS))) { + if (refreshRequested.isAfter(refreshTokenStatus.getRefreshInstant().plus(30L, ChronoUnit.SECONDS))) { return Optional.of(invalidGrantException("token has already been refreshed more than 30 seconds in the past")); } - if (now.isBefore(refreshTokenStatus.getRefreshInstant().minus(30L, ChronoUnit.SECONDS))) { + if (refreshRequested.isBefore(refreshTokenStatus.getRefreshInstant().minus(30L, ChronoUnit.SECONDS))) { return Optional .of(invalidGrantException("token has been refreshed more than 30 seconds in the future, clock skew too great")); } @@ -1269,7 +1347,7 @@ private void sourceIndicesWithTokensAndRun(ActionListener> listener private BytesReference createTokenDocument(UserToken userToken, @Nullable String refreshToken, @Nullable Authentication originatingClientAuth) { assert refreshToken == null || originatingClientAuth != null : "non-null refresh token " + refreshToken - + " requires non-null client authn " + originatingClientAuth; + + " requires non-null client authn " + originatingClientAuth; try (XContentBuilder builder = XContentFactory.jsonBuilder()) { builder.startObject(); builder.field("doc_type", TOKEN_DOC_TYPE); @@ -1332,21 +1410,14 @@ private Tuple filterAndParseHit(SearchHit hit, @Nullable Pred */ private Tuple parseTokensFromDocument(Map source, @Nullable Predicate> filter) throws IllegalStateException, DateTimeException { - final String plainRefreshToken = (String) ((Map) source.get("refresh_token")).get("token"); + final String hashedRefreshToken = (String) ((Map) source.get("refresh_token")).get("token"); final Map userTokenSource = (Map) ((Map) 
source.get("access_token")).get("user_token"); if (null != filter && filter.test(userTokenSource) == false) { return null; } final UserToken userToken = UserToken.fromSourceMap(userTokenSource); - if (userToken.getVersion().onOrAfter(VERSION_TOKENS_INDEX_INTRODUCED)) { - final String versionedRefreshToken = plainRefreshToken != null ? - prependVersionAndEncode(userToken.getVersion(), plainRefreshToken) : null; - return new Tuple<>(userToken, versionedRefreshToken); - } else { - // do not prepend version to refresh token as the audience node version cannot deal with it - return new Tuple<>(userToken, plainRefreshToken); - } + return new Tuple<>(userToken, hashedRefreshToken); } private static String getTokenDocumentId(UserToken userToken) { @@ -1450,7 +1521,7 @@ public TimeValue getExpirationDelay() { return expirationDelay; } - private Instant getExpirationTime() { + Instant getExpirationTime() { return clock.instant().plusSeconds(expirationDelay.getSeconds()); } @@ -1478,38 +1549,34 @@ private String getFromHeader(ThreadContext threadContext) { return null; } - /** - * Serializes a token to a String containing the minimum compatible node version for decoding it back and either an encrypted - * representation of the token id for versions earlier to {@code #VERSION_ACCESS_TOKENS_UUIDS} or the token itself for versions after - * {@code #VERSION_ACCESS_TOKENS_UUIDS} - */ - public String getAccessTokenAsString(UserToken userToken) throws IOException, GeneralSecurityException { - if (userToken.getVersion().onOrAfter(VERSION_ACCESS_TOKENS_AS_UUIDS)) { + String prependVersionAndEncodeAccessToken(Version version, String accessToken) throws IOException, GeneralSecurityException { + if (version.onOrAfter(VERSION_ACCESS_TOKENS_AS_UUIDS)) { try (ByteArrayOutputStream os = new ByteArrayOutputStream(MINIMUM_BASE64_BYTES); OutputStream base64 = Base64.getEncoder().wrap(os); StreamOutput out = new OutputStreamStreamOutput(base64)) { - out.setVersion(userToken.getVersion()); - Version.writeVersion(userToken.getVersion(), out); - out.writeString(userToken.getId()); + out.setVersion(version); + Version.writeVersion(version, out); + out.writeString(accessToken); return new String(os.toByteArray(), StandardCharsets.UTF_8); } } else { // we know that the minimum length is larger than the default of the ByteArrayOutputStream so set the size to this explicitly - try (ByteArrayOutputStream os = new ByteArrayOutputStream(MINIMUM_BASE64_BYTES); + try (ByteArrayOutputStream os = new ByteArrayOutputStream(LEGACY_MINIMUM_BASE64_BYTES); OutputStream base64 = Base64.getEncoder().wrap(os); StreamOutput out = new OutputStreamStreamOutput(base64)) { - out.setVersion(userToken.getVersion()); + out.setVersion(version); KeyAndCache keyAndCache = keyCache.activeKeyCache; - Version.writeVersion(userToken.getVersion(), out); + Version.writeVersion(version, out); out.writeByteArray(keyAndCache.getSalt().bytes); out.writeByteArray(keyAndCache.getKeyHash().bytes); - final byte[] initializationVector = getNewInitializationVector(); + final byte[] initializationVector = getRandomBytes(IV_BYTES); out.writeByteArray(initializationVector); try (CipherOutputStream encryptedOutput = - new CipherOutputStream(out, getEncryptionCipher(initializationVector, keyAndCache, userToken.getVersion())); + new CipherOutputStream(out, getEncryptionCipher(initializationVector, keyAndCache, version)); StreamOutput encryptedStreamOutput = new OutputStreamStreamOutput(encryptedOutput)) { - encryptedStreamOutput.setVersion(userToken.getVersion()); - 
encryptedStreamOutput.writeString(userToken.getId()); + encryptedStreamOutput.setVersion(version); + encryptedStreamOutput.writeString(accessToken); + // StreamOutput needs to be closed explicitly because it wraps CipherOutputStream encryptedStreamOutput.close(); return new String(os.toByteArray(), StandardCharsets.UTF_8); } @@ -1517,7 +1584,7 @@ public String getAccessTokenAsString(UserToken userToken) throws IOException, Ge } } - private static String prependVersionAndEncode(Version version, String payload) { + static String prependVersionAndEncodeRefreshToken(Version version, String payload) { try (ByteArrayOutputStream os = new ByteArrayOutputStream(); OutputStream base64 = Base64.getEncoder().wrap(os); StreamOutput out = new OutputStreamStreamOutput(base64)) { @@ -1563,6 +1630,17 @@ Cipher getEncryptionCipher(byte[] iv, KeyAndCache keyAndCache, Version version) return cipher; } + /** + * Initialize the encryption cipher using the provided password to derive the encryption key. + */ + Cipher getEncryptionCipher(byte[] iv, String password, byte[] salt) throws GeneralSecurityException { + SecretKey key = computeSecretKey(password.toCharArray(), salt, TOKENS_ENCRYPTION_KEY_ITERATIONS); + Cipher cipher = Cipher.getInstance(ENCRYPTION_CIPHER); + cipher.init(Cipher.ENCRYPT_MODE, key, new GCMParameterSpec(128, iv), secureRandom); + cipher.updateAAD(salt); + return cipher; + } + private void getKeyAsync(BytesKey decodedSalt, KeyAndCache keyAndCache, ActionListener listener) { final SecretKey decodeKey = keyAndCache.getKey(decodedSalt); if (decodeKey != null) { @@ -1595,21 +1673,31 @@ private Cipher getDecryptionCipher(byte[] iv, SecretKey key, Version version, By return cipher; } - // Package private for testing - byte[] getNewInitializationVector() { - final byte[] initializationVector = new byte[IV_BYTES]; - secureRandom.nextBytes(initializationVector); - return initializationVector; + /** + * Initialize the decryption cipher using the provided password to derive the decryption key. + */ + private Cipher getDecryptionCipher(byte[] iv, String password, byte[] salt) throws GeneralSecurityException { + SecretKey key = computeSecretKey(password.toCharArray(), salt, TOKENS_ENCRYPTION_KEY_ITERATIONS); + Cipher cipher = Cipher.getInstance(ENCRYPTION_CIPHER); + cipher.init(Cipher.DECRYPT_MODE, key, new GCMParameterSpec(128, iv), secureRandom); + cipher.updateAAD(salt); + return cipher; + } + + byte[] getRandomBytes(int length) { + final byte[] bytes = new byte[length]; + secureRandom.nextBytes(bytes); + return bytes; } /** * Generates a secret key based off of the provided password and salt. - * This method is computationally expensive. + * This method can be computationally expensive. 
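
(Reviewer note: a short sketch of the key derivation behind these cipher helpers, mirroring the computeSecretKey change just below: PBKDF2withHMACSHA512 over the token string and salt, producing a 128-bit AES key; names are illustrative.)

    import javax.crypto.SecretKey;
    import javax.crypto.SecretKeyFactory;
    import javax.crypto.spec.PBEKeySpec;
    import javax.crypto.spec.SecretKeySpec;
    import java.security.GeneralSecurityException;

    final class KeyDerivationSketch {
        // Derive a 128-bit AES key from a token string and salt, as computeSecretKey does.
        static SecretKey deriveKey(char[] rawPassword, byte[] salt, int iterations) throws GeneralSecurityException {
            SecretKeyFactory factory = SecretKeyFactory.getInstance("PBKDF2withHMACSHA512");
            PBEKeySpec spec = new PBEKeySpec(rawPassword, salt, iterations, 128);
            return new SecretKeySpec(factory.generateSecret(spec).getEncoded(), "AES");
        }
    }

With TOKENS_ENCRYPTION_KEY_ITERATIONS at 1024 this derivation stays cheap enough to run once per refresh, while the 100000-iteration TOKEN_SERVICE_KEY_ITERATIONS path remains reserved for the long-lived token service keys.
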
*/ - static SecretKey computeSecretKey(char[] rawPassword, byte[] salt) + static SecretKey computeSecretKey(char[] rawPassword, byte[] salt, int iterations) throws NoSuchAlgorithmException, InvalidKeySpecException { SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance(KDF_ALGORITHM); - PBEKeySpec keySpec = new PBEKeySpec(rawPassword, salt, ITERATIONS, 128); + PBEKeySpec keySpec = new PBEKeySpec(rawPassword, salt, iterations, 128); SecretKey tmp = secretKeyFactory.generateSecret(keySpec); return new SecretKeySpec(tmp.getEncoded(), "AES"); } @@ -2003,7 +2091,7 @@ private KeyAndCache(KeyAndTimestamp keyAndTimestamp, BytesKey salt) { .setMaximumWeight(500L) .build(); try { - SecretKey secretKey = computeSecretKey(keyAndTimestamp.getKey().getChars(), salt.bytes); + SecretKey secretKey = computeSecretKey(keyAndTimestamp.getKey().getChars(), salt.bytes, TOKEN_SERVICE_KEY_ITERATIONS); keyCache.put(salt, secretKey); } catch (Exception e) { throw new IllegalStateException(e); @@ -2019,7 +2107,7 @@ private SecretKey getKey(BytesKey salt) { public SecretKey getOrComputeKey(BytesKey decodedSalt) throws ExecutionException { return keyCache.computeIfAbsent(decodedSalt, (salt) -> { try (SecureString closeableChars = keyAndTimestamp.getKey().clone()) { - return computeSecretKey(closeableChars.getChars(), salt.bytes); + return computeSecretKey(closeableChars.getChars(), salt.bytes, TOKEN_SERVICE_KEY_ITERATIONS); } }); } @@ -2074,24 +2162,32 @@ KeyAndCache get(BytesKey passphraseHash) { /** * Contains metadata associated with the refresh token that is used for validity checks, but does not contain the proper token string. */ - private static final class RefreshTokenStatus { + static final class RefreshTokenStatus { private final boolean invalidated; private final String associatedUser; private final String associatedRealm; private final boolean refreshed; @Nullable private final Instant refreshInstant; - @Nullable private final String supersededBy; + @Nullable + private final String supersedingTokens; + @Nullable + private final String iv; + @Nullable + private final String salt; private Version version; - private RefreshTokenStatus(boolean invalidated, String associatedUser, String associatedRealm, boolean refreshed, - Instant refreshInstant, String supersededBy) { + // pkg-private for testing + RefreshTokenStatus(boolean invalidated, String associatedUser, String associatedRealm, boolean refreshed, Instant refreshInstant, + String supersedingTokens, String iv, String salt) { this.invalidated = invalidated; this.associatedUser = associatedUser; this.associatedRealm = associatedRealm; this.refreshed = refreshed; this.refreshInstant = refreshInstant; - this.supersededBy = supersededBy; + this.supersedingTokens = supersedingTokens; + this.iv = iv; + this.salt = salt; } boolean isInvalidated() { @@ -2114,8 +2210,19 @@ boolean isRefreshed() { return refreshInstant; } - @Nullable String getSupersededBy() { - return supersededBy; + @Nullable + String getSupersedingTokens() { + return supersedingTokens; + } + + @Nullable + String getIv() { + return iv; + } + + @Nullable + String getSalt() { + return salt; } Version getVersion() { @@ -2149,8 +2256,11 @@ static RefreshTokenStatus fromSourceMap(Map refreshTokenSource) } final Long refreshEpochMilli = (Long) refreshTokenSource.get("refresh_time"); final Instant refreshInstant = refreshEpochMilli == null ? 
null : Instant.ofEpochMilli(refreshEpochMilli); - final String supersededBy = (String) refreshTokenSource.get("superseded_by"); - return new RefreshTokenStatus(invalidated, associatedUser, associatedRealm, refreshed, refreshInstant, supersededBy); + final String supersedingTokens = (String) refreshTokenSource.get("superseding.encrypted_tokens"); + final String iv = (String) refreshTokenSource.get("superseding.encryption_iv"); + final String salt = (String) refreshTokenSource.get("superseding.encryption_salt"); + return new RefreshTokenStatus(invalidated, associatedUser, associatedRealm, refreshed, refreshInstant, supersedingTokens, + iv, salt); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/UserToken.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/UserToken.java index 2bcf0849084bc..f46aa42a24450 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/UserToken.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/UserToken.java @@ -50,7 +50,7 @@ public final class UserToken implements Writeable, ToXContentObject { /** * Create a new token with an autogenerated id */ - UserToken(Version version, Authentication authentication, Instant expirationTime, Map metadata) { + private UserToken(Version version, Authentication authentication, Instant expirationTime, Map metadata) { this(UUIDs.randomBase64UUID(), version, authentication, expirationTime, metadata); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java deleted file mode 100644 index 6368f4a7510c9..0000000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateTool.java +++ /dev/null @@ -1,398 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authc.esnative; - -import joptsimple.OptionParser; -import joptsimple.OptionSet; -import joptsimple.OptionSpec; -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.core.Appender; -import org.apache.logging.log4j.core.LogEvent; -import org.apache.logging.log4j.core.LoggerContext; -import org.apache.logging.log4j.core.appender.AbstractAppender; -import org.apache.logging.log4j.core.config.Configuration; -import org.apache.logging.log4j.core.config.LoggerConfig; -import org.apache.logging.log4j.core.layout.PatternLayout; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.cli.EnvironmentAwareCommand; -import org.elasticsearch.cli.LoggingAwareMultiCommand; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.cli.Terminal.Verbosity; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.core.common.socket.SocketAccess; -import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.ssl.SSLConfiguration; -import org.elasticsearch.xpack.security.authz.store.FileRolesStore; -import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.security.authc.file.FileUserPasswdStore; -import org.elasticsearch.xpack.security.authc.file.FileUserRolesStore; - -import javax.net.ssl.HttpsURLConnection; - -import java.io.BufferedReader; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.net.HttpURLConnection; -import java.net.URI; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; - -/** - * This is the command-line tool used for migrating users and roles from the file-based realm into the new native realm using the API for - * import. It reads from the files and tries its best to add the users, showing an error if it was incapable of importing them. Any existing - * users or roles are skipped. 
- */ -public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand { - - public static void main(String[] args) throws Exception { - exit(new ESNativeRealmMigrateTool().main(args, Terminal.DEFAULT)); - } - - public ESNativeRealmMigrateTool() { - super("Imports file-based users and roles to the native security realm"); - subcommands.put("native", newMigrateUserOrRoles()); - } - - protected MigrateUserOrRoles newMigrateUserOrRoles() { - return new MigrateUserOrRoles(); - } - - /** - * Command to migrate users and roles to the native realm - */ - public static class MigrateUserOrRoles extends EnvironmentAwareCommand { - - private final OptionSpec username; - private final OptionSpec password; - private final OptionSpec url; - private final OptionSpec usersToMigrateCsv; - private final OptionSpec rolesToMigrateCsv; - - public MigrateUserOrRoles() { - super("Migrates users or roles from file to native realm"); - this.username = parser.acceptsAll(Arrays.asList("u", "username"), - "User used to authenticate with Elasticsearch") - .withRequiredArg().required(); - this.password = parser.acceptsAll(Arrays.asList("p", "password"), - "Password used to authenticate with Elasticsearch") - .withRequiredArg().required(); - this.url = parser.acceptsAll(Arrays.asList("U", "url"), - "URL of Elasticsearch host") - .withRequiredArg(); - this.usersToMigrateCsv = parser.acceptsAll(Arrays.asList("n", "users"), - "Users to migrate from file to native realm") - .withRequiredArg(); - this.rolesToMigrateCsv = parser.acceptsAll(Arrays.asList("r", "roles"), - "Roles to migrate from file to native realm") - .withRequiredArg(); - } - - // Visible for testing - public OptionParser getParser() { - return this.parser; - } - - @Override - protected void printAdditionalHelp(Terminal terminal) { - terminal.println("This tool migrates file based users[1] and roles[2] to the native realm in"); - terminal.println("elasticsearch, saving the administrator from needing to manually transition"); - terminal.println("them from the file."); - } - - // Visible for testing - @Override - public void execute(Terminal terminal, OptionSet options, Environment env) throws Exception { - terminal.println("starting migration of users and roles..."); - importUsers(terminal, env, options); - importRoles(terminal, env, options); - terminal.println("users and roles imported."); - } - - @SuppressForbidden(reason = "We call connect in doPrivileged and provide SocketPermission") - private String postURL(Settings settings, Environment env, String method, String urlString, - OptionSet options, @Nullable String bodyString) throws Exception { - URI uri = new URI(urlString); - URL url = uri.toURL(); - HttpURLConnection conn; - // If using SSL, need a custom service because it's likely a self-signed certificate - if ("https".equalsIgnoreCase(uri.getScheme())) { - final SSLService sslService = new SSLService(settings, env); - final SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.security.http.ssl"); - final HttpsURLConnection httpsConn = (HttpsURLConnection) url.openConnection(); - AccessController.doPrivileged((PrivilegedAction) () -> { - // Requires permission java.lang.RuntimePermission "setFactory"; - httpsConn.setSSLSocketFactory(sslService.sslSocketFactory(sslConfiguration)); - return null; - }); - conn = httpsConn; - } else { - conn = (HttpURLConnection) url.openConnection(); - } - conn.setRequestMethod(method); - conn.setReadTimeout(30 * 1000); // 30 second timeout - // Add basic-auth header - 
conn.setRequestProperty("Authorization", - UsernamePasswordToken.basicAuthHeaderValue(username.value(options), - new SecureString(password.value(options).toCharArray()))); - conn.setRequestProperty("Content-Type", XContentType.JSON.mediaType()); - conn.setDoOutput(true); // we'll be sending a body - SocketAccess.doPrivileged(conn::connect); - if (bodyString != null) { - try (OutputStream out = conn.getOutputStream()) { - out.write(bodyString.getBytes(StandardCharsets.UTF_8)); - } catch (Exception e) { - try { - conn.disconnect(); - } catch (Exception e2) { - // Ignore exceptions if we weren't able to close the connection after an error - } - throw e; - } - } - try (BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) { - StringBuilder sb = new StringBuilder(); - String line = null; - while ((line = reader.readLine()) != null) { - sb.append(line); - } - return sb.toString(); - } catch (IOException e) { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getErrorStream(), StandardCharsets.UTF_8))) { - StringBuilder sb = new StringBuilder(); - String line = null; - while ((line = reader.readLine()) != null) { - sb.append(line); - } - throw new IOException(sb.toString(), e); - } - } finally { - conn.disconnect(); - } - } - - Set getUsersThatExist(Terminal terminal, Settings settings, Environment env, OptionSet options) throws Exception { - Set existingUsers = new HashSet<>(); - String allUsersJson = postURL(settings, env, "GET", this.url.value(options) + "/_security/user/", options, null); - // EMPTY is safe here because we never use namedObject - try (XContentParser parser = JsonXContent.jsonXContent - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, allUsersJson)) { - XContentParser.Token token = parser.nextToken(); - String userName; - if (token == XContentParser.Token.START_OBJECT) { - while ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) { - userName = parser.currentName(); - existingUsers.add(userName); - parser.nextToken(); - parser.skipChildren(); - } - } else { - throw new ElasticsearchException("failed to retrieve users, expecting an object but got: " + token); - } - } - terminal.println("found existing users: " + existingUsers); - return existingUsers; - } - - static String createUserJson(String[] roles, char[] password) throws IOException { - XContentBuilder builder = jsonBuilder(); - builder.startObject(); - { - builder.field("password_hash", new String(password)); - builder.startArray("roles"); - for (String role : roles) { - builder.value(role); - } - builder.endArray(); - } - builder.endObject(); - return Strings.toString(builder); - } - - void importUsers(Terminal terminal, Environment env, OptionSet options) throws FileNotFoundException { - String usersCsv = usersToMigrateCsv.value(options); - String[] usersToMigrate = (usersCsv != null) ? 
usersCsv.split(",") : Strings.EMPTY_ARRAY; - Path usersFile = FileUserPasswdStore.resolveFile(env); - Path usersRolesFile = FileUserRolesStore.resolveFile(env); - if (Files.exists(usersFile) == false) { - throw new FileNotFoundException("users file [" + usersFile + "] does not exist"); - } else if (Files.exists(usersRolesFile) == false) { - throw new FileNotFoundException("users_roles file [" + usersRolesFile + "] does not exist"); - } - - terminal.println("importing users from [" + usersFile + "]..."); - final Logger logger = getTerminalLogger(terminal); - Map userToHashedPW = FileUserPasswdStore.parseFile(usersFile, logger, env.settings()); - Map userToRoles = FileUserRolesStore.parseFile(usersRolesFile, logger); - Set existingUsers; - try { - existingUsers = getUsersThatExist(terminal, env.settings(), env, options); - } catch (Exception e) { - throw new ElasticsearchException("failed to get users that already exist, skipping user import", e); - } - if (usersToMigrate.length == 0) { - usersToMigrate = userToHashedPW.keySet().toArray(new String[userToHashedPW.size()]); - } - for (String user : usersToMigrate) { - if (userToHashedPW.containsKey(user) == false) { - terminal.println("user [" + user + "] was not found in files, skipping"); - continue; - } else if (existingUsers.contains(user)) { - terminal.println("user [" + user + "] already exists, skipping"); - continue; - } - terminal.println("migrating user [" + user + "]"); - String reqBody = "n/a"; - try { - reqBody = createUserJson(userToRoles.get(user), userToHashedPW.get(user)); - String resp = postURL(env.settings(), env, "POST", - this.url.value(options) + "/_security/user/" + user, options, reqBody); - terminal.println(resp); - } catch (Exception e) { - throw new ElasticsearchException("failed to migrate user [" + user + "] with body: " + reqBody, e); - } - } - } - - Set getRolesThatExist(Terminal terminal, Settings settings, Environment env, OptionSet options) throws Exception { - Set existingRoles = new HashSet<>(); - String allRolesJson = postURL(settings, env, "GET", this.url.value(options) + "/_security/role/", options, null); - // EMPTY is safe here because we never use namedObject - try (XContentParser parser = JsonXContent.jsonXContent - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, allRolesJson)) { - XContentParser.Token token = parser.nextToken(); - String roleName; - if (token == XContentParser.Token.START_OBJECT) { - while ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) { - roleName = parser.currentName(); - existingRoles.add(roleName); - parser.nextToken(); - parser.skipChildren(); - } - } else { - throw new ElasticsearchException("failed to retrieve roles, expecting an object but got: " + token); - } - } - terminal.println("found existing roles: " + existingRoles); - return existingRoles; - } - - static String createRoleJson(RoleDescriptor rd) throws IOException { - XContentBuilder builder = jsonBuilder(); - rd.toXContent(builder, ToXContent.EMPTY_PARAMS, true); - return Strings.toString(builder); - } - - void importRoles(Terminal terminal, Environment env, OptionSet options) throws FileNotFoundException { - String rolesCsv = rolesToMigrateCsv.value(options); - String[] rolesToMigrate = (rolesCsv != null) ? 
rolesCsv.split(",") : Strings.EMPTY_ARRAY; - Path rolesFile = FileRolesStore.resolveFile(env).toAbsolutePath(); - if (Files.exists(rolesFile) == false) { - throw new FileNotFoundException("roles.yml file [" + rolesFile + "] does not exist"); - } - terminal.println("importing roles from [" + rolesFile + "]..."); - Logger logger = getTerminalLogger(terminal); - Map roles = FileRolesStore.parseRoleDescriptors(rolesFile, logger, true, Settings.EMPTY); - Set existingRoles; - try { - existingRoles = getRolesThatExist(terminal, env.settings(), env, options); - } catch (Exception e) { - throw new ElasticsearchException("failed to get roles that already exist, skipping role import", e); - } - if (rolesToMigrate.length == 0) { - rolesToMigrate = roles.keySet().toArray(new String[roles.size()]); - } - for (String roleName : rolesToMigrate) { - if (roles.containsKey(roleName) == false) { - terminal.println("no role [" + roleName + "] found, skipping"); - continue; - } else if (existingRoles.contains(roleName)) { - terminal.println("role [" + roleName + "] already exists, skipping"); - continue; - } - terminal.println("migrating role [" + roleName + "]"); - String reqBody = "n/a"; - try { - reqBody = createRoleJson(roles.get(roleName)); - String resp = postURL(env.settings(), env, "POST", - this.url.value(options) + "/_security/role/" + roleName, options, reqBody); - terminal.println(resp); - } catch (Exception e) { - throw new ElasticsearchException("failed to migrate role [" + roleName + "] with body: " + reqBody, e); - } - } - } - } - - /** - * Creates a new Logger that is detached from the ROOT logger and only has an appender that will output log messages to the terminal - */ - static Logger getTerminalLogger(final Terminal terminal) { - final Logger logger = LogManager.getLogger(ESNativeRealmMigrateTool.class); - Loggers.setLevel(logger, Level.ALL); - - final LoggerContext ctx = (LoggerContext) LogManager.getContext(false); - final Configuration config = ctx.getConfiguration(); - - // create appender - final Appender appender = new AbstractAppender(ESNativeRealmMigrateTool.class.getName(), null, - PatternLayout.newBuilder() - // Specify the configuration so log4j doesn't re-initialize - .withConfiguration(config) - .withPattern("%m") - .build()) { - @Override - public void append(LogEvent event) { - switch (event.getLevel().getStandardLevel()) { - case FATAL: - case ERROR: - terminal.println(Verbosity.NORMAL, event.getMessage().getFormattedMessage()); - break; - case OFF: - break; - default: - terminal.println(Verbosity.VERBOSE, event.getMessage().getFormattedMessage()); - break; - } - } - }; - appender.start(); - - // get the config, detach from parent, remove appenders, add custom appender - final LoggerConfig loggerConfig = config.getLoggerConfig(ESNativeRealmMigrateTool.class.getName()); - loggerConfig.setParent(null); - loggerConfig.getAppenders().forEach((s, a) -> Loggers.removeAppender(logger, a)); - Loggers.addAppender(logger, appender); - return logger; - } -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java index 9fc75d0e3385c..7e498efa4df2e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java @@ -241,7 +241,7 @@ private Version 
getDefinedVersion(String username) { case RemoteMonitoringUser.NAME: return RemoteMonitoringUser.DEFINED_SINCE; default: - return Version.V_6_0_0; + return Version.CURRENT.minimumIndexCompatibilityVersion(); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectProviderConfiguration.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectProviderConfiguration.java index 272ab283c75be..f5f8c8592b22a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectProviderConfiguration.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectProviderConfiguration.java @@ -15,7 +15,6 @@ * A Class that contains all the OpenID Connect Provider configuration */ public class OpenIdConnectProviderConfiguration { - private final String providerName; private final URI authorizationEndpoint; private final URI tokenEndpoint; private final URI userinfoEndpoint; @@ -23,9 +22,8 @@ public class OpenIdConnectProviderConfiguration { private final Issuer issuer; private final String jwkSetPath; - public OpenIdConnectProviderConfiguration(String providerName, Issuer issuer, String jwkSetPath, URI authorizationEndpoint, + public OpenIdConnectProviderConfiguration(Issuer issuer, String jwkSetPath, URI authorizationEndpoint, URI tokenEndpoint, @Nullable URI userinfoEndpoint, @Nullable URI endsessionEndpoint) { - this.providerName = Objects.requireNonNull(providerName, "OP Name must be provided"); this.authorizationEndpoint = Objects.requireNonNull(authorizationEndpoint, "Authorization Endpoint must be provided"); this.tokenEndpoint = Objects.requireNonNull(tokenEndpoint, "Token Endpoint must be provided"); this.userinfoEndpoint = userinfoEndpoint; @@ -34,10 +32,6 @@ public OpenIdConnectProviderConfiguration(String providerName, Issuer issuer, St this.jwkSetPath = Objects.requireNonNull(jwkSetPath, "jwkSetUrl must be provided"); } - public String getProviderName() { - return providerName; - } - public URI getAuthorizationEndpoint() { return authorizationEndpoint; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java index 72b04951a9121..5f876a677d689 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealm.java @@ -69,7 +69,6 @@ import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_ENDSESSION_ENDPOINT; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_ISSUER; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_JWKSET_PATH; -import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_NAME; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.OP_USERINFO_ENDPOINT; import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.POPULATE_USER_METADATA; @@ -267,7 +266,6 @@ private RelyingPartyConfiguration buildRelyingPartyConfiguration(RealmConfig con } private 
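The ReservedRealm change above swaps the hard-coded Version.V_6_0_0 fallback for the oldest index-compatible version. For a 7.x CURRENT that still resolves to 6.0.0, so behaviour is unchanged while the explicit 6.0 constant can go away; a small sketch of the relationship (assumption: CURRENT is a 7.x version):

    Version fallback = Version.CURRENT.minimumIndexCompatibilityVersion();
    // first release of the previous major, i.e. 6.0.0 when CURRENT is 7.x
    assert fallback.major == Version.CURRENT.major - 1;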
OpenIdConnectProviderConfiguration buildOpenIdConnectProviderConfiguration(RealmConfig config) { - String providerName = require(config, OP_NAME); Issuer issuer = new Issuer(require(config, OP_ISSUER)); String jwkSetUrl = require(config, OP_JWKSET_PATH); @@ -303,7 +301,7 @@ private OpenIdConnectProviderConfiguration buildOpenIdConnectProviderConfigurati throw new SettingsException("Invalid URI: " + OP_ENDSESSION_ENDPOINT.getKey(), e); } - return new OpenIdConnectProviderConfiguration(providerName, issuer, jwkSetUrl, authorizationEndpoint, tokenEndpoint, + return new OpenIdConnectProviderConfiguration(issuer, jwkSetUrl, authorizationEndpoint, tokenEndpoint, userinfoEndpoint, endsessionEndpoint); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index 1b6da7f68ca4e..bb98dddbe1ddf 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.Client; import org.elasticsearch.common.CheckedBiConsumer; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -329,7 +330,12 @@ public void onSecurityIndexStateChange(SecurityIndexManager.State previousState, } private void refreshRealms(ActionListener listener, Result result) { - String[] realmNames = this.realmsToRefresh.toArray(new String[realmsToRefresh.size()]); + if (realmsToRefresh.isEmpty()) { + listener.onResponse(result); + return; + } + + final String[] realmNames = this.realmsToRefresh.toArray(Strings.EMPTY_ARRAY); final SecurityClient securityClient = new SecurityClient(client); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, securityClient.prepareClearRealmCache().realms(realmNames).request(), @@ -340,7 +346,7 @@ private void refreshRealms(ActionListener listener, Result resu listener.onResponse(result); }, ex -> { - logger.warn("Failed to clear cache for realms [{}]", Arrays.toString(realmNames)); + logger.warn(new ParameterizedMessage("Failed to clear cache for realms [{}]", Arrays.toString(realmNames)), ex); listener.onFailure(ex); }), securityClient::clearRealmCache); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClusterPrivilegeTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClusterPrivilegeTests.java index 2ceb14a172fe4..384401edaf510 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClusterPrivilegeTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClusterPrivilegeTests.java @@ -6,12 +6,14 @@ package org.elasticsearch.integration; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.client.Request; import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import 
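Two points in the NativeRoleMappingStore hunk above: refreshRealms now returns early when no realm uses this store (avoiding an empty clear-realm-cache call), and the warning now carries the exception, where the previous warn call dropped the throwable entirely. The fixed logging idiom, assuming a log4j Logger named logger:

    // Wrapping the text in a ParameterizedMessage lets the throwable be passed
    // separately, so the stack trace is logged alongside the message.
    logger.warn(new ParameterizedMessage("Failed to clear cache for realms [{}]",
            Arrays.toString(realmNames)), ex);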
org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.security.authc.support.Hasher; +import org.hamcrest.Matchers; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -139,7 +141,6 @@ public void testThatClusterPrivilegesWorkAsExpectedViaHttp() throws Exception { assertAccessIsDenied("user_d", "PUT", "/_cluster/settings", "{ \"transient\" : { \"search.default_search_timeout\": \"1m\" } }"); } - @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/38030") public void testThatSnapshotAndRestore() throws Exception { String repoJson = Strings.toString(jsonBuilder().startObject().field("type", "fs").startObject("settings").field("location", repositoryLocation.toString()).endObject().endObject()); @@ -203,6 +204,11 @@ private void waitForSnapshotToFinish(String repo, String snapshot) throws Except assertBusy(() -> { SnapshotsStatusResponse response = client().admin().cluster().prepareSnapshotStatus(repo).setSnapshots(snapshot).get(); assertThat(response.getSnapshots().get(0).getState(), is(SnapshotsInProgress.State.SUCCESS)); + // The status of the snapshot in the repository can become SUCCESS before it is fully finalized in the cluster state so wait for + // it to disappear from the cluster state as well + SnapshotsInProgress snapshotsInProgress = + client().admin().cluster().state(new ClusterStateRequest()).get().getState().custom(SnapshotsInProgress.TYPE); + assertThat(snapshotsInProgress.entries(), Matchers.empty()); }); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java index 7305b2f1902cf..1fe3eeaef52ee 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java @@ -158,10 +158,10 @@ public void testUserU1() throws Exception { assertUserIsDenied("u1", "all", "b"); assertUserIsDenied("u1", "all", "c"); assertAccessIsAllowed("u1", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsAllowed("u1", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + assertAccessIsAllowed("u1", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertAccessIsAllowed("u1", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsAllowed("u1", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -175,10 +175,10 @@ public void testUserU2() throws Exception { assertUserIsDenied("u2", "create_index", "b"); assertUserIsDenied("u2", "all", "c"); assertAccessIsAllowed("u2", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsAllowed("u2", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + assertAccessIsAllowed("u2", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertAccessIsAllowed("u2", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ 
\"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsAllowed("u2", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -189,10 +189,10 @@ public void testUserU3() throws Exception { assertUserIsAllowed("u3", "all", "b"); assertUserIsDenied("u3", "all", "c"); assertAccessIsAllowed("u3", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsAllowed("u3", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + assertAccessIsAllowed("u3", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertAccessIsAllowed("u3", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsAllowed("u3", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -213,10 +213,10 @@ public void testUserU4() throws Exception { assertUserIsAllowed("u4", "manage", "an_index"); assertAccessIsAllowed("u4", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsAllowed("u4", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + assertAccessIsAllowed("u4", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertAccessIsDenied("u4", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsAllowed("u4", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -232,10 +232,10 @@ public void testUserU5() throws Exception { assertUserIsDenied("u5", "write", "b"); assertAccessIsAllowed("u5", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsAllowed("u5", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + assertAccessIsAllowed("u5", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertAccessIsDenied("u5", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsAllowed("u5", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -248,10 +248,10 @@ public void testUserU6() throws Exception { assertUserIsDenied("u6", "write", "b"); assertUserIsDenied("u6", "all", "c"); assertAccessIsAllowed("u6", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsAllowed("u6", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { 
\"match_all\" : {} } }\n"); + assertAccessIsAllowed("u6", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertAccessIsAllowed("u6", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsAllowed("u6", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -262,10 +262,10 @@ public void testUserU7() throws Exception { assertUserIsDenied("u7", "all", "b"); assertUserIsDenied("u7", "all", "c"); assertAccessIsDenied("u7", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsDenied("u7", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + assertAccessIsDenied("u7", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertAccessIsDenied("u7", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsDenied("u7", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -276,10 +276,10 @@ public void testUserU8() throws Exception { assertUserIsAllowed("u8", "all", "b"); assertUserIsAllowed("u8", "all", "c"); assertAccessIsAllowed("u8", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsAllowed("u8", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + assertAccessIsAllowed("u8", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertAccessIsAllowed("u8", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsAllowed("u8", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -293,10 +293,10 @@ public void testUserU9() throws Exception { assertUserIsDenied("u9", "write", "b"); assertUserIsDenied("u9", "all", "c"); assertAccessIsAllowed("u9", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsAllowed("u9", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + assertAccessIsAllowed("u9", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertAccessIsAllowed("u9", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsAllowed("u9", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -316,10 +316,10 @@ public void testUserU11() throws Exception { assertUserIsDenied("u11", "monitor", "c"); assertAccessIsDenied("u11", - "GET", "/" + randomIndex() + 
"/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsDenied("u11", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + assertAccessIsDenied("u11", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertBodyHasAccessIsDenied("u11", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsDenied("u11", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -333,10 +333,10 @@ public void testUserU12() throws Exception { assertUserIsDenied("u12", "manage", "c"); assertUserIsAllowed("u12", "data_access", "c"); assertAccessIsAllowed("u12", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsAllowed("u12", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + assertAccessIsAllowed("u12", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertAccessIsAllowed("u12", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsAllowed("u12", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -355,10 +355,10 @@ public void testUserU13() throws Exception { assertUserIsDenied("u13", "all", "c"); assertAccessIsAllowed("u13", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); assertAccessIsAllowed("u13", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); - assertAccessIsAllowed("u13", "PUT", "/a/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); - assertBodyHasAccessIsDenied("u13", "PUT", "/b/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + assertAccessIsAllowed("u13", "PUT", "/a/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + assertBodyHasAccessIsDenied("u13", "PUT", "/b/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); assertAccessIsAllowed("u13", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -377,10 +377,10 @@ public void testUserU14() throws Exception { assertUserIsDenied("u14", "all", "c"); assertAccessIsAllowed("u14", - "GET", "/" + randomIndex() + "/foo/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); - assertAccessIsAllowed("u14", "POST", "/" + randomIndex() + "/foo/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); + "GET", "/" + randomIndex() + "/_msearch", "{}\n{ \"query\" : { \"match_all\" : {} } }\n"); + assertAccessIsAllowed("u14", "POST", "/" + randomIndex() + "/_mget", "{ \"ids\" : [ \"1\", \"2\" ] } "); assertAccessIsDenied("u14", "PUT", - "/" + randomIndex() + "/foo/_bulk", "{ \"index\" : { \"_id\" : \"123\" } }\n{ \"foo\" : \"bar\" }\n"); + "/" + randomIndex() + "/_bulk", "{ \"index\" : { \"_id\" : \"123\" } 
}\n{ \"foo\" : \"bar\" }\n"); assertAccessIsAllowed("u14", "GET", "/" + randomIndex() + "/_mtermvectors", "{ \"docs\" : [ { \"_id\": \"1\" }, { \"_id\": \"2\" } ] }"); } @@ -434,7 +434,7 @@ private void assertUserExecutes(String user, String action, String index, boolea assertAccessIsAllowed(user, "POST", "/" + index + "/_open"); assertAccessIsAllowed(user, "POST", "/" + index + "/_cache/clear"); // indexing a document to have the mapping available, and wait for green state to make sure index is created - assertAccessIsAllowed("admin", "PUT", "/" + index + "/foo/1", jsonDoc); + assertAccessIsAllowed("admin", "PUT", "/" + index + "/_doc/1", jsonDoc); assertNoTimeout(client().admin().cluster().prepareHealth(index).setWaitForGreenStatus().get()); assertAccessIsAllowed(user, "GET", "/" + index + "/_mapping/field/name"); assertAccessIsAllowed(user, "GET", "/" + index + "/_settings"); @@ -490,7 +490,7 @@ private void assertUserExecutes(String user, String action, String index, boolea assertAccessIsAllowed("admin", "GET", "/" + index + "/_refresh"); assertAccessIsAllowed(user, "GET", "/" + index + "/_count"); assertAccessIsAllowed("admin", "GET", "/" + index + "/_search"); - assertAccessIsAllowed("admin", "GET", "/" + index + "/foo/1"); + assertAccessIsAllowed("admin", "GET", "/" + index + "/_doc/1"); assertAccessIsAllowed(user, "GET", "/" + index + "/_explain/1", "{ \"query\" : { \"match_all\" : {} } }"); assertAccessIsAllowed(user, "GET", "/" + index + "/_termvectors/1"); assertUserIsAllowed(user, "search", index); @@ -513,30 +513,30 @@ private void assertUserExecutes(String user, String action, String index, boolea case "get" : if (userIsAllowed) { - assertAccessIsAllowed(user, "GET", "/" + index + "/foo/1"); + assertAccessIsAllowed(user, "GET", "/" + index + "/_doc/1"); } else { - assertAccessIsDenied(user, "GET", "/" + index + "/foo/1"); + assertAccessIsDenied(user, "GET", "/" + index + "/_doc/1"); } break; case "index" : if (userIsAllowed) { - assertAccessIsAllowed(user, "PUT", "/" + index + "/foo/321", "{ \"foo\" : \"bar\" }"); - assertAccessIsAllowed(user, "POST", "/" + index + "/foo/321/_update", "{ \"doc\" : { \"foo\" : \"baz\" } }"); + assertAccessIsAllowed(user, "PUT", "/" + index + "/_doc/321", "{ \"foo\" : \"bar\" }"); + assertAccessIsAllowed(user, "POST", "/" + index + "/_doc/321/_update", "{ \"doc\" : { \"foo\" : \"baz\" } }"); } else { - assertAccessIsDenied(user, "PUT", "/" + index + "/foo/321", "{ \"foo\" : \"bar\" }"); - assertAccessIsDenied(user, "POST", "/" + index + "/foo/321/_update", "{ \"doc\" : { \"foo\" : \"baz\" } }"); + assertAccessIsDenied(user, "PUT", "/" + index + "/_doc/321", "{ \"foo\" : \"bar\" }"); + assertAccessIsDenied(user, "POST", "/" + index + "/_doc/321/_update", "{ \"doc\" : { \"foo\" : \"baz\" } }"); } break; case "delete" : String jsonDoc = "{ \"name\" : \"docToDelete\"}"; - assertAccessIsAllowed("admin", "PUT", "/" + index + "/foo/docToDelete", jsonDoc); - assertAccessIsAllowed("admin", "PUT", "/" + index + "/foo/docToDelete2", jsonDoc); + assertAccessIsAllowed("admin", "PUT", "/" + index + "/_doc/docToDelete", jsonDoc); + assertAccessIsAllowed("admin", "PUT", "/" + index + "/_doc/docToDelete2", jsonDoc); if (userIsAllowed) { - assertAccessIsAllowed(user, "DELETE", "/" + index + "/foo/docToDelete"); + assertAccessIsAllowed(user, "DELETE", "/" + index + "/_doc/docToDelete"); } else { - assertAccessIsDenied(user, "DELETE", "/" + index + "/foo/docToDelete"); + assertAccessIsDenied(user, "DELETE", "/" + index + "/_doc/docToDelete"); } break; diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java index 02b9cf61e4a39..c115aac11d732 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java @@ -19,11 +19,8 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.transport.NoNodeAvailableException; -import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.license.License.OperationMode; import org.elasticsearch.node.MockNode; @@ -36,14 +33,8 @@ import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.transport.Netty4Plugin; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.xpack.core.TestXPackTransportClient; import org.elasticsearch.xpack.core.XPackField; -import org.elasticsearch.xpack.core.security.SecurityField; -import org.elasticsearch.xpack.core.security.action.user.PutUserResponse; -import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; -import org.elasticsearch.xpack.core.security.client.SecurityClient; import org.elasticsearch.xpack.security.LocalStateSecurity; import org.junit.After; import org.junit.Before; @@ -60,7 +51,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; @@ -156,7 +146,7 @@ public void testEnableDisableBehaviour() throws Exception { assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); refresh(); - final Client client = internalCluster().transportClient(); + final Client client = internalCluster().client(); disableLicensing(); @@ -216,57 +206,6 @@ public void testRestAuthenticationByLicenseType() throws Exception { assertThat(authorizedAuthenticateResponse.getStatusLine().getStatusCode(), is(200)); } - public void testSecurityActionsByLicenseType() throws Exception { - // security actions should not work! 
- Settings settings = internalCluster().transportClient().settings(); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateSecurity.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - new SecurityClient(client).preparePutUser("john", "password".toCharArray(), Hasher.BCRYPT).get(); - fail("security actions should not be enabled!"); - } catch (ElasticsearchSecurityException e) { - assertThat(e.status(), is(RestStatus.FORBIDDEN)); - assertThat(e.getMessage(), containsString("non-compliant")); - } - - // enable a license that enables security - License.OperationMode mode = randomFrom(License.OperationMode.GOLD, License.OperationMode.TRIAL, - License.OperationMode.PLATINUM, License.OperationMode.STANDARD, OperationMode.BASIC); - enableLicensing(mode); - // security actions should work! - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateSecurity.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - PutUserResponse response = new SecurityClient(client).preparePutUser("john", "password".toCharArray(), Hasher.BCRYPT).get(); - assertNotNull(response); - } - } - - public void testTransportClientAuthenticationByLicenseType() throws Exception { - Settings.Builder builder = Settings.builder() - .put(internalCluster().transportClient().settings()); - // remove user info - builder.remove(SecurityField.USER_SETTING.getKey()); - builder.remove(ThreadContext.PREFIX + "." + UsernamePasswordToken.BASIC_AUTH_HEADER); - - // basic has no auth - try (TransportClient client = new TestXPackTransportClient(builder.build(), LocalStateSecurity.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - assertGreenClusterState(client); - } - - // enable a license that enables security - License.OperationMode mode = randomFrom(License.OperationMode.GOLD, License.OperationMode.TRIAL, - License.OperationMode.PLATINUM, License.OperationMode.STANDARD); - enableLicensing(mode); - - try (TransportClient client = new TestXPackTransportClient(builder.build(), LocalStateSecurity.class)) { - client.addTransportAddress(internalCluster().getDataNodeInstance(Transport.class).boundAddress().publishAddress()); - client.admin().cluster().prepareHealth().get(); - fail("should not have been able to connect to a node!"); - } catch (NoNodeAvailableException e) { - // expected - } - } - public void testNodeJoinWithoutSecurityExplicitlyEnabled() throws Exception { License.OperationMode mode = randomFrom(License.OperationMode.GOLD, License.OperationMode.PLATINUM, License.OperationMode.STANDARD); enableLicensing(mode); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index bd9d58e6ea549..462e4e26541e6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -243,8 +243,6 @@ protected Settings nodeSettings(int nodeOrdinal) { Settings customSettings = customSecuritySettingsSource.nodeSettings(nodeOrdinal); builder.put(customSettings, false); // handle secure settings separately builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial"); - 
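The LicensingTests hunks above (and the RunAsIntegTests hunks later in this diff) remove the TransportClient-based assertions along with the transport client itself; what remains goes through either the REST client or a node client from the internal test cluster, as in the replacement already shown above:

    // Replaces internalCluster().transportClient() in the remaining assertions.
    final Client client = internalCluster().client();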
builder.put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO); - builder.put(NetworkModule.HTTP_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO); Settings.Builder customBuilder = Settings.builder().put(customSettings); if (customBuilder.getSecureSettings() != null) { SecuritySettingsSource.addSecureSettings(builder, secureSettings -> diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index a8ad2fbd4aad3..09e1c00cb543e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -218,7 +218,8 @@ public static void addSSLSettingsForNodePEMFiles(Settings.Builder builder, Strin "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/active-directory-ca.crt", "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/openldap.crt", - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"), + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt"), hostnameVerificationEnabled, false); } @@ -244,7 +245,8 @@ public void addClientSSLSettings(Settings.Builder builder, String prefix) { "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem", "testclient", "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"), + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"), hostnameVerificationEnabled, true); } else { addSSLSettingsForStore(builder, prefix, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 02db3a1e11a46..8e6e00f32a90e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -54,7 +54,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -67,8 +66,8 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_FORMAT_SETTING; import static org.elasticsearch.discovery.DiscoveryModule.ZEN2_DISCOVERY_TYPE; -import static org.elasticsearch.xpack.security.support.SecurityIndexManager.INTERNAL_MAIN_INDEX_FORMAT; import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.SECURITY_MAIN_ALIAS; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.INTERNAL_MAIN_INDEX_FORMAT; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -253,17 +252,45 @@ public void testTLSJoinValidator() throws Exception { 
int numIters = randomIntBetween(1, 10); for (int i = 0; i < numIters; i++) { boolean tlsOn = randomBoolean(); + boolean securityExplicitlyEnabled = randomBoolean(); String discoveryType = randomFrom("single-node", ZEN2_DISCOVERY_TYPE, randomAlphaOfLength(4)); - Security.ValidateTLSOnJoin validator = new Security.ValidateTLSOnJoin(tlsOn, discoveryType); + + final Settings settings; + if (securityExplicitlyEnabled) { + settings = Settings.builder().put("xpack.security.enabled", true).build(); + } else { + settings = Settings.EMPTY; + } + Security.ValidateTLSOnJoin validator = new Security.ValidateTLSOnJoin(tlsOn, discoveryType, settings); MetaData.Builder builder = MetaData.builder(); - License license = TestUtils.generateSignedLicense(TimeValue.timeValueHours(24)); + License.OperationMode licenseMode = randomFrom(License.OperationMode.values()); + License license = TestUtils.generateSignedLicense(licenseMode.description(), TimeValue.timeValueHours(24)); TestUtils.putLicense(builder, license); ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metaData(builder.build()).build(); - EnumSet productionModes = EnumSet.of(License.OperationMode.GOLD, License.OperationMode.PLATINUM, - License.OperationMode.STANDARD); - if (productionModes.contains(license.operationMode()) && tlsOn == false && "single-node".equals(discoveryType) == false) { + + final boolean expectFailure; + switch (licenseMode) { + case PLATINUM: + case GOLD: + case STANDARD: + expectFailure = tlsOn == false && "single-node".equals(discoveryType) == false; + break; + case BASIC: + expectFailure = tlsOn == false && "single-node".equals(discoveryType) == false && securityExplicitlyEnabled; + break; + case MISSING: + case TRIAL: + expectFailure = false; + break; + default: + throw new AssertionError("unknown operation mode [" + license.operationMode() + "]"); + } + logger.info("Test TLS join; Lic:{} TLS:{} Disco:{} Settings:{} ; Expect Failure: {}", + licenseMode, tlsOn, discoveryType, settings.toDelimitedString(','), expectFailure); + if (expectFailure) { IllegalStateException ise = expectThrows(IllegalStateException.class, () -> validator.accept(node, state)); - assertEquals("TLS setup is required for license type [" + license.operationMode().name() + "]", ise.getMessage()); + assertEquals("Transport TLS ([xpack.security.transport.ssl.enabled]) is required for license type [" + + license.operationMode().description() + "] when security is enabled", ise.getMessage()); } else { validator.accept(node, state); } @@ -295,12 +322,15 @@ public void testIndexJoinValidator_Old_And_Rolling() throws Exception { createComponents(Settings.EMPTY); BiConsumer joinValidator = security.getJoinValidator(); assertNotNull(joinValidator); + Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), + VersionUtils.getPreviousVersion(Version.V_7_0_0)); DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), Version.CURRENT); IndexMetaData indexMetaData = IndexMetaData.builder(SECURITY_MAIN_ALIAS) - .settings(settings(Version.V_6_1_0).put(INDEX_FORMAT_SETTING.getKey(), INTERNAL_MAIN_INDEX_FORMAT - 1)) + .settings(settings(version) + .put(INDEX_FORMAT_SETTING.getKey(), INTERNAL_MAIN_INDEX_FORMAT - 1)) .numberOfShards(1).numberOfReplicas(0) .build(); - DiscoveryNode existingOtherNode = new DiscoveryNode("bar", buildNewFakeTransportAddress(), Version.V_6_1_0); + DiscoveryNode existingOtherNode = new DiscoveryNode("bar", buildNewFakeTransportAddress(), version); DiscoveryNodes discoveryNodes = 
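The rewritten testTLSJoinValidator above enumerates every license operation mode; collapsed into one expression, the failure expectation it encodes is (sketch using the test's own locals):

    // Production licenses always need transport TLS unless discovery is single-node;
    // basic only when security is explicitly enabled; trial and missing never fail.
    boolean production = licenseMode == License.OperationMode.GOLD
            || licenseMode == License.OperationMode.PLATINUM
            || licenseMode == License.OperationMode.STANDARD;
    boolean expectFailure = (production
            || (licenseMode == License.OperationMode.BASIC && securityExplicitlyEnabled))
            && tlsOn == false
            && "single-node".equals(discoveryType) == false;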
DiscoveryNodes.builder().add(existingOtherNode).build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) .nodes(discoveryNodes) @@ -318,7 +348,7 @@ public void testIndexJoinValidator_FullyCurrentCluster() throws Exception { DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), Version.CURRENT); int indexFormat = randomBoolean() ? INTERNAL_MAIN_INDEX_FORMAT : INTERNAL_MAIN_INDEX_FORMAT - 1; IndexMetaData indexMetaData = IndexMetaData.builder(SECURITY_MAIN_ALIAS) - .settings(settings(Version.V_6_1_0).put(INDEX_FORMAT_SETTING.getKey(), indexFormat)) + .settings(settings(VersionUtils.randomIndexCompatibleVersion(random())).put(INDEX_FORMAT_SETTING.getKey(), indexFormat)) .numberOfShards(1).numberOfReplicas(0) .build(); DiscoveryNode existingOtherNode = new DiscoveryNode("bar", buildNewFakeTransportAddress(), Version.CURRENT); @@ -333,7 +363,7 @@ public void testIndexUpgradeValidatorWithUpToDateIndex() throws Exception { createComponents(Settings.EMPTY); BiConsumer joinValidator = security.getJoinValidator(); assertNotNull(joinValidator); - Version version = randomBoolean() ? Version.CURRENT : Version.V_6_1_0; + Version version = VersionUtils.randomIndexCompatibleVersion(random()); DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), Version.CURRENT); IndexMetaData indexMetaData = IndexMetaData.builder(SECURITY_MAIN_ALIAS) .settings(settings(version).put(INDEX_FORMAT_SETTING.getKey(), INTERNAL_MAIN_INDEX_FORMAT)) @@ -352,7 +382,8 @@ public void testIndexUpgradeValidatorWithMissingIndex() throws Exception { BiConsumer joinValidator = security.getJoinValidator(); assertNotNull(joinValidator); DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), Version.CURRENT); - DiscoveryNode existingOtherNode = new DiscoveryNode("bar", buildNewFakeTransportAddress(), Version.V_6_1_0); + DiscoveryNode existingOtherNode = new DiscoveryNode("bar", buildNewFakeTransportAddress(), + VersionUtils.randomCompatibleVersion(random(), Version.CURRENT)); DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().add(existingOtherNode).build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) .nodes(discoveryNodes).build(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java index 78d6e22ac3645..80d7b4b4d00ad 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java @@ -77,7 +77,7 @@ public void init() throws Exception { ClusterState state = mock(ClusterState.class); DiscoveryNodes nodes = DiscoveryNodes.builder() .add(new DiscoveryNode("id1", buildNewFakeTransportAddress(), Version.CURRENT)) - .add(new DiscoveryNode("id2", buildNewFakeTransportAddress(), Version.V_6_0_0)) + .add(new DiscoveryNode("id2", buildNewFakeTransportAddress(), Version.CURRENT.minimumCompatibilityVersion())) .build(); when(state.nodes()).thenReturn(nodes); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java index 69cedf6389f7f..0ab3c96167c2c 
100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -47,7 +48,6 @@ import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; -import org.elasticsearch.xpack.security.authc.UserToken; import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectRealm; import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectTestCase; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; @@ -195,20 +195,21 @@ public void testLogoutInvalidatesTokens() throws Exception { final JWT signedIdToken = generateIdToken(subject, randomAlphaOfLength(8), randomAlphaOfLength(8)); final User user = new User("oidc-user", new String[]{"superuser"}, null, null, null, true); final Authentication.RealmRef realmRef = new Authentication.RealmRef(oidcRealm.name(), OpenIdConnectRealmSettings.TYPE, "node01"); - final Authentication authentication = new Authentication(user, realmRef, null); - final Map tokenMetadata = new HashMap<>(); tokenMetadata.put("id_token_hint", signedIdToken.serialize()); tokenMetadata.put("oidc_realm", REALM_NAME); + final Authentication authentication = new Authentication(user, realmRef, null, null, Authentication.AuthenticationType.REALM, + tokenMetadata); - final PlainActionFuture> future = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, tokenMetadata, true, future); - final UserToken userToken = future.actionGet().v1(); - mockGetTokenFromId(userToken, false, client); - final String tokenString = tokenService.getAccessTokenAsString(userToken); + final PlainActionFuture> future = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, tokenMetadata, future); + final String accessToken = future.actionGet().v1(); + mockGetTokenFromId(tokenService, userTokenId, authentication, false, client); final OpenIdConnectLogoutRequest request = new OpenIdConnectLogoutRequest(); - request.setToken(tokenString); + request.setToken(accessToken); final PlainActionFuture listener = new PlainActionFuture<>(); action.doExecute(mock(Task.class), request, listener); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 3f4ac8942089c..6a9c487bf2013 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -32,6 +32,7 @@ import 
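The token-service test updates above (and in the SAML and authentication-service tests that follow) move to the createOAuth2Tokens overload where the caller supplies the token ids and the listener yields the serialized access token instead of a UserToken. Condensed call shape as used in these tests; that the tuple carries the access token and refresh token strings is inferred from how v1() is used, so treat the generic as an assumption:

    final String userTokenId = UUIDs.randomBase64UUID();
    final String refreshToken = UUIDs.randomBase64UUID();
    final PlainActionFuture<Tuple<String, String>> future = new PlainActionFuture<>();
    tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication,
            tokenMetadata, future);
    final String accessToken = future.actionGet().v1(); // serialized access token
    mockGetTokenFromId(tokenService, userTokenId, authentication, false, client);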
org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; @@ -66,7 +67,6 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; -import org.elasticsearch.xpack.security.authc.UserToken; import org.elasticsearch.xpack.security.authc.saml.SamlLogoutRequestHandler; import org.elasticsearch.xpack.security.authc.saml.SamlNameId; import org.elasticsearch.xpack.security.authc.saml.SamlRealm; @@ -252,9 +252,14 @@ public void cleanup() { } public void testInvalidateCorrectTokensFromLogoutRequest() throws Exception { + final String userTokenId1 = UUIDs.randomBase64UUID(); + final String refreshToken1 = UUIDs.randomBase64UUID(); + final String userTokenId2 = UUIDs.randomBase64UUID(); + final String refreshToken2 = UUIDs.randomBase64UUID(); storeToken(logoutRequest.getNameId(), randomAlphaOfLength(10)); - final Tuple tokenToInvalidate1 = storeToken(logoutRequest.getNameId(), logoutRequest.getSession()); - final Tuple tokenToInvalidate2 = storeToken(logoutRequest.getNameId(), logoutRequest.getSession()); + final Tuple tokenToInvalidate1 = storeToken(userTokenId1, refreshToken1, logoutRequest.getNameId(), + logoutRequest.getSession()); + storeToken(userTokenId2, refreshToken2, logoutRequest.getNameId(), logoutRequest.getSession()); storeToken(new SamlNameId(NameID.PERSISTENT, randomAlphaOfLength(16), null, null, null), logoutRequest.getSession()); assertThat(indexRequests.size(), equalTo(4)); @@ -316,27 +321,27 @@ public void testInvalidateCorrectTokensFromLogoutRequest() throws Exception { assertThat(filter1.get(1), instanceOf(TermQueryBuilder.class)); assertThat(((TermQueryBuilder) filter1.get(1)).fieldName(), equalTo("refresh_token.token")); assertThat(((TermQueryBuilder) filter1.get(1)).value(), - equalTo(TokenService.unpackVersionAndPayload(tokenToInvalidate1.v2()).v2())); + equalTo(TokenService.hashTokenString(TokenService.unpackVersionAndPayload(tokenToInvalidate1.v2()).v2()))); assertThat(bulkRequests.size(), equalTo(4)); // 4 updates (refresh-token + access-token) // Invalidate refresh token 1 assertThat(bulkRequests.get(0).requests().get(0), instanceOf(UpdateRequest.class)); - assertThat(bulkRequests.get(0).requests().get(0).id(), equalTo("token_" + tokenToInvalidate1.v1().getId())); + assertThat(bulkRequests.get(0).requests().get(0).id(), equalTo("token_" + TokenService.hashTokenString(userTokenId1))); UpdateRequest updateRequest1 = (UpdateRequest) bulkRequests.get(0).requests().get(0); assertThat(updateRequest1.toString().contains("refresh_token"), equalTo(true)); // Invalidate access token 1 assertThat(bulkRequests.get(1).requests().get(0), instanceOf(UpdateRequest.class)); - assertThat(bulkRequests.get(1).requests().get(0).id(), equalTo("token_" + tokenToInvalidate1.v1().getId())); + assertThat(bulkRequests.get(1).requests().get(0).id(), equalTo("token_" + TokenService.hashTokenString(userTokenId1))); UpdateRequest updateRequest2 = (UpdateRequest) bulkRequests.get(1).requests().get(0); assertThat(updateRequest2.toString().contains("access_token"), equalTo(true)); // Invalidate refresh token 2 assertThat(bulkRequests.get(2).requests().get(0), instanceOf(UpdateRequest.class)); - 
assertThat(bulkRequests.get(2).requests().get(0).id(), equalTo("token_" + tokenToInvalidate2.v1().getId())); + assertThat(bulkRequests.get(2).requests().get(0).id(), equalTo("token_" + TokenService.hashTokenString(userTokenId2))); UpdateRequest updateRequest3 = (UpdateRequest) bulkRequests.get(2).requests().get(0); assertThat(updateRequest3.toString().contains("refresh_token"), equalTo(true)); // Invalidate access token 2 assertThat(bulkRequests.get(3).requests().get(0), instanceOf(UpdateRequest.class)); - assertThat(bulkRequests.get(3).requests().get(0).id(), equalTo("token_" + tokenToInvalidate2.v1().getId())); + assertThat(bulkRequests.get(3).requests().get(0).id(), equalTo("token_" + TokenService.hashTokenString(userTokenId2))); UpdateRequest updateRequest4 = (UpdateRequest) bulkRequests.get(3).requests().get(0); assertThat(updateRequest4.toString().contains("access_token"), equalTo(true)); } @@ -359,13 +364,19 @@ private Function findTokenByRefreshToken(SearchHit[] }; } - private Tuple storeToken(SamlNameId nameId, String session) throws IOException { + private Tuple storeToken(String userTokenId, String refreshToken, SamlNameId nameId, String session) { Authentication authentication = new Authentication(new User("bob"), new RealmRef("native", NativeRealmSettings.TYPE, "node01"), null); final Map metadata = samlRealm.createTokenMetadata(nameId, session); - final PlainActionFuture> future = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, metadata, true, future); + final PlainActionFuture> future = new PlainActionFuture<>(); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, metadata, future); return future.actionGet(); } + private Tuple storeToken(SamlNameId nameId, String session) { + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); + return storeToken(userTokenId, refreshToken, nameId, session); + } + } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index 1652122bf6e80..9b9dc79a29cd4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; @@ -55,7 +56,6 @@ import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; -import org.elasticsearch.xpack.security.authc.UserToken; import org.elasticsearch.xpack.security.authc.saml.SamlNameId; import org.elasticsearch.xpack.security.authc.saml.SamlRealm; import org.elasticsearch.xpack.security.authc.saml.SamlRealmTests; @@ -236,19 +236,21 @@ public void testLogoutInvalidatesToken() throws Exception { .map(); final User user = new User("punisher", new String[]{"superuser"}, null, null, userMetaData, true); final Authentication.RealmRef realmRef = 
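The updated assertions above reflect that token documents are now keyed by a hash of the token id rather than the raw value, and that the refresh-token term query likewise matches on the hashed payload. The id derivation being asserted is:

    // userTokenId as generated in the test; hashTokenString is the TokenService helper
    // these assertions now go through.
    String expectedDocId = "token_" + TokenService.hashTokenString(userTokenId);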
new Authentication.RealmRef(samlRealm.name(), SamlRealmSettings.TYPE, "node01"); - final Authentication authentication = new Authentication(user, realmRef, null); - final Map tokenMetaData = samlRealm.createTokenMetadata( - new SamlNameId(NameID.TRANSIENT, nameId, null, null, null), session); + new SamlNameId(NameID.TRANSIENT, nameId, null, null, null), session); + final Authentication authentication = new Authentication(user, realmRef, null, null, Authentication.AuthenticationType.REALM, + tokenMetaData); + - final PlainActionFuture> future = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, tokenMetaData, true, future); - final UserToken userToken = future.actionGet().v1(); - mockGetTokenFromId(userToken, false, client); - final String tokenString = tokenService.getAccessTokenAsString(userToken); + final PlainActionFuture> future = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, tokenMetaData, future); + final String accessToken = future.actionGet().v1(); + mockGetTokenFromId(tokenService, userTokenId, authentication, false, client); final SamlLogoutRequest request = new SamlLogoutRequest(); - request.setToken(tokenString); + request.setToken(accessToken); final PlainActionFuture listener = new PlainActionFuture<>(); action.doExecute(mock(Task.class), request, listener); final SamlLogoutResponse response = listener.get(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index f6849cae4c1cd..c8cea45037942 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -315,6 +315,7 @@ private Client waitForExpiredApiKeysRemoverTriggerReadyAndGetClient() throws Exc return internalCluster().client(nodeWithMostRecentRun); } + @AwaitsFix( bugUrl = "https://github.com/elastic/elasticsearch/issues/41747") public void testExpiredApiKeysBehaviorWhenKeysExpired1WeekBeforeAnd1DayBefore() throws Exception { Client client = waitForExpiredApiKeysRemoverTriggerReadyAndGetClient().filterWithHeader( Collections.singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index c7994888a2631..67ce5ce2b27af 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -1108,14 +1108,16 @@ public void testAuthenticateWithToken() throws Exception { User user = new User("_username", "r1"); final AtomicBoolean completed = new AtomicBoolean(false); final Authentication expected = new Authentication(user, new RealmRef("realm", "custom", "node"), null); - PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + 
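Both logout tests in this change now construct the Authentication with the token metadata and an explicit authentication type in place of the old three-argument constructor. Call shape as it appears in the diff; labelling the two null arguments as the looked-up realm and the version is my assumption, not something the diff states:

    final Authentication authentication = new Authentication(user, realmRef,
            null /* lookedUpBy, assumed */, null /* version, assumed */,
            Authentication.AuthenticationType.REALM, tokenMetadata);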
final String refreshToken = UUIDs.randomBase64UUID(); try (ThreadContext.StoredContext ctx = threadContext.stashContext()) { Authentication originatingAuth = new Authentication(new User("creator"), new RealmRef("test", "test", "test"), null); - tokenService.createOAuth2Tokens(expected, originatingAuth, Collections.emptyMap(), true, tokenFuture); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, expected, originatingAuth, Collections.emptyMap(), tokenFuture); } - String token = tokenService.getAccessTokenAsString(tokenFuture.get().v1()); + String token = tokenFuture.get().v1(); when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE)); - mockGetTokenFromId(tokenFuture.get().v1(), false, client); + mockGetTokenFromId(tokenService, userTokenId, expected, false, client); when(securityIndex.isAvailable()).thenReturn(true); when(securityIndex.indexExists()).thenReturn(true); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { @@ -1191,13 +1193,15 @@ public void testExpiredToken() throws Exception { when(securityIndex.indexExists()).thenReturn(true); User user = new User("_username", "r1"); final Authentication expected = new Authentication(user, new RealmRef("realm", "custom", "node"), null); - PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); try (ThreadContext.StoredContext ctx = threadContext.stashContext()) { Authentication originatingAuth = new Authentication(new User("creator"), new RealmRef("test", "test", "test"), null); - tokenService.createOAuth2Tokens(expected, originatingAuth, Collections.emptyMap(), true, tokenFuture); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, expected, originatingAuth, Collections.emptyMap(), tokenFuture); } - String token = tokenService.getAccessTokenAsString(tokenFuture.get().v1()); - mockGetTokenFromId(tokenFuture.get().v1(), true, client); + String token = tokenFuture.get().v1(); + mockGetTokenFromId(tokenService, userTokenId, expected, true, client); doAnswer(invocationOnMock -> { ((Runnable) invocationOnMock.getArguments()[1]).run(); return null; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java index 6d5c6770bf2f5..2ce089f385896 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RunAsIntegTests.java @@ -5,35 +5,16 @@ */ package org.elasticsearch.xpack.security.authc; -import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.SecurityIntegTestCase; import 
org.elasticsearch.test.SecuritySettingsSource; -import org.elasticsearch.test.SecuritySettingsSourceField; -import org.elasticsearch.xpack.core.TestXPackTransportClient; import org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField; -import org.elasticsearch.xpack.security.LocalStateSecurity; -import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.junit.BeforeClass; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; public class RunAsIntegTests extends SecurityIntegTestCase { @@ -86,43 +67,6 @@ protected boolean transportSSLEnabled() { return false; } - public void testUserImpersonation() throws Exception { - try (TransportClient client = getTransportClient(Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TRANSPORT_CLIENT_USER + ":" + - SecuritySettingsSourceField.TEST_PASSWORD).build())) { - //ensure the client can connect - assertBusy(() -> assertThat(client.connectedNodes().size(), greaterThan(0))); - - // make sure the client can't get health - try { - client.admin().cluster().prepareHealth().get(); - fail("the client user should not have privileges to get the health"); - } catch (ElasticsearchSecurityException e) { - assertThat(e.getMessage(), containsString("unauthorized")); - } - - // let's run as without authorization - try { - Map headers = Collections.singletonMap(AuthenticationServiceField.RUN_AS_USER_HEADER, - SecuritySettingsSource.TEST_USER_NAME); - client.filterWithHeader(headers) - .admin().cluster().prepareHealth().get(); - fail("run as should be unauthorized for the transport client user"); - } catch (ElasticsearchSecurityException e) { - assertThat(e.getMessage(), containsString("unauthorized")); - assertThat(e.getMessage(), containsString("run as")); - } - - Map headers = new HashMap<>(); - headers.put("Authorization", UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); - headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, SecuritySettingsSource.TEST_USER_NAME); - // lets set the user - ClusterHealthResponse response = client.filterWithHeader(headers).admin().cluster().prepareHealth().get(); - assertThat(response.isTimedOut(), is(false)); - } - } - public void testUserImpersonationUsingHttp() throws Exception { // use the transport client user and try to run as try { @@ -156,29 +100,6 @@ public void testUserImpersonationUsingHttp() throws Exception { getRestClient().performRequest(requestForUserRunAsUser(SecuritySettingsSource.TEST_USER_NAME)); } - public void testEmptyUserImpersonationHeader() throws Exception { - try (TransportClient client = getTransportClient(Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TRANSPORT_CLIENT_USER + ":" - + SecuritySettingsSourceField.TEST_PASSWORD).build())) { - //ensure the client can connect - awaitBusy(() -> { - return client.connectedNodes().size() > 0; - }); - - try { - Map headers = new HashMap<>(); - headers.put("Authorization", UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); - 
headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, ""); - - client.filterWithHeader(headers).admin().cluster().prepareHealth().get(); - fail("run as header should not be allowed to be empty"); - } catch (ElasticsearchSecurityException e) { - assertThat(e.getMessage(), containsString("unable to authenticate")); - } - } - } - public void testEmptyHeaderUsingHttp() throws Exception { try { getRestClient().performRequest(requestForUserRunAsUser("")); @@ -188,29 +109,6 @@ public void testEmptyHeaderUsingHttp() throws Exception { } } - public void testNonExistentRunAsUser() throws Exception { - try (TransportClient client = getTransportClient(Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TRANSPORT_CLIENT_USER + ":" + - SecuritySettingsSourceField.TEST_PASSWORD).build())) { - //ensure the client can connect - awaitBusy(() -> { - return client.connectedNodes().size() > 0; - }); - - try { - Map headers = new HashMap<>(); - headers.put("Authorization", UsernamePasswordToken.basicAuthHeaderValue(RUN_AS_USER, - new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()))); - headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, "idontexist"); - - client.filterWithHeader(headers).admin().cluster().prepareHealth().get(); - fail("run as header should not accept non-existent users"); - } catch (ElasticsearchSecurityException e) { - assertThat(e.getMessage(), containsString("unauthorized")); - } - } - } - public void testNonExistentRunAsUserUsingHttp() throws Exception { try { getRestClient().performRequest(requestForUserRunAsUser("idontexist")); @@ -228,21 +126,4 @@ private static Request requestForUserRunAsUser(String user) { request.setOptions(options); return request; } - - // build our own here to better mimic an actual client... 
- TransportClient getTransportClient(Settings extraSettings) { - NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().get(); - List nodes = nodeInfos.getNodes(); - assertTrue(nodes.isEmpty() == false); - TransportAddress publishAddress = randomFrom(nodes).getTransport().address().publishAddress(); - String clusterName = nodeInfos.getClusterName().value(); - - Settings settings = Settings.builder() - .put(extraSettings) - .put("cluster.name", clusterName) - .build(); - - return new TestXPackTransportClient(settings, LocalStateSecurity.class) - .addTransportAddress(publishAddress); - } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/SecurityRealmSettingsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/SecurityRealmSettingsTests.java index 3bc89d29f8df7..bccee36631e3d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/SecurityRealmSettingsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/SecurityRealmSettingsTests.java @@ -72,7 +72,6 @@ protected Settings nodeSettings(int nodeOrdinal) { .put("xpack.security.authc.realms.kerberos.kerb1.order", 7) .put("xpack.security.authc.realms.kerberos.kerb1.keytab.path", kerbKeyTab.toAbsolutePath()) .put("xpack.security.authc.realms.oidc.oidc1.order", 8) - .put("xpack.security.authc.realms.oidc.oidc1.op.name", "myprovider") .put("xpack.security.authc.realms.oidc.oidc1.op.issuer", "https://the.issuer.com:8090") .put("xpack.security.authc.realms.oidc.oidc1.op.jwkset_path", jwkSet.toAbsolutePath()) .put("xpack.security.authc.realms.oidc.oidc1.op.authorization_endpoint", "https://the.issuer.com:8090/login") diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 7f09444784c6d..42101b1f4ec97 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -28,8 +28,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -62,10 +60,7 @@ import org.junit.Before; import org.junit.BeforeClass; -import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.OutputStream; -import java.nio.charset.StandardCharsets; import java.security.GeneralSecurityException; import java.time.Clock; import java.time.Instant; @@ -75,7 +70,6 @@ import java.util.HashMap; import java.util.Map; -import javax.crypto.CipherOutputStream; import javax.crypto.SecretKey; import static java.time.Clock.systemUTC; @@ -169,15 +163,16 @@ public static void shutdownThreadpool() throws InterruptedException { public void testAttachAndGetToken() throws Exception { TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); - PlainActionFuture> tokenFuture = new 
PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, Collections.emptyMap(), true, tokenFuture); - final UserToken token = tokenFuture.get().v1(); - assertNotNull(token); - mockGetTokenFromId(token, false); - authentication = token.getAuthentication(); + PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, Collections.emptyMap(), tokenFuture); + final String accessToken = tokenFuture.get().v1(); + assertNotNull(accessToken); + mockGetTokenFromId(tokenService, userTokenId, authentication, false); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); - requestContext.putHeader("Authorization", randomFrom("Bearer ", "BEARER ", "bearer ") + tokenService.getAccessTokenAsString(token)); + requestContext.putHeader("Authorization", randomFrom("Bearer ", "BEARER ", "bearer ") + accessToken); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); @@ -214,16 +209,21 @@ public void testInvalidAuthorizationHeader() throws Exception { public void testRotateKey() throws Exception { TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + // This test only makes sense in mixed clusters with pre v7.2.0 nodes where the Key is actually used + if (null == oldNode) { + oldNode = addAnotherDataNodeWithVersion(this.clusterService, randomFrom(Version.V_6_7_0, Version.V_7_1_0)); + } Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); - PlainActionFuture> tokenFuture = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, Collections.emptyMap(), true, tokenFuture); - final UserToken token = tokenFuture.get().v1(); - assertNotNull(token); - mockGetTokenFromId(token, false); - authentication = token.getAuthentication(); + PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, Collections.emptyMap(), tokenFuture); + final String accessToken = tokenFuture.get().v1(); + assertNotNull(accessToken); + mockGetTokenFromId(tokenService, userTokenId, authentication, false); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); - storeTokenHeader(requestContext, getDeprecatedAccessTokenString(tokenService, token)); + storeTokenHeader(requestContext, accessToken); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); @@ -240,15 +240,18 @@ public void testRotateKey() throws Exception { assertAuthentication(authentication, serialized.getAuthentication()); } - PlainActionFuture> newTokenFuture = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, Collections.emptyMap(), true, newTokenFuture); - final UserToken newToken = newTokenFuture.get().v1(); - assertNotNull(newToken); - assertNotEquals(getDeprecatedAccessTokenString(tokenService, newToken), getDeprecatedAccessTokenString(tokenService, token)); + PlainActionFuture> newTokenFuture = new PlainActionFuture<>(); + final String newUserTokenId = 
UUIDs.randomBase64UUID(); + final String newRefreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(newUserTokenId, newRefreshToken, authentication, authentication, Collections.emptyMap(), + newTokenFuture); + final String newAccessToken = newTokenFuture.get().v1(); + assertNotNull(newAccessToken); + assertNotEquals(newAccessToken, accessToken); requestContext = new ThreadContext(Settings.EMPTY); - storeTokenHeader(requestContext, getDeprecatedAccessTokenString(tokenService, newToken)); - mockGetTokenFromId(newToken, false); + storeTokenHeader(requestContext, newAccessToken); + mockGetTokenFromId(tokenService, newUserTokenId, authentication, false); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); @@ -267,6 +270,10 @@ private void rotateKeys(TokenService tokenService) { public void testKeyExchange() throws Exception { TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + // This test only makes sense in mixed clusters with pre v7.2.0 nodes where the Key is actually used + if (null == oldNode) { + oldNode = addAnotherDataNodeWithVersion(this.clusterService, randomFrom(Version.V_6_7_0, Version.V_7_1_0)); + } int numRotations = randomIntBetween(1, 5); for (int i = 0; i < numRotations; i++) { rotateKeys(tokenService); @@ -274,20 +281,21 @@ public void testKeyExchange() throws Exception { TokenService otherTokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); otherTokenService.refreshMetaData(tokenService.getTokenMetaData()); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); - PlainActionFuture> tokenFuture = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, Collections.emptyMap(), true, tokenFuture); - final UserToken token = tokenFuture.get().v1(); - assertNotNull(token); - mockGetTokenFromId(token, false); - authentication = token.getAuthentication(); + PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, Collections.emptyMap(), tokenFuture); + final String accessToken = tokenFuture.get().v1(); + assertNotNull(accessToken); + mockGetTokenFromId(tokenService, userTokenId, authentication, false); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); - storeTokenHeader(requestContext, getDeprecatedAccessTokenString(tokenService, token)); + storeTokenHeader(requestContext, accessToken); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); otherTokenService.getAndValidateToken(requestContext, future); UserToken serialized = future.get(); - assertEquals(authentication, serialized.getAuthentication()); + assertAuthentication(serialized.getAuthentication(), authentication); } rotateKeys(tokenService); @@ -298,22 +306,27 @@ public void testKeyExchange() throws Exception { PlainActionFuture future = new PlainActionFuture<>(); otherTokenService.getAndValidateToken(requestContext, future); UserToken serialized = future.get(); - assertEquals(authentication, serialized.getAuthentication()); + assertAuthentication(serialized.getAuthentication(), authentication); } } public void testPruneKeys() throws Exception { 
TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + // This test only makes sense in mixed clusters with pre v7.2.0 nodes where the Key is actually used + if (null == oldNode) { + oldNode = addAnotherDataNodeWithVersion(this.clusterService, randomFrom(Version.V_6_7_0, Version.V_7_1_0)); + } Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); - PlainActionFuture> tokenFuture = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, Collections.emptyMap(), true, tokenFuture); - final UserToken token = tokenFuture.get().v1(); - assertNotNull(token); - mockGetTokenFromId(token, false); - authentication = token.getAuthentication(); + PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, Collections.emptyMap(), tokenFuture); + final String accessToken = tokenFuture.get().v1(); + assertNotNull(accessToken); + mockGetTokenFromId(tokenService, userTokenId, authentication, false); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); - storeTokenHeader(requestContext, getDeprecatedAccessTokenString(tokenService, token)); + storeTokenHeader(requestContext, accessToken); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); @@ -336,11 +349,14 @@ public void testPruneKeys() throws Exception { assertAuthentication(authentication, serialized.getAuthentication()); } - PlainActionFuture> newTokenFuture = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, Collections.emptyMap(), true, newTokenFuture); - final UserToken newToken = newTokenFuture.get().v1(); - assertNotNull(newToken); - assertNotEquals(getDeprecatedAccessTokenString(tokenService, newToken), getDeprecatedAccessTokenString(tokenService, token)); + PlainActionFuture> newTokenFuture = new PlainActionFuture<>(); + final String newUserTokenId = UUIDs.randomBase64UUID(); + final String newRefreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(newUserTokenId, newRefreshToken, authentication, authentication, Collections.emptyMap(), + newTokenFuture); + final String newAccessToken = newTokenFuture.get().v1(); + assertNotNull(newAccessToken); + assertNotEquals(newAccessToken, accessToken); metaData = tokenService.pruneKeys(1); tokenService.refreshMetaData(metaData); @@ -353,8 +369,8 @@ public void testPruneKeys() throws Exception { } requestContext = new ThreadContext(Settings.EMPTY); - storeTokenHeader(requestContext, getDeprecatedAccessTokenString(tokenService, newToken)); - mockGetTokenFromId(newToken, false); + storeTokenHeader(requestContext, newAccessToken); + mockGetTokenFromId(tokenService, newUserTokenId, authentication, false); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); tokenService.getAndValidateToken(requestContext, future); @@ -366,16 +382,21 @@ public void testPruneKeys() throws Exception { public void testPassphraseWorks() throws Exception { TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + // This test only makes sense in mixed clusters with pre v7.1.0 nodes where the Key is actually used 
+ if (null == oldNode) { + oldNode = addAnotherDataNodeWithVersion(this.clusterService, randomFrom(Version.V_6_7_0, Version.V_7_1_0)); + } Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); - PlainActionFuture> tokenFuture = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, Collections.emptyMap(), true, tokenFuture); - final UserToken token = tokenFuture.get().v1(); - assertNotNull(token); - mockGetTokenFromId(token, false); - authentication = token.getAuthentication(); + PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, Collections.emptyMap(), tokenFuture); + final String accessToken = tokenFuture.get().v1(); + assertNotNull(accessToken); + mockGetTokenFromId(tokenService, userTokenId, authentication, false); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); - storeTokenHeader(requestContext, getDeprecatedAccessTokenString(tokenService, token)); + storeTokenHeader(requestContext, accessToken); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); @@ -395,29 +416,40 @@ public void testPassphraseWorks() throws Exception { public void testGetTokenWhenKeyCacheHasExpired() throws Exception { TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); + // This test only makes sense in mixed clusters with pre v7.1.0 nodes where the Key is actually used + if (null == oldNode) { + oldNode = addAnotherDataNodeWithVersion(this.clusterService, randomFrom(Version.V_6_7_0, Version.V_7_1_0)); + } Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); - PlainActionFuture> tokenFuture = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, Collections.emptyMap(), true, tokenFuture); - UserToken token = tokenFuture.get().v1(); - assertThat(getDeprecatedAccessTokenString(tokenService, token), notNullValue()); + PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, Collections.emptyMap(), tokenFuture); + String accessToken = tokenFuture.get().v1(); + assertThat(accessToken, notNullValue()); tokenService.clearActiveKeyCache(); - assertThat(getDeprecatedAccessTokenString(tokenService, token), notNullValue()); + + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, Collections.emptyMap(), tokenFuture); + accessToken = tokenFuture.get().v1(); + assertThat(accessToken, notNullValue()); } public void testInvalidatedToken() throws Exception { when(securityMainIndex.indexExists()).thenReturn(true); TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); - PlainActionFuture> tokenFuture = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, Collections.emptyMap(), true, tokenFuture); - final 
UserToken token = tokenFuture.get().v1(); - assertNotNull(token); - mockGetTokenFromId(token, true); + PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, Collections.emptyMap(), tokenFuture); + final String accessToken = tokenFuture.get().v1(); + assertNotNull(accessToken); + mockGetTokenFromId(tokenService, userTokenId, authentication, true); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); - storeTokenHeader(requestContext, tokenService.getAccessTokenAsString(token)); + storeTokenHeader(requestContext, accessToken); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); @@ -436,8 +468,10 @@ private void storeTokenHeader(ThreadContext requestContext, String tokenString) public void testComputeSecretKeyIsConsistent() throws Exception { byte[] saltArr = new byte[32]; random().nextBytes(saltArr); - SecretKey key = TokenService.computeSecretKey("some random passphrase".toCharArray(), saltArr); - SecretKey key2 = TokenService.computeSecretKey("some random passphrase".toCharArray(), saltArr); + SecretKey key = + TokenService.computeSecretKey("some random passphrase".toCharArray(), saltArr, TokenService.TOKEN_SERVICE_KEY_ITERATIONS); + SecretKey key2 = + TokenService.computeSecretKey("some random passphrase".toCharArray(), saltArr, TokenService.TOKEN_SERVICE_KEY_ITERATIONS); assertArrayEquals(key.getEncoded(), key2.getEncoded()); } @@ -468,14 +502,15 @@ public void testTokenExpiry() throws Exception { ClockMock clock = ClockMock.frozen(); TokenService tokenService = createTokenService(tokenServiceEnabledSettings, clock); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); - PlainActionFuture> tokenFuture = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, Collections.emptyMap(), true, tokenFuture); - final UserToken token = tokenFuture.get().v1(); - mockGetTokenFromId(token, false); - authentication = token.getAuthentication(); + final String userTokenId = UUIDs.randomBase64UUID(); + UserToken userToken = new UserToken(userTokenId, tokenService.getTokenVersionCompatibility(), authentication, + tokenService.getExpirationTime(), Collections.emptyMap()); + mockGetTokenFromId(userToken, false); + final String accessToken = tokenService.prependVersionAndEncodeAccessToken(tokenService.getTokenVersionCompatibility(), userTokenId + ); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); - storeTokenHeader(requestContext, tokenService.getAccessTokenAsString(token)); + storeTokenHeader(requestContext, accessToken); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { // the clock is still frozen, so the cookie should be valid @@ -519,7 +554,7 @@ public void testTokenServiceDisabled() throws Exception { TokenService tokenService = new TokenService(Settings.builder() .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), false) .build(), - Clock.systemUTC(), client, licenseState, securityMainIndex, securityTokensIndex, clusterService); + Clock.systemUTC(), client, licenseState, securityMainIndex, securityTokensIndex, clusterService); IllegalStateException e = expectThrows(IllegalStateException.class, () -> 
tokenService.createOAuth2Tokens(null, null, null, true, null)); assertEquals("security tokens are not enabled", e.getMessage()); @@ -577,14 +612,15 @@ public void testMalformedToken() throws Exception { public void testIndexNotAvailable() throws Exception { TokenService tokenService = createTokenService(tokenServiceEnabledSettings, systemUTC()); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); - PlainActionFuture> tokenFuture = new PlainActionFuture<>(); - tokenService.createOAuth2Tokens(authentication, authentication, Collections.emptyMap(), true, tokenFuture); - final UserToken token = tokenFuture.get().v1(); - assertNotNull(token); - //mockGetTokenFromId(token, false); + PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + final String userTokenId = UUIDs.randomBase64UUID(); + final String refreshToken = UUIDs.randomBase64UUID(); + tokenService.createOAuth2Tokens(userTokenId, refreshToken, authentication, authentication, Collections.emptyMap(), tokenFuture); + final String accessToken = tokenFuture.get().v1(); + assertNotNull(accessToken); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); - storeTokenHeader(requestContext, tokenService.getAccessTokenAsString(token)); + storeTokenHeader(requestContext, accessToken); doAnswer(invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; @@ -620,34 +656,64 @@ public void testIndexNotAvailable() throws Exception { when(tokensIndex.isAvailable()).thenReturn(true); when(tokensIndex.indexExists()).thenReturn(true); - mockGetTokenFromId(token, false); + mockGetTokenFromId(tokenService, userTokenId, authentication, false); future = new PlainActionFuture<>(); tokenService.getAndValidateToken(requestContext, future); - assertEquals(future.get().getAuthentication(), token.getAuthentication()); + assertAuthentication(future.get().getAuthentication(), authentication); } } public void testGetAuthenticationWorksWithExpiredUserToken() throws Exception { TokenService tokenService = createTokenService(tokenServiceEnabledSettings, Clock.systemUTC()); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); - UserToken expired = new UserToken(authentication, Instant.now().minus(3L, ChronoUnit.DAYS)); + final String userTokenId = UUIDs.randomBase64UUID(); + UserToken expired = new UserToken(userTokenId, tokenService.getTokenVersionCompatibility(), authentication, + Instant.now().minus(3L, ChronoUnit.DAYS), Collections.emptyMap()); mockGetTokenFromId(expired, false); - String userTokenString = tokenService.getAccessTokenAsString(expired); + final String accessToken = tokenService.prependVersionAndEncodeAccessToken(tokenService.getTokenVersionCompatibility(), userTokenId + ); PlainActionFuture>> authFuture = new PlainActionFuture<>(); - tokenService.getAuthenticationAndMetaData(userTokenString, authFuture); + tokenService.getAuthenticationAndMetaData(accessToken, authFuture); Authentication retrievedAuth = authFuture.actionGet().v1(); - assertEquals(authentication, retrievedAuth); + assertAuthentication(authentication, retrievedAuth); + } + + public void testSupercedingTokenEncryption() throws Exception { + TokenService tokenService = createTokenService(tokenServiceEnabledSettings, Clock.systemUTC()); + Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); + 
PlainActionFuture> tokenFuture = new PlainActionFuture<>(); + final String refrehToken = UUIDs.randomBase64UUID(); + final String newAccessToken = UUIDs.randomBase64UUID(); + final String newRefreshToken = UUIDs.randomBase64UUID(); + final byte[] iv = tokenService.getRandomBytes(TokenService.IV_BYTES); + final byte[] salt = tokenService.getRandomBytes(TokenService.SALT_BYTES); + final Version version = tokenService.getTokenVersionCompatibility(); + String encryptedTokens = tokenService.encryptSupersedingTokens(newAccessToken, newRefreshToken, refrehToken, iv, + salt); + TokenService.RefreshTokenStatus refreshTokenStatus = new TokenService.RefreshTokenStatus(false, + authentication.getUser().principal(), authentication.getAuthenticatedBy().getName(), true, Instant.now().minusSeconds(5L), + encryptedTokens, Base64.getEncoder().encodeToString(iv), Base64.getEncoder().encodeToString(salt)); + refreshTokenStatus.setVersion(version); + tokenService.decryptAndReturnSupersedingTokens(refrehToken, refreshTokenStatus, tokenFuture); + if (version.onOrAfter(TokenService.VERSION_ACCESS_TOKENS_AS_UUIDS)) { + // previous versions serialized the access token encrypted and the cipher text was different each time (due to different IVs) + assertThat(tokenService.prependVersionAndEncodeAccessToken(version, newAccessToken), equalTo(tokenFuture.get().v1())); + } + assertThat(TokenService.prependVersionAndEncodeRefreshToken(version, newRefreshToken), equalTo(tokenFuture.get().v2())); } public void testCannotValidateTokenIfLicenseDoesNotAllowTokens() throws Exception { when(licenseState.isTokenServiceAllowed()).thenReturn(true); TokenService tokenService = createTokenService(tokenServiceEnabledSettings, Clock.systemUTC()); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); - UserToken token = new UserToken(authentication, Instant.now().plusSeconds(180)); + final String userTokenId = UUIDs.randomBase64UUID(); + UserToken token = new UserToken(userTokenId, tokenService.getTokenVersionCompatibility(), authentication, + Instant.now().plusSeconds(180), Collections.emptyMap()); mockGetTokenFromId(token, false); - + final String accessToken = tokenService.prependVersionAndEncodeAccessToken(tokenService.getTokenVersionCompatibility(), userTokenId + ); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - storeTokenHeader(threadContext, tokenService.getAccessTokenAsString(token)); + storeTokenHeader(threadContext, tokenService.prependVersionAndEncodeAccessToken(token.getVersion(), accessToken)); PlainActionFuture authFuture = new PlainActionFuture<>(); when(licenseState.isTokenServiceAllowed()).thenReturn(false); @@ -660,18 +726,30 @@ private TokenService createTokenService(Settings settings, Clock clock) throws G return new TokenService(settings, clock, client, licenseState, securityMainIndex, securityTokensIndex, clusterService); } - private void mockGetTokenFromId(UserToken userToken, boolean isExpired) { - mockGetTokenFromId(userToken, isExpired, client); + private void mockGetTokenFromId(TokenService tokenService, String accessToken, Authentication authentication, boolean isExpired) { + mockGetTokenFromId(tokenService, accessToken, authentication, isExpired, client); } - public static void mockGetTokenFromId(UserToken userToken, boolean isExpired, Client client) { + public static void mockGetTokenFromId(TokenService tokenService, String userTokenId, Authentication authentication, boolean isExpired, + Client client) { 
doAnswer(invocationOnMock -> { GetRequest request = (GetRequest) invocationOnMock.getArguments()[0]; ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; GetResponse response = mock(GetResponse.class); - if (userToken.getId().equals(request.id().replace("token_", ""))) { + Version tokenVersion = tokenService.getTokenVersionCompatibility(); + final String possiblyHashedUserTokenId; + if (tokenVersion.onOrAfter(TokenService.VERSION_ACCESS_TOKENS_AS_UUIDS)) { + possiblyHashedUserTokenId = TokenService.hashTokenString(userTokenId); + } else { + possiblyHashedUserTokenId = userTokenId; + } + if (possiblyHashedUserTokenId.equals(request.id().replace("token_", ""))) { when(response.isExists()).thenReturn(true); Map sourceMap = new HashMap<>(); + final Authentication tokenAuth = new Authentication(authentication.getUser(), authentication.getAuthenticatedBy(), + authentication.getLookedUpBy(), tokenVersion, AuthenticationType.TOKEN, authentication.getMetadata()); + final UserToken userToken = new UserToken(possiblyHashedUserTokenId, tokenVersion, tokenAuth, + tokenService.getExpirationTime(), authentication.getMetadata()); try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { userToken.toXContent(builder, ToXContent.EMPTY_PARAMS); Map accessTokenMap = new HashMap<>(); @@ -687,35 +765,42 @@ public static void mockGetTokenFromId(UserToken userToken, boolean isExpired, Cl }).when(client).get(any(GetRequest.class), any(ActionListener.class)); } + private void mockGetTokenFromId(UserToken userToken, boolean isExpired) { + doAnswer(invocationOnMock -> { + GetRequest request = (GetRequest) invocationOnMock.getArguments()[0]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + GetResponse response = mock(GetResponse.class); + final String possiblyHashedUserTokenId; + if (userToken.getVersion().onOrAfter(TokenService.VERSION_ACCESS_TOKENS_AS_UUIDS)) { + possiblyHashedUserTokenId = TokenService.hashTokenString(userToken.getId()); + } else { + possiblyHashedUserTokenId = userToken.getId(); + } + if (possiblyHashedUserTokenId.equals(request.id().replace("token_", ""))) { + when(response.isExists()).thenReturn(true); + Map sourceMap = new HashMap<>(); + try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { + userToken.toXContent(builder, ToXContent.EMPTY_PARAMS); + Map accessTokenMap = new HashMap<>(); + Map userTokenMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), + Strings.toString(builder), false); + userTokenMap.put("id", possiblyHashedUserTokenId); + accessTokenMap.put("user_token", userTokenMap); + accessTokenMap.put("invalidated", isExpired); + sourceMap.put("access_token", accessTokenMap); + } + when(response.getSource()).thenReturn(sourceMap); + } + listener.onResponse(response); + return Void.TYPE; + }).when(client).get(any(GetRequest.class), any(ActionListener.class)); + } + public static void assertAuthentication(Authentication result, Authentication expected) { assertEquals(expected.getUser(), result.getUser()); assertEquals(expected.getAuthenticatedBy(), result.getAuthenticatedBy()); assertEquals(expected.getLookedUpBy(), result.getLookedUpBy()); assertEquals(expected.getMetadata(), result.getMetadata()); - assertEquals(AuthenticationType.TOKEN, result.getAuthenticationType()); - } - - protected String getDeprecatedAccessTokenString(TokenService tokenService, UserToken userToken) throws IOException, - GeneralSecurityException { - try (ByteArrayOutputStream os 
= new ByteArrayOutputStream(TokenService.MINIMUM_BASE64_BYTES); - OutputStream base64 = Base64.getEncoder().wrap(os); - StreamOutput out = new OutputStreamStreamOutput(base64)) { - out.setVersion(Version.V_7_0_0); - TokenService.KeyAndCache keyAndCache = tokenService.getActiveKeyCache(); - Version.writeVersion(Version.V_7_0_0, out); - out.writeByteArray(keyAndCache.getSalt().bytes); - out.writeByteArray(keyAndCache.getKeyHash().bytes); - final byte[] initializationVector = tokenService.getNewInitializationVector(); - out.writeByteArray(initializationVector); - try (CipherOutputStream encryptedOutput = - new CipherOutputStream(out, tokenService.getEncryptionCipher(initializationVector, keyAndCache, Version.V_7_0_0)); - StreamOutput encryptedStreamOutput = new OutputStreamStreamOutput(encryptedOutput)) { - encryptedStreamOutput.setVersion(Version.V_7_0_0); - encryptedStreamOutput.writeString(userToken.getId()); - encryptedStreamOutput.close(); - return new String(os.toByteArray(), StandardCharsets.UTF_8); - } - } } private DiscoveryNode addAnotherDataNodeWithVersion(ClusterService clusterService, Version version) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java deleted file mode 100644 index a73fc93f32e45..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.authc.esnative; - -import joptsimple.OptionException; -import joptsimple.OptionParser; -import joptsimple.OptionSet; -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.test.NativeRealmIntegTestCase; -import org.elasticsearch.common.CharArrays; -import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames; -import org.junit.BeforeClass; - -import java.nio.charset.StandardCharsets; -import java.nio.file.Path; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; - -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForNodePEMFiles; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.is; - -/** - * Integration tests for the {@code ESNativeMigrateTool} - */ -public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase { - - // Randomly use SSL (or not) - private static boolean useSSL; - - @BeforeClass - public static void setSSL() { - useSSL = randomBoolean(); - } - - @Override - protected boolean addMockHttpTransport() { - return false; // enable http - } - - @Override - public Settings nodeSettings(int nodeOrdinal) { - logger.info("--> use SSL? 
{}", useSSL); - Settings.Builder builder = Settings.builder() - .put(super.nodeSettings(nodeOrdinal)); - addSSLSettingsForNodePEMFiles(builder, "xpack.security.http.", true); - builder.put("xpack.security.http.ssl.enabled", useSSL); - return builder.build(); - } - - @Override - protected boolean transportSSLEnabled() { - return useSSL; - } - - @Override - protected boolean shouldSetReservedUserPasswords() { - return false; - } - - private Environment nodeEnvironment() throws Exception { - return internalCluster().getInstances(Environment.class).iterator().next(); - } - - public void testRetrieveUsers() throws Exception { - final Environment nodeEnvironment = nodeEnvironment(); - String home = Environment.PATH_HOME_SETTING.get(nodeEnvironment.settings()); - Path conf = nodeEnvironment.configFile(); - SecurityClient c = new SecurityClient(client()); - logger.error("--> creating users"); - int numToAdd = randomIntBetween(1,10); - Set addedUsers = new HashSet<>(numToAdd); - for (int i = 0; i < numToAdd; i++) { - String uname = randomAlphaOfLength(5); - c.preparePutUser(uname, "s3kirt".toCharArray(), getFastStoredHashAlgoForTests(), "role1", "user").get(); - addedUsers.add(uname); - } - logger.error("--> waiting for .security index"); - ensureGreen(RestrictedIndicesNames.SECURITY_MAIN_ALIAS); - - MockTerminal t = new MockTerminal(); - String username = nodeClientUsername(); - String password = new String(CharArrays.toUtf8Bytes(nodeClientPassword().getChars()), StandardCharsets.UTF_8); - String url = getHttpURL(); - ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles(); - - Settings.Builder builder = Settings.builder() - .put("path.home", home) - .put("path.conf", conf.toString()) - .put("xpack.security.http.ssl.client_authentication", "none"); - addSSLSettingsForPEMFiles( - builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", - "testnode", - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", - "xpack.security.http.", - Collections.singletonList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); - Settings settings = builder.build(); - logger.error("--> retrieving users using URL: {}, home: {}", url, home); - - OptionParser parser = muor.getParser(); - OptionSet options = parser.parse("-u", username, "-p", password, "-U", url); - logger.info("--> options: {}", options.asMap()); - Set users = muor.getUsersThatExist(t, settings, new Environment(settings, conf), options); - logger.info("--> output: \n{}", t.getOutput()); - for (String u : addedUsers) { - assertThat("expected list to contain: " + u + ", real list: " + users, users.contains(u), is(true)); - } - } - - public void testRetrieveRoles() throws Exception { - final Environment nodeEnvironment = nodeEnvironment(); - String home = Environment.PATH_HOME_SETTING.get(nodeEnvironment.settings()); - Path conf = nodeEnvironment.configFile(); - SecurityClient c = new SecurityClient(client()); - logger.error("--> creating roles"); - int numToAdd = randomIntBetween(1,10); - Set addedRoles = new HashSet<>(numToAdd); - for (int i = 0; i < numToAdd; i++) { - String rname = randomAlphaOfLength(5); - c.preparePutRole(rname) - .cluster("all", "none") - .runAs("root", "nobody") - .addIndices(new String[] { "index" }, new String[] { "read" }, new String[] { "body", "title" }, null, - new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean()) - .get(); - addedRoles.add(rname); - } - logger.error("--> waiting for 
.security index"); - ensureGreen(RestrictedIndicesNames.SECURITY_MAIN_ALIAS); - - MockTerminal t = new MockTerminal(); - String username = nodeClientUsername(); - String password = new String(CharArrays.toUtf8Bytes(nodeClientPassword().getChars()), StandardCharsets.UTF_8); - String url = getHttpURL(); - ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles(); - Settings.Builder builder = Settings.builder() - .put("path.home", home) - .put("xpack.security.http.ssl.client_authentication", "none"); - addSSLSettingsForPEMFiles(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem", - "testclient", - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", - "xpack.security.http.", - Collections.singletonList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); - Settings settings = builder.build(); - logger.error("--> retrieving roles using URL: {}, home: {}", url, home); - - OptionParser parser = muor.getParser(); - OptionSet options = parser.parse("-u", username, "-p", password, "-U", url); - Set roles = muor.getRolesThatExist(t, settings, new Environment(settings, conf), options); - logger.info("--> output: \n{}", t.getOutput()); - for (String r : addedRoles) { - assertThat("expected list to contain: " + r, roles.contains(r), is(true)); - } - } - - public void testMissingPasswordParameter() { - ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles(); - - final OptionException ex = expectThrows(OptionException.class, - () -> muor.getParser().parse("-u", "elastic", "-U", "http://localhost:9200")); - - assertThat(ex.getMessage(), containsString("password")); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateToolTests.java deleted file mode 100644 index 212fd4a8dab42..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeRealmMigrateToolTests.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.authc.esnative; - -import joptsimple.OptionSet; -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.cli.Command; -import org.elasticsearch.cli.CommandTestCase; -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.cli.Terminal.Verbosity; -import org.elasticsearch.cli.UserException; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.test.SecuritySettingsSourceField; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; - -import java.io.FileNotFoundException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.isEmptyString; - -/** - * Unit tests for the {@code ESNativeRealmMigrateTool} - */ -public class ESNativeRealmMigrateToolTests extends CommandTestCase { - - @Override - protected Command newCommand() { - return new ESNativeRealmMigrateTool() { - @Override - protected MigrateUserOrRoles newMigrateUserOrRoles() { - return new MigrateUserOrRoles() { - - @Override - protected Environment createEnv(Map settings) throws UserException { - Settings.Builder builder = Settings.builder(); - settings.forEach((k, v) -> builder.put(k, v)); - return TestEnvironment.newEnvironment(builder.build()); - } - - }; - } - }; - } - - public void testUserJson() throws Exception { - assertThat(ESNativeRealmMigrateTool.MigrateUserOrRoles.createUserJson(Strings.EMPTY_ARRAY, "hash".toCharArray()), - equalTo("{\"password_hash\":\"hash\",\"roles\":[]}")); - assertThat(ESNativeRealmMigrateTool.MigrateUserOrRoles.createUserJson(new String[]{"role1", "role2"}, "hash".toCharArray()), - equalTo("{\"password_hash\":\"hash\",\"roles\":[\"role1\",\"role2\"]}")); - } - - public void testRoleJson() throws Exception { - RoleDescriptor.IndicesPrivileges ip = RoleDescriptor.IndicesPrivileges.builder() - .indices(new String[]{"i1", "i2", "i3"}) - .privileges(new String[]{"all"}) - .grantedFields("body") - .build(); - RoleDescriptor.IndicesPrivileges[] ips = new RoleDescriptor.IndicesPrivileges[1]; - ips[0] = ip; - String[] cluster = Strings.EMPTY_ARRAY; - String[] runAs = Strings.EMPTY_ARRAY; - RoleDescriptor rd = new RoleDescriptor("rolename", cluster, ips, runAs); - assertThat(ESNativeRealmMigrateTool.MigrateUserOrRoles.createRoleJson(rd), - equalTo("{\"cluster\":[]," + - "\"indices\":[{\"names\":[\"i1\",\"i2\",\"i3\"]," + - "\"privileges\":[\"all\"],\"field_security\":{\"grant\":[\"body\"]}," + - "\"allow_restricted_indices\":false}]," + - "\"applications\":[]," + - "\"run_as\":[],\"metadata\":{},\"type\":\"role\"}")); - } - - public void testTerminalLogger() throws Exception { - Logger terminalLogger = ESNativeRealmMigrateTool.getTerminalLogger(terminal); - assertThat(terminal.getOutput(), isEmptyString()); - - // only error and fatal gets logged at normal verbosity - terminal.setVerbosity(Verbosity.NORMAL); - List nonLoggingLevels = new ArrayList<>(Arrays.asList(Level.values())); - nonLoggingLevels.removeAll(Arrays.asList(Level.ERROR, Level.FATAL)); - for (Level level : nonLoggingLevels) { - terminalLogger.log(level, "this level should not log " + level.name()); - assertThat(terminal.getOutput(), 
isEmptyString()); - } - - terminalLogger.log(Level.ERROR, "logging an error"); - assertEquals("logging an error\n", terminal.getOutput()); - terminal.reset(); - assertThat(terminal.getOutput(), isEmptyString()); - - terminalLogger.log(Level.FATAL, "logging a fatal message"); - assertEquals("logging a fatal message\n", terminal.getOutput()); - terminal.reset(); - assertThat(terminal.getOutput(), isEmptyString()); - - // everything will get logged at verbose! - terminal.setVerbosity(Verbosity.VERBOSE); - List loggingLevels = new ArrayList<>(Arrays.asList(Level.values())); - loggingLevels.remove(Level.OFF); - for (Level level : loggingLevels) { - terminalLogger.log(level, "this level should log " + level.name()); - assertEquals("this level should log " + level.name() + "\n", terminal.getOutput()); - terminal.reset(); - assertThat(terminal.getOutput(), isEmptyString()); - } - } - - public void testMissingFiles() throws Exception { - Path homeDir = createTempDir(); - Path confDir = homeDir.resolve("config"); - Path xpackConfDir = confDir; - Files.createDirectories(xpackConfDir); - - ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles(); - - OptionSet options = muor.getParser().parse("-u", "elastic", "-p", SecuritySettingsSourceField.TEST_PASSWORD, - "-U", "http://localhost:9200"); - Settings settings = Settings.builder().put("path.home", homeDir).build(); - Environment environment = new Environment(settings, confDir); - - MockTerminal mockTerminal = new MockTerminal(); - - FileNotFoundException fnfe = expectThrows(FileNotFoundException.class, - () -> muor.importUsers(mockTerminal, environment, options)); - assertThat(fnfe.getMessage(), containsString("users file")); - - Files.createFile(xpackConfDir.resolve("users")); - fnfe = expectThrows(FileNotFoundException.class, - () -> muor.importUsers(mockTerminal, environment, options)); - assertThat(fnfe.getMessage(), containsString("users_roles file")); - - fnfe = expectThrows(FileNotFoundException.class, - () -> muor.importRoles(mockTerminal, environment, options)); - assertThat(fnfe.getMessage(), containsString("roles.yml file")); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java index 070ea855800f7..ea1b6483fd795 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.security.authc.esnative; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.MockSecureSettings; @@ -167,7 +166,6 @@ private void verifySuccessfulAuthentication(boolean enabled) throws Exception { verify(usersStore, times(2)).getReservedUserInfo(eq(principal), any(ActionListener.class)); final ArgumentCaptor predicateCaptor = ArgumentCaptor.forClass(Predicate.class); verify(securityIndex, times(2)).checkMappingVersion(predicateCaptor.capture()); - verifyVersionPredicate(principal, predicateCaptor.getValue()); verifyNoMoreInteractions(usersStore); } @@ -186,7 +184,6 @@ public void testLookup() throws Exception { final ArgumentCaptor 
predicateCaptor = ArgumentCaptor.forClass(Predicate.class); verify(securityIndex).checkMappingVersion(predicateCaptor.capture()); - verifyVersionPredicate(principal, predicateCaptor.getValue()); PlainActionFuture future = new PlainActionFuture<>(); reservedRealm.doLookupUser("foobar", future); @@ -234,7 +231,6 @@ public void testLookupThrows() throws Exception { final ArgumentCaptor predicateCaptor = ArgumentCaptor.forClass(Predicate.class); verify(securityIndex).checkMappingVersion(predicateCaptor.capture()); - verifyVersionPredicate(principal, predicateCaptor.getValue()); verifyNoMoreInteractions(usersStore); } @@ -448,28 +444,4 @@ public static void mockGetAllReservedUserInfo(NativeUsersStore usersStore, Map versionPredicate) { - switch (principal) { - case LogstashSystemUser.NAME: - assertThat(versionPredicate.test(Version.V_6_3_0), is(true)); - break; - case BeatsSystemUser.NAME: - assertThat(versionPredicate.test(Version.V_6_2_3), is(false)); - assertThat(versionPredicate.test(Version.V_6_3_0), is(true)); - break; - case APMSystemUser.NAME: - assertThat(versionPredicate.test(Version.V_6_4_0), is(false)); - assertThat(versionPredicate.test(Version.V_6_5_0), is(true)); - break; - case RemoteMonitoringUser.NAME: - assertThat(versionPredicate.test(Version.V_6_4_0), is(false)); - assertThat(versionPredicate.test(Version.V_6_5_0), is(true)); - break; - default: - assertThat(versionPredicate.test(Version.V_6_3_0), is(true)); - break; - } - assertThat(versionPredicate.test(Version.V_7_0_0), is(true)); - } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileRealmTests.java index 9d05495449805..168f608951e09 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/file/FileRealmTests.java @@ -90,6 +90,7 @@ private RealmConfig getRealmConfig(Settings settings) { return new RealmConfig(REALM_IDENTIFIER, settings, TestEnvironment.newEnvironment(settings), threadContext); } + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/42267") public void testAuthenticateCaching() throws Exception { Settings settings = Settings.builder() .put(RealmSettings.realmSettingPrefix(REALM_IDENTIFIER) + "cache.hash_algo", diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java index e7fdbfe558ad2..64e976d90d1e3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthenticatorTests.java @@ -633,7 +633,7 @@ public void testImplicitFlowFailsWithUnsignedJwt() throws Exception { } private OpenIdConnectProviderConfiguration getOpConfig() throws URISyntaxException { - return new OpenIdConnectProviderConfiguration("op_name", + return new OpenIdConnectProviderConfiguration( new Issuer("https://op.example.com"), "https://op.example.org/jwks.json", new URI("https://op.example.org/login"), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmSettingsTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmSettingsTests.java index cd92168b3aa95..8dbf27070c492 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmSettingsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmSettingsTests.java @@ -37,7 +37,6 @@ public void testIncorrectResponseTypeThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") @@ -53,7 +52,6 @@ public void testIncorrectResponseTypeThrowsError() { public void testMissingAuthorizationEndpointThrowsError() { final Settings.Builder settingsBuilder = Settings.builder() .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") @@ -73,7 +71,6 @@ public void testInvalidAuthorizationEndpointThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") @@ -90,7 +87,6 @@ public void testMissingTokenEndpointThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") @@ -108,7 +104,6 @@ public void testInvalidTokenEndpointThrowsError() { 
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "This is not a uri") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") @@ -124,7 +119,6 @@ public void testMissingJwksUrlThrowsError() { final Settings.Builder settingsBuilder = Settings.builder() .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") @@ -140,7 +134,6 @@ public void testMissingIssuerThrowsError() { final Settings.Builder settingsBuilder = Settings.builder() .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") @@ -153,30 +146,12 @@ public void testMissingIssuerThrowsError() { Matchers.containsString(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER))); } - public void testMissingNameTypeThrowsError() { - final Settings.Builder settingsBuilder = Settings.builder() - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); - SettingsException exception = expectThrows(SettingsException.class, () -> { - new OpenIdConnectRealm(buildConfig(settingsBuilder.build()), null, null); - }); - assertThat(exception.getMessage(), - Matchers.containsString(getFullSettingKey(REALM_NAME, 
OpenIdConnectRealmSettings.OP_NAME))); - } - public void testMissingRedirectUriThrowsError() { final Settings.Builder settingsBuilder = Settings.builder() .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code"); @@ -192,7 +167,6 @@ public void testMissingClientIdThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com") @@ -209,7 +183,6 @@ public void testMissingPrincipalClaimThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my") @@ -228,7 +201,6 @@ public void testPatternWithoutSettingThrowsError() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.NAME_CLAIM.getPattern()), "^(.*)$") diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java index 0d26c0b442c88..151a7e1caea19 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java @@ -159,7 +159,6 @@ public void testBuildRelyingPartyConfigWithoutOpenIdScope() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") @@ -182,7 +181,6 @@ public void testBuildingAuthenticationRequest() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") @@ -205,7 +203,6 @@ public void testBuilidingAuthenticationRequestWithDefaultScope() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") @@ -236,7 +233,6 @@ public void testBuildingAuthenticationRequestWithExistingStateAndNonce() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, 
OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") @@ -257,7 +253,6 @@ public void testBuildingAuthenticationRequestWithLoginHint() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb") diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectTestCase.java index df5acb0c3a72c..9c1c4e981109a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectTestCase.java @@ -41,7 +41,6 @@ protected static Settings.Builder getBasicRealmSettings() { .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.org/token") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ENDSESSION_ENDPOINT), "https://op.example.org/logout") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com") - .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_NAME), "the op") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.org/jwks.json") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub") .put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.elastic.co/cb") diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java index 52c87c75a13a7..c3698f32b6e32 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java @@ -10,28 +10,19 @@ import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.client.transport.NoNodeAvailableException; -import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.test.SecuritySingleNodeTestCase; -import org.elasticsearch.transport.Transport; -import 
org.elasticsearch.xpack.core.TestXPackTransportClient; import org.elasticsearch.xpack.core.common.socket.SocketAccess; -import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.ssl.CertParsingUtils; import org.elasticsearch.xpack.core.ssl.PemUtils; import org.elasticsearch.xpack.core.ssl.SSLClientAuth; -import org.elasticsearch.xpack.security.LocalStateSecurity; import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; - import java.net.InetSocketAddress; import java.security.SecureRandom; import java.util.Arrays; @@ -41,7 +32,6 @@ import java.util.stream.Collectors; import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForNodePEMFiles; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; @@ -66,8 +56,9 @@ protected Settings nodeSettings() { .put("xpack.security.http.ssl.client_authentication", sslClientAuth) .put("xpack.security.authc.realms.file.file.order", "0") .put("xpack.security.authc.realms.pki.pki1.order", "1") - .put("xpack.security.authc.realms.pki.pki1.certificate_authorities", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .putList("xpack.security.authc.realms.pki.pki1.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt").toString(), + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt").toString()) .put("xpack.security.authc.realms.pki.pki1.files.role_mapping", getDataPath("role_mapping.yml")); return builder.build(); } @@ -84,42 +75,13 @@ protected boolean enableWarningsCheck() { return false; } - public void testTransportClientCanAuthenticateViaPki() { - Settings.Builder builder = Settings.builder(); - addSSLSettingsForPEMFiles( - builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", - "testnode", - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", - Arrays.asList - ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); - try (TransportClient client = createTransportClient(builder.build())) { - client.addTransportAddress(randomFrom(node().injector().getInstance(Transport.class).boundAddress().boundAddresses())); - IndexResponse response = client.prepareIndex("foo", "bar").setSource("pki", "auth").get(); - assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); - } - } - - /** - * Test uses the testclient cert which is trusted by the SSL layer BUT it is not trusted by the PKI authentication - * realm - */ - public void testTransportClientAuthenticationFailure() { - try (TransportClient client = createTransportClient(Settings.EMPTY)) { - client.addTransportAddress(randomFrom(node().injector().getInstance(Transport.class).boundAddress().boundAddresses())); - client.prepareIndex("foo", "bar").setSource("pki", "auth").get(); - fail("transport client should not have been able to authenticate"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#")); - } - } - public void testRestAuthenticationViaPki() throws Exception { SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", "testnode", "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", 
Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt")); try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { HttpPut put = new HttpPut(getNodeUrl() + "foo"); try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) { @@ -133,7 +95,8 @@ public void testRestAuthenticationFailure() throws Exception { SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem", "testclient", "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt")); try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { HttpPut put = new HttpPut(getNodeUrl() + "foo"); try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) { @@ -154,22 +117,6 @@ private SSLContext getRestSSLContext(String keyPath, String password, String cer return context; } - private TransportClient createTransportClient(Settings additionalSettings) { - Settings clientSettings = transportClientSettings(); - if (additionalSettings.getByPrefix("xpack.security.transport.ssl.").isEmpty() == false) { - clientSettings = clientSettings.filter(k -> k.startsWith("xpack.security.transport.ssl.") == false); - } - - Settings.Builder builder = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put(clientSettings, false) - .put(additionalSettings) - .put("cluster.name", node().settings().get("cluster.name")); - builder.remove(SecurityField.USER_SETTING.getKey()); - builder.remove("request.headers.Authorization"); - return new TestXPackTransportClient(builder.build(), LocalStateSecurity.class); - } - private String getNodeUrl() { TransportAddress transportAddress = randomFrom(node().injector().getInstance(HttpServerTransport.class) .boundAddress().boundAddresses()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java index d139d99bf9ce2..c5b5e734b6505 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java @@ -5,9 +5,11 @@ */ package org.elasticsearch.xpack.security.authc.saml; +import com.sun.net.httpserver.HttpsServer; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; @@ -19,6 +21,7 @@ import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; 
import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.core.security.authc.RealmConfig; @@ -52,8 +55,10 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; +import java.security.AccessController; import java.security.KeyStore; import java.security.PrivateKey; +import java.security.PrivilegedAction; import java.security.PrivilegedActionException; import java.security.PublicKey; import java.security.cert.Certificate; @@ -131,6 +136,7 @@ public void testReadIdpMetadataFromHttps() throws Exception { getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) .put("xpack.security.http.ssl.certificate_authorities", getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .putList("xpack.security.http.ssl.supported_protocols", getProtocols()) .put("path.home", createTempDir()) .setSecureSettings(mockSecureSettings) .build(); @@ -715,4 +721,21 @@ private void assertIdp1MetadataParsedCorrectly(EntityDescriptor descriptor) { assertEquals(SAMLConstants.SAML2_POST_BINDING_URI, ssoServices.get(0).getBinding()); assertEquals(SAMLConstants.SAML2_REDIRECT_BINDING_URI, ssoServices.get(1).getBinding()); } + + /** + * The {@link HttpsServer} in the JDK has issues with TLSv1.3 when running in a JDK prior to + * 12.0.1 so we pin to TLSv1.2 when running on an earlier JDK + */ + private static List getProtocols() { + if (JavaVersion.current().compareTo(JavaVersion.parse("12")) < 0) { + return List.of("TLSv1.2"); + } else { + JavaVersion full = + AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + if (full.compareTo(JavaVersion.parse("12.0.1")) < 0) { + return List.of("TLSv1.2"); + } + } + return XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS; + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java index 2fed720e23c09..8b30cb85fed78 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java @@ -64,6 +64,7 @@ public void stop() { } } + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/42267") public void testCacheSettings() { String cachingHashAlgo = Hasher.values()[randomIntBetween(0, Hasher.values().length - 1)].name().toLowerCase(Locale.ROOT); int maxUsers = randomIntBetween(10, 100); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/HasherTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/HasherTests.java index 6086dc642d22f..e51945cd90418 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/HasherTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/HasherTests.java @@ -50,6 +50,10 @@ public void testSSHA256SelfGenerated() throws Exception { testHasherSelfGenerated(Hasher.SSHA256); } + public void testSHA256SelfGenerated() 
throws Exception { + testHasherSelfGenerated(Hasher.SHA256); + } + public void testNoopSelfGenerated() throws Exception { testHasherSelfGenerated(Hasher.NOOP); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java index 3905bb6d3b4c1..e93b42a6d3a04 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ExpressionRoleMappingTests.java @@ -304,7 +304,7 @@ public void testSerialization() throws Exception { public void testSerializationPreV71() throws Exception { final ExpressionRoleMapping original = randomRoleMapping(false); - final Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_7_0_0); + final Version version = VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.V_7_0_1); BytesStreamOutput output = new BytesStreamOutput(); output.setVersion(version); original.writeTo(output); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index 6bb6e0c7b5854..3cca6cc4fd380 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -143,7 +143,7 @@ private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { public void testCacheClearOnIndexHealthChange() { final AtomicInteger numInvalidation = new AtomicInteger(0); - final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numInvalidation); + final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numInvalidation, true); int expectedInvalidation = 0; // existing to no longer present @@ -180,7 +180,7 @@ public void testCacheClearOnIndexHealthChange() { public void testCacheClearOnIndexOutOfDateChange() { final AtomicInteger numInvalidation = new AtomicInteger(0); - final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numInvalidation); + final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numInvalidation, true); store.onSecurityIndexStateChange( new SecurityIndexManager.State(Instant.now(), false, true, true, null, concreteSecurityIndexName, null), @@ -193,40 +193,59 @@ public void testCacheClearOnIndexOutOfDateChange() { assertEquals(2, numInvalidation.get()); } - private NativeRoleMappingStore buildRoleMappingStoreForInvalidationTesting(AtomicInteger invalidationCounter) { + public void testCacheIsNotClearedIfNoRealmsAreAttached() { + final AtomicInteger numInvalidation = new AtomicInteger(0); + final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numInvalidation, false); + + final SecurityIndexManager.State noIndexState = dummyState(null); + final SecurityIndexManager.State greenIndexState = dummyState(ClusterHealthStatus.GREEN); + store.onSecurityIndexStateChange(noIndexState, greenIndexState); + assertEquals(0, numInvalidation.get()); + } + + 
private NativeRoleMappingStore buildRoleMappingStoreForInvalidationTesting(AtomicInteger invalidationCounter, boolean attachRealm) { final Settings settings = Settings.builder().put("path.home", createTempDir()).build(); final ThreadPool threadPool = mock(ThreadPool.class); final ThreadContext threadContext = new ThreadContext(settings); when(threadPool.getThreadContext()).thenReturn(threadContext); + final String realmName = randomAlphaOfLengthBetween(4, 8); + final Client client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); when(client.settings()).thenReturn(settings); doAnswer(invocationOnMock -> { + assertThat(invocationOnMock.getArguments(), Matchers.arrayWithSize(3)); + final ClearRealmCacheRequest request = (ClearRealmCacheRequest) invocationOnMock.getArguments()[1]; + assertThat(request.realms(), Matchers.arrayContaining(realmName)); + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; invalidationCounter.incrementAndGet(); listener.onResponse(new ClearRealmCacheResponse(new ClusterName("cluster"), Collections.emptyList(), Collections.emptyList())); return null; }).when(client).execute(eq(ClearRealmCacheAction.INSTANCE), any(ClearRealmCacheRequest.class), any(ActionListener.class)); - final Environment env = TestEnvironment.newEnvironment(settings); - final RealmConfig realmConfig = new RealmConfig(new RealmConfig.RealmIdentifier("ldap", getTestName()), - settings, env, threadContext); - final CachingUsernamePasswordRealm mockRealm = new CachingUsernamePasswordRealm(realmConfig, threadPool) { - @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { - listener.onResponse(AuthenticationResult.notHandled()); - } - - @Override - protected void doLookupUser(String username, ActionListener listener) { - listener.onResponse(null); - } - }; final NativeRoleMappingStore store = new NativeRoleMappingStore(Settings.EMPTY, client, mock(SecurityIndexManager.class), mock(ScriptService.class)); - store.refreshRealmOnChange(mockRealm); + + if (attachRealm) { + final Environment env = TestEnvironment.newEnvironment(settings); + final RealmConfig.RealmIdentifier identifier = new RealmConfig.RealmIdentifier("ldap", realmName); + final RealmConfig realmConfig = new RealmConfig(identifier, settings, env, threadContext); + final CachingUsernamePasswordRealm mockRealm = new CachingUsernamePasswordRealm(realmConfig, threadPool) { + @Override + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { + listener.onResponse(AuthenticationResult.notHandled()); + } + + @Override + protected void doLookupUser(String username, ActionListener listener) { + listener.onResponse(null); + } + }; + store.refreshRealmOnChange(mockRealm); + } return store; } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java index 9bd69d3eb1a77..be73972f3a1e3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java @@ -43,7 +43,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.is; public class 
IndicesPermissionTests extends ESTestCase { @@ -214,7 +213,7 @@ public void testIndicesPrivilegesStreaming() throws IOException { assertEquals(readIndicesPrivileges, indicesPrivileges.build()); out = new BytesStreamOutput(); - out.setVersion(Version.V_6_0_0); + out.setVersion(Version.CURRENT); indicesPrivileges = RoleDescriptor.IndicesPrivileges.builder(); indicesPrivileges.grantedFields(allowed); indicesPrivileges.deniedFields(denied); @@ -224,7 +223,7 @@ public void testIndicesPrivilegesStreaming() throws IOException { indicesPrivileges.build().writeTo(out); out.close(); in = out.bytes().streamInput(); - in.setVersion(Version.V_6_0_0); + in.setVersion(Version.CURRENT); RoleDescriptor.IndicesPrivileges readIndicesPrivileges2 = new RoleDescriptor.IndicesPrivileges(in); assertEquals(readIndicesPrivileges, readIndicesPrivileges2); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 3dd5395b1fea0..6e7a9806781b5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -5,16 +5,6 @@ */ package org.elasticsearch.xpack.security.support; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiConsumer; - import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; import org.elasticsearch.action.Action; @@ -52,12 +42,22 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames; import org.elasticsearch.xpack.core.template.TemplateUtils; +import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.hamcrest.Matchers; import org.junit.Before; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; + import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_MAIN_TEMPLATE_7; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.TEMPLATE_VERSION_PATTERN; import static org.hamcrest.Matchers.equalTo; @@ -429,10 +429,7 @@ public void testIndexTemplateVersionMatching() throws Exception { assertTrue(SecurityIndexManager.checkTemplateExistsAndVersionMatches( SecurityIndexManager.SECURITY_MAIN_TEMPLATE_7, clusterState, logger, - Version.V_6_0_0::before)); - assertFalse(SecurityIndexManager.checkTemplateExistsAndVersionMatches( - SecurityIndexManager.SECURITY_MAIN_TEMPLATE_7, clusterState, logger, - Version.V_6_0_0::after)); + v -> Version.CURRENT.minimumCompatibilityVersion().before(v))); } public void testUpToDateMappingsAreIdentifiedAsUpToDate() throws IOException { diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java index ca548251d3713..1b7a7f8d870f2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java @@ -155,7 +155,9 @@ public void testTcpHandshake() { @SuppressForbidden(reason = "Need to open socket connection") public void testRenegotiation() throws Exception { - SSLService sslService = createSSLService(); + // force TLSv1.2 since renegotiation is not supported by 1.3 + SSLService sslService = + createSSLService(Settings.builder().put("xpack.security.transport.ssl.supported_protocols", "TLSv1.2").build()); final SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.security.transport.ssl"); SocketFactory factory = sslService.sslSocketFactory(sslConfiguration); try (SSLSocket socket = (SSLSocket) factory.createSocket()) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java index f8bf1f47a38f1..8bf5bf1d0af77 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.MockHttpTransport; import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectionProfile; @@ -40,6 +39,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.concurrent.CountDownLatch; import static org.elasticsearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING; @@ -80,8 +80,6 @@ protected Settings nodeSettings(int nodeOrdinal) { settingsBuilder.put("transport.profiles.default.xpack.security.type", "node"); // this is default lets set it randomly } - SecuritySettingsSource.addSecureSettings(settingsBuilder, secureSettings -> - secureSettings.setString("transport.profiles.client.xpack.security.ssl.keystore.secure_password", "testnode")); return settingsBuilder.build(); } @@ -112,7 +110,8 @@ public void testThatConnectionToServerTypeConnectionWorks() throws IOException, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", "testnode", "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", - Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); + List.of("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt")); try (Node node = new MockNode(nodeSettings.build(), mockPlugins)) { node.start(); ensureStableCluster(cluster().size() + 1); @@ -151,7 +150,8 @@ public void 
testThatConnectionToClientTypeConnectionIsRejected() throws IOExcept "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", "testnode", "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", - Collections.singletonList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); + List.of("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt")); try (Node node = new MockNode(nodeSettings.build(), mockPlugins)) { node.start(); TransportService instance = node.injector().getInstance(TransportService.class); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java deleted file mode 100644 index 30208a1158075..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.transport.netty4; - -import org.elasticsearch.client.Client; -import org.elasticsearch.client.transport.NoNodeAvailableException; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.SecuritySettingsSource; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.xpack.core.TestXPackTransportClient; -import org.elasticsearch.xpack.security.LocalStateSecurity; - -import java.net.InetSocketAddress; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Arrays; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.Matchers.containsString; - -public class SslHostnameVerificationTests extends SecurityIntegTestCase { - - @Override - protected boolean transportSSLEnabled() { - return true; - } - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - Settings settings = super.nodeSettings(nodeOrdinal); - Settings.Builder settingsBuilder = Settings.builder(); - settingsBuilder.put(settings.filter(k -> k.startsWith("xpack.security.transport.ssl.") == false), false); - Path keyPath; - Path certPath; - Path nodeCertPath; - try { - /* - * This keystore uses a cert without any subject alternative names and a CN of "Elasticsearch Test Node No SAN" - * that will not resolve to a DNS name and will always cause hostname verification failures - */ - keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem"); - certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt"); - nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); - assert keyPath != null; - assert certPath != null; - assert nodeCertPath != null; - assertThat(Files.exists(certPath), is(true)); - assertThat(Files.exists(nodeCertPath), is(true)); - assertThat(Files.exists(keyPath), is(true)); - } catch 
(Exception e) { - throw new RuntimeException(e); - } - - SecuritySettingsSource.addSecureSettings(settingsBuilder, secureSettings -> { - secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode-no-subjaltname"); - }); - return settingsBuilder.put("xpack.security.transport.ssl.key", keyPath.toAbsolutePath()) - .put("xpack.security.transport.ssl.certificate", certPath.toAbsolutePath()) - .putList("xpack.security.transport.ssl.certificate_authorities", - Arrays.asList(certPath.toString(), nodeCertPath.toString())) - // disable hostname verification as this test uses certs without a valid SAN or DNS in the CN - .put("xpack.security.transport.ssl.verification_mode", "certificate") - .build(); - } - - @Override - protected Settings transportClientSettings() { - Path keyPath; - Path certPath; - Path nodeCertPath; - try { - keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem"); - certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt"); - nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); - assert keyPath != null; - assert certPath != null; - assert nodeCertPath != null; - assertThat(Files.exists(certPath), is(true)); - assertThat(Files.exists(nodeCertPath), is(true)); - assertThat(Files.exists(keyPath), is(true)); - } catch (Exception e) { - throw new RuntimeException(e); - } - Settings settings = super.transportClientSettings(); - // remove all ssl settings - Settings.Builder builder = Settings.builder(); - builder.put(settings.filter(k -> k.startsWith("xpack.security.transport.ssl.") == false), false); - - builder.put("xpack.security.transport.ssl.verification_mode", "certificate") - .put("xpack.security.transport.ssl.key", keyPath.toAbsolutePath()) - .put("xpack.security.transport.ssl.key_passphrase", "testnode-no-subjaltname") - .put("xpack.security.transport.ssl.certificate", certPath.toAbsolutePath()) - .putList("xpack.security.transport.ssl.certificate_authorities", Arrays.asList(certPath.toString(), nodeCertPath.toString())); - return builder.build(); - } - - public void testThatHostnameMismatchDeniesTransportClientConnection() throws Exception { - Transport transport = internalCluster().getDataNodeInstance(Transport.class); - TransportAddress transportAddress = transport.boundAddress().publishAddress(); - InetSocketAddress inetSocketAddress = transportAddress.address(); - - Settings settings = Settings.builder().put(transportClientSettings()) - .put("xpack.security.transport.ssl.verification_mode", "full") - .build(); - - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateSecurity.class)) { - client.addTransportAddress(new TransportAddress(inetSocketAddress.getAddress(), inetSocketAddress.getPort())); - client.admin().cluster().prepareHealth().get(); - fail("Expected a NoNodeAvailableException due to hostname verification failures"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#")); - } - } - - public void testTransportClientConnectionIgnoringHostnameVerification() throws Exception { - Client client = internalCluster().transportClient(); - assertGreenClusterState(client); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java index 5f0f3c94e36e8..71b3d7a1990b4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java @@ -33,7 +33,9 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; public class EllipticCurveSSLTests extends SecurityIntegTestCase { @@ -106,7 +108,8 @@ public SSLSocket run() throws Exception { Certificate[] peerChain = session.getPeerCertificates(); assertEquals(1, peerChain.length); assertEquals(certs[0], peerChain[0]); - assertThat(session.getCipherSuite(), containsString("ECDSA")); + assertThat(session.getCipherSuite(), + anyOf(containsString("ECDSA"), is("TLS_AES_256_GCM_SHA384"), is("TLS_AES_128_GCM_SHA256"))); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java index 5f25213beefa1..8488fe2a5e638 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java @@ -15,38 +15,26 @@ import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; -import org.elasticsearch.client.transport.NoNodeAvailableException; -import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.xpack.core.TestXPackTransportClient; -import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.common.socket.SocketAccess; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.security.LocalStateSecurity; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLHandshakeException; +import javax.net.ssl.TrustManagerFactory; import java.io.InputStreamReader; import java.net.InetSocketAddress; import java.nio.charset.StandardCharsets; import java.security.KeyStore; import java.security.SecureRandom; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; -import java.util.List; import java.util.Locale; -import java.util.Set; - -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLHandshakeException; -import javax.net.ssl.TrustManagerFactory; import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForNodePEMFiles; import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; @@ -72,50 +60,6 @@ protected boolean transportSSLEnabled() { return true; } - // no SSL exception as this is the 
exception is returned when connecting - public void testThatUnconfiguredCiphersAreRejected() throws Exception { - Set supportedCiphers = Sets.newHashSet(SSLContext.getDefault().getSupportedSSLParameters().getCipherSuites()); - Set defaultXPackCiphers = Sets.newHashSet(XPackSettings.DEFAULT_CIPHERS); - final List unconfiguredCiphers = new ArrayList<>(Sets.difference(supportedCiphers, defaultXPackCiphers)); - Collections.shuffle(unconfiguredCiphers, random()); - assumeFalse("the unconfigured ciphers list is empty", unconfiguredCiphers.isEmpty()); - - try (TransportClient transportClient = new TestXPackTransportClient(Settings.builder() - .put(transportClientSettings()) - .put("node.name", "programmatic_transport_client") - .put("cluster.name", internalCluster().getClusterName()) - .putList("xpack.security.transport.ssl.cipher_suites", unconfiguredCiphers) - .build(), LocalStateSecurity.class)) { - - TransportAddress transportAddress = randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses()); - transportClient.addTransportAddress(transportAddress); - - transportClient.admin().cluster().prepareHealth().get(); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#")); - } - } - - public void testThatTransportClientUsingSSLv3ProtocolIsRejected() { - assumeFalse("Can't run in a FIPS JVM as SSLv3 SSLContext not available", inFipsJvm()); - try (TransportClient transportClient = new TestXPackTransportClient(Settings.builder() - .put(transportClientSettings()) - .put("node.name", "programmatic_transport_client") - .put("cluster.name", internalCluster().getClusterName()) - .putList("xpack.security.transport.ssl.supported_protocols", new String[]{"SSLv3"}) - .build(), LocalStateSecurity.class)) { - - TransportAddress transportAddress = randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses()); - transportClient.addTransportAddress(transportAddress); - - transportClient.admin().cluster().prepareHealth().get(); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#")); - } - } - public void testThatConnectionToHTTPWorks() throws Exception { Settings.Builder builder = Settings.builder(); addSSLSettingsForPEMFiles( diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java deleted file mode 100644 index 04c1ff03f8270..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java +++ /dev/null @@ -1,412 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.transport.ssl; - -import org.elasticsearch.client.transport.NoNodeAvailableException; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.xpack.core.TestXPackTransportClient; -import org.elasticsearch.xpack.core.security.SecurityField; -import org.elasticsearch.xpack.core.ssl.SSLClientAuth; -import org.elasticsearch.xpack.security.LocalStateSecurity; -import org.junit.BeforeClass; - -import java.net.InetAddress; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Arrays; -import java.util.Collections; - -import static org.elasticsearch.test.SecuritySettingsSource.TEST_USER_NAME; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForNodePEMFiles; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; -import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.Matchers.containsString; - -public class SslMultiPortTests extends SecurityIntegTestCase { - - private static int randomClientPort; - private static int randomNoClientAuthPort; - private static InetAddress localAddress; - - @BeforeClass - public static void getRandomPort() { - randomClientPort = randomIntBetween(49000, 65500); // ephemeral port - randomNoClientAuthPort = randomIntBetween(49000, 65500); - localAddress = InetAddress.getLoopbackAddress(); - } - - /** - * On each node sets up the following profiles: - *
<ul> - *     <li>default: testnode keypair. Requires client auth</li> - *     <li>client: testnode-client-profile profile that only trusts the testclient cert. Requires client auth</li> - *     <li>no_client_auth: testnode keypair. Does not require client auth</li> - * </ul>
- */ - @Override - protected Settings nodeSettings(int nodeOrdinal) { - String randomClientPortRange = randomClientPort + "-" + (randomClientPort+100); - String randomNoClientAuthPortRange = randomNoClientAuthPort + "-" + (randomNoClientAuthPort+100); - - Path trustCert; - try { - trustCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt"); - assertThat(Files.exists(trustCert), is(true)); - } catch (Exception e) { - throw new RuntimeException(e); - } - - Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal)); - addSSLSettingsForNodePEMFiles(builder, "transport.profiles.client.xpack.security.", true); - builder.put("transport.profiles.client.port", randomClientPortRange) - .put("transport.profiles.client.bind_host", NetworkAddress.format(localAddress)) - .put("transport.profiles.client.xpack.security.ssl.certificate_authorities", trustCert.toAbsolutePath()); - addSSLSettingsForNodePEMFiles(builder, "transport.profiles.no_client_auth.xpack.security.", true); - builder.put("transport.profiles.no_client_auth.port", randomNoClientAuthPortRange) - .put("transport.profiles.no_client_auth.bind_host", NetworkAddress.format(localAddress)) - .put("transport.profiles.no_client_auth.xpack.security.ssl.client_authentication", SSLClientAuth.NONE); - final Settings settings = builder.build(); - logger.info("node {} settings:\n{}", nodeOrdinal, settings); - return settings; - } - - @Override - protected boolean transportSSLEnabled() { - return true; - } - - private TransportClient createTransportClient(Settings additionalSettings) { - Settings settings = Settings.builder() - .put(transportClientSettings().filter(s -> s.startsWith("xpack.security.transport.ssl") == false)) - .put("node.name", "programmatic_transport_client") - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put(additionalSettings) - .build(); - //return new TestXPackTransportClient(settings, LocalStateSecurity.class); - logger.info("transport client settings:\n{}", settings); - return new TestXPackTransportClient(settings, LocalStateSecurity.class); - } - - /** - * Uses the internal cluster's transport client to test connection to the default profile. The internal transport - * client uses the same SSL settings as the default profile so a connection should always succeed - */ - public void testThatStandardTransportClientCanConnectToDefaultProfile() throws Exception { - assertGreenClusterState(internalCluster().transportClient()); - } - - /** - * Uses a transport client with the same settings as the internal cluster transport client to test connection to the - * no_client_auth profile. The internal transport client is not used here since we are connecting to a different - * profile. 
Since the no_client_auth profile does not require client authentication, the standard transport client - * connection should always succeed as the settings are the same as the default profile except for the port and - * disabling the client auth requirement - */ - public void testThatStandardTransportClientCanConnectToNoClientAuthProfile() throws Exception { - try(TransportClient transportClient = new TestXPackTransportClient(Settings.builder() - .put(transportClientSettings()) - .put("xpack.security.transport.ssl.enabled", true) - .put("node.name", "programmatic_transport_client") - .put("cluster.name", internalCluster().getClusterName()) - .build(), LocalStateSecurity.class)) { - transportClient.addTransportAddress(new TransportAddress(localAddress, - getProfilePort("no_client_auth"))); - assertGreenClusterState(transportClient); - } - } - - /** - * Uses a transport client with the same settings as the internal cluster transport client to test connection to the - * client profile. The internal transport client is not used here since we are connecting to a different - * profile. The client profile requires client auth and only trusts the certificate in the testclient-client-profile - * keystore so this connection will fail as the certificate presented by the standard transport client is not trusted - * by this profile - */ - public void testThatStandardTransportClientCannotConnectToClientProfile() throws Exception { - try (TransportClient transportClient = createTransportClient(Settings.EMPTY)) { - transportClient.addTransportAddress(new TransportAddress(localAddress, getProfilePort("client"))); - transportClient.admin().cluster().prepareHealth().get(); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#-")); - } - } - - /** - * Uses a transport client with a custom key pair; TransportClient only trusts the testnode - * certificate and had its own self signed certificate. This test connects to the client profile, which is only - * set to trust the testclient-client-profile certificate so the connection should always succeed - */ - public void testThatProfileTransportClientCanConnectToClientProfile() throws Exception { - Settings.Builder builder = Settings.builder(); - addSSLSettingsForPEMFiles( - builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", - "testclient-client-profile", - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", - Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); - try (TransportClient transportClient = createTransportClient(builder.build())) { - transportClient.addTransportAddress(new TransportAddress(localAddress, getProfilePort("client"))); - assertGreenClusterState(transportClient); - } - } - - /** - * Uses a transport client with a custom key pair; TransportClient only trusts the testnode - * certificate and had its own self signed certificate. 
This test connects to the no_client_auth profile, which - * uses a truststore that does not trust the testclient-client-profile certificate but does not require client - * authentication - */ - public void testThatProfileTransportClientCanConnectToNoClientAuthProfile() throws Exception { - Settings.Builder builder = Settings.builder(); - addSSLSettingsForPEMFiles( - builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", - "testclient-client-profile", - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", - Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); - try (TransportClient transportClient = createTransportClient(builder.build())) { - transportClient.addTransportAddress(new TransportAddress(localAddress, - getProfilePort("no_client_auth"))); - assertGreenClusterState(transportClient); - } - } - - /** - * Uses a transport client with a custom key pair; TransportClient only trusts the testnode - * certificate and had its own self signed certificate. This test connects to the default profile, which - * uses a truststore that does not trust the testclient-client-profile certificate and requires client authentication - * so the connection should always fail - */ - public void testThatProfileTransportClientCannotConnectToDefaultProfile() throws Exception { - Settings.Builder builder = Settings.builder(); - addSSLSettingsForPEMFiles( - builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", - "testclient-client-profile", - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", - Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); - try (TransportClient transportClient = createTransportClient(builder.build())) { - TransportAddress transportAddress = randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses()); - transportClient.addTransportAddress(transportAddress); - transportClient.admin().cluster().prepareHealth().get(); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#-")); - } - } - - /** - * Uses a transport client with SSL disabled. This test connects to the default profile, which should always fail - * as a non-ssl transport client cannot connect to a ssl profile - */ - public void testThatTransportClientCannotConnectToDefaultProfile() throws Exception { - Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .build(); - try (TransportClient transportClient = new TestXPackTransportClient(settings, - Collections.singletonList(LocalStateSecurity.class))) { - transportClient.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); - assertGreenClusterState(transportClient); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#-")); - } - } - - /** - * Uses a transport client with SSL disabled. 
This test connects to the client profile, which should always fail - * as a non-ssl transport client cannot connect to a ssl profile - */ - public void testThatTransportClientCannotConnectToClientProfile() throws Exception { - Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .build(); - try (TransportClient transportClient = new TestXPackTransportClient(settings, - Collections.singletonList(LocalStateSecurity.class))) { - transportClient.addTransportAddress(new TransportAddress(localAddress, getProfilePort("client"))); - assertGreenClusterState(transportClient); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#-")); - } - } - - /** - * Uses a transport client with SSL disabled. This test connects to the no_client_auth profile, which should always fail - * as a non-ssl transport client cannot connect to a ssl profile - */ - public void testThatTransportClientCannotConnectToNoClientAuthProfile() throws Exception { - Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .build(); - try (TransportClient transportClient = new TestXPackTransportClient(settings, - Collections.singletonList(LocalStateSecurity.class))) { - transportClient.addTransportAddress(new TransportAddress(localAddress, - getProfilePort("no_client_auth"))); - assertGreenClusterState(transportClient); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#-")); - } - } - - /** - * Uses a transport client that only trusts the testnode certificate. This test connects to the no_client_auth profile, - * which uses the testnode certificate and does not require to present a certificate, so this connection should always succeed - */ - public void testThatTransportClientWithOnlyTruststoreCanConnectToNoClientAuthProfile() throws Exception { - Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.security.transport.ssl.certificate_authorities", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) - .build(); - try (TransportClient transportClient = new TestXPackTransportClient(settings, - Collections.singletonList(LocalStateSecurity.class))) { - transportClient.addTransportAddress(new TransportAddress(localAddress, - getProfilePort("no_client_auth"))); - } - } - - /** - * Uses a transport client that only trusts the testnode certificate. 
This test connects to the client profile, which uses - * the testnode certificate and requires the client to present a certificate, so this connection will never work as - * the client has no certificate to present - */ - public void testThatTransportClientWithOnlyTruststoreCannotConnectToClientProfile() throws Exception { - Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.security.transport.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.certificate_authorities", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) - .build(); - try (TransportClient transportClient = new TestXPackTransportClient(settings, - Collections.singletonList(LocalStateSecurity.class))) { - transportClient.addTransportAddress(new TransportAddress(localAddress, getProfilePort("client"))); - assertGreenClusterState(transportClient); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#-")); - } - } - - /** - * Uses a transport client that only trusts the testnode certificate. This test connects to the default profile, which uses - * the testnode certificate and requires the client to present a certificate, so this connection will never work as - * the client has no certificate to present - */ - public void testThatTransportClientWithOnlyTruststoreCannotConnectToDefaultProfile() throws Exception { - Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.security.transport.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.certificate_authorities", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) - .build(); - try (TransportClient transportClient = new TestXPackTransportClient(settings, - Collections.singletonList(LocalStateSecurity.class))) { - transportClient.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); - assertGreenClusterState(transportClient); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#-")); - } - } - - /** - * Uses a transport client with the default JDK truststore; this truststore only trusts the known good public - * certificate authorities. 
This test connects to the default profile, which uses a self-signed certificate that - * will never be trusted by the default truststore so the connection should always fail - */ - public void testThatSSLTransportClientWithNoTruststoreCannotConnectToDefaultProfile() throws Exception { - Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", true) - .build(); - try (TransportClient transportClient = new TestXPackTransportClient(settings, - Collections.singletonList(LocalStateSecurity.class))) { - transportClient.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); - assertGreenClusterState(transportClient); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#-")); - } - } - - /** - * Uses a transport client with the default JDK truststore; this truststore only trusts the known good public - * certificate authorities. This test connects to the client profile, which uses a self-signed certificate that - * will never be trusted by the default truststore so the connection should always fail - */ - public void testThatSSLTransportClientWithNoTruststoreCannotConnectToClientProfile() throws Exception { - Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", true) - .build(); - try (TransportClient transportClient = new TestXPackTransportClient(settings, - Collections.singletonList(LocalStateSecurity.class))) { - transportClient.addTransportAddress(new TransportAddress(localAddress, getProfilePort("client"))); - assertGreenClusterState(transportClient); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#-")); - } - } - - /** - * Uses a transport client with the default JDK truststore; this truststore only trusts the known good public - * certificate authorities. 
This test connects to the no_client_auth profile, which uses a self-signed certificate that - * will never be trusted by the default truststore so the connection should always fail - */ - public void testThatSSLTransportClientWithNoTruststoreCannotConnectToNoClientAuthProfile() throws Exception { - Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", true) - .build(); - try (TransportClient transportClient = new TestXPackTransportClient(settings, - Collections.singletonList(LocalStateSecurity.class))) { - transportClient.addTransportAddress(new TransportAddress(localAddress, - getProfilePort("no_client_auth"))); - assertGreenClusterState(transportClient); - fail("Expected NoNodeAvailableException"); - } catch (NoNodeAvailableException e) { - assertThat(e.getMessage(), containsString("None of the configured nodes are available: [{#transport#-")); - } - } - - private static int getProfilePort(String profile) { - TransportAddress[] transportAddresses = - internalCluster().getInstance(Transport.class).profileBoundAddresses().get(profile).boundAddresses(); - for (TransportAddress address : transportAddresses) { - if (address.address().getAddress().equals(localAddress)) { - return address.address().getPort(); - } - } - throw new IllegalStateException("failed to find transport address equal to [" + NetworkAddress.format(localAddress) + "] " + - " in the following bound addresses " + Arrays.toString(transportAddresses)); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java index 7075a677a26ce..ce0cc5c111265 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java @@ -11,23 +11,19 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.xpack.core.TestXPackTransportClient; -import org.elasticsearch.xpack.core.security.SecurityField; +import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ssl.CertParsingUtils; import org.elasticsearch.xpack.core.ssl.PemUtils; import org.elasticsearch.xpack.core.ssl.SSLClientAuth; -import org.elasticsearch.xpack.security.LocalStateSecurity; import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; @@ -36,13 +32,14 @@ import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; -import java.nio.file.Files; -import java.nio.file.Path; +import 
java.security.AccessController; +import java.security.PrivilegedAction; import java.security.SecureRandom; import java.security.cert.CertPathBuilderException; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.List; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; @@ -130,47 +127,17 @@ public void testThatHttpWorksWithSslClientAuth() throws IOException { } } - public void testThatTransportWorksWithoutSslClientAuth() throws IOException { - // specify an arbitrary key and certificate - not the certs needed to connect to the transport protocol - Path keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem"); - Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt"); - Path nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); - - if (Files.notExists(keyPath) || Files.notExists(certPath)) { - throw new ElasticsearchException("key or certificate path doesn't exist"); - } - - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testclient-client-profile"); - Settings settings = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.security.transport.ssl.client_authentication", SSLClientAuth.NONE) - .put("xpack.security.transport.ssl.key", keyPath) - .put("xpack.security.transport.ssl.certificate", certPath) - .put("xpack.security.transport.ssl.certificate_authorities", nodeCertPath) - .setSecureSettings(secureSettings) - .put("cluster.name", internalCluster().getClusterName()) - .put(SecurityField.USER_SETTING.getKey(), transportClientUsername() + ":" + new String(transportClientPassword().getChars())) - .build(); - try (TransportClient client = new TestXPackTransportClient(settings, LocalStateSecurity.class)) { - Transport transport = internalCluster().getDataNodeInstance(Transport.class); - TransportAddress transportAddress = transport.boundAddress().publishAddress(); - client.addTransportAddress(transportAddress); - - assertGreenClusterState(client); - } - } - private SSLContext getSSLContext() { try { String certPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"; String nodeCertPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"; + String nodeEcCertPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt"; String keyPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem"; TrustManager tm = CertParsingUtils.trustManager(CertParsingUtils.readCertificates(Arrays.asList(getDataPath - (certPath), getDataPath(nodeCertPath)))); + (certPath), getDataPath(nodeCertPath), getDataPath(nodeEcCertPath)))); KeyManager km = CertParsingUtils.keyManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath (certPath))), PemUtils.readPrivateKey(getDataPath(keyPath), "testclient"::toCharArray), "testclient".toCharArray()); - SSLContext context = SSLContext.getInstance("TLSv1.2"); + SSLContext context = SSLContext.getInstance(randomFrom("TLSv1.3", "TLSv1.2")); context.init(new KeyManager[] { km }, new TrustManager[] { tm }, new SecureRandom()); return context; } catch (Exception e) { @@ -188,4 +155,18 @@ private byte[] 
toByteArray(InputStream is) throws IOException { } return baos.toByteArray(); } + + /** + * TLSv1.3 when running in a JDK prior to 11.0.3 has a race condition when multiple simultaneous connections are established. See + * JDK-8213202. This issue is not triggered when using client authentication, which we do by default for transport connections. + * However if client authentication is turned off and TLSv1.3 is used on the affected JVMs then we will hit this issue. + */ + private static List getProtocols() { + JavaVersion full = + AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + if (full.compareTo(JavaVersion.parse("11.0.3")) < 0) { + return List.of("TLSv1.2"); + } + return XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS; + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java index dd6985889d7ee..3eeaca1a3f114 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java @@ -115,6 +115,10 @@ public void testThatSSLConfigurationReloadsOnModification() throws Exception { try (SSLSocket socket = (SSLSocket) sslSocketFactory.createSocket(address.getAddress(), address.getPort())) { assertThat(socket.isConnected(), is(true)); socket.startHandshake(); + if (socket.getSession().getProtocol().equals("TLSv1.3")) { + // blocking read for TLSv1.3 to see if the other side closed the connection + socket.getInputStream().read(); + } fail("handshake should not have been successful!"); } catch (SSLException | SocketException expected) { logger.trace("expected exception", expected); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java index a89b8fcdd6981..944c3306763a6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java @@ -31,6 +31,7 @@ import javax.net.ssl.SSLSocketFactory; import java.io.IOException; import java.net.SocketException; +import java.net.SocketTimeoutException; import java.nio.file.AtomicMoveNotSupportedException; import java.nio.file.Files; import java.nio.file.Path; @@ -165,7 +166,7 @@ protected boolean transportSSLEnabled() { public void testCertificateWithTrustedNameIsAccepted() throws Exception { writeRestrictions("*.trusted"); try { - tryConnect(trustedCert); + tryConnect(trustedCert, false); } catch (SSLException | SocketException ex) { logger.warn(new ParameterizedMessage("unexpected handshake failure with certificate [{}] [{}]", trustedCert.certificate.getSubjectDN(), trustedCert.certificate.getSubjectAlternativeNames()), ex); @@ -176,7 +177,7 @@ public void testCertificateWithTrustedNameIsAccepted() throws Exception { public void testCertificateWithUntrustedNameFails() throws Exception { writeRestrictions("*.trusted"); try { - tryConnect(untrustedCert); + tryConnect(untrustedCert, true); fail("handshake should have failed, but was successful"); } catch (SSLException | SocketException ex) { // expected @@ -187,7 +188,7 @@ public void testRestrictionsAreReloaded() throws Exception { writeRestrictions("*"); assertBusy(() -> { try { - 
tryConnect(untrustedCert); + tryConnect(untrustedCert, false); } catch (SSLException | SocketException ex) { fail("handshake should have been successful, but failed with " + ex); } @@ -196,7 +197,7 @@ public void testRestrictionsAreReloaded() throws Exception { writeRestrictions("*.trusted"); assertBusy(() -> { try { - tryConnect(untrustedCert); + tryConnect(untrustedCert, true); fail("handshake should have failed, but was successful"); } catch (SSLException | SocketException ex) { // expected @@ -221,7 +222,7 @@ private void runResourceWatcher() { } } - private void tryConnect(CertificateInfo certificate) throws Exception { + private void tryConnect(CertificateInfo certificate, boolean shouldFail) throws Exception { Settings settings = Settings.builder() .put("path.home", createTempDir()) .put("xpack.security.transport.ssl.key", certificate.getKeyPath()) @@ -239,6 +240,16 @@ private void tryConnect(CertificateInfo certificate) throws Exception { assertThat(socket.isConnected(), is(true)); // The test simply relies on this (synchronously) connecting (or not), so we don't need a handshake handler socket.startHandshake(); + + // blocking read for TLSv1.3 to see if the other side closed the connection + if (socket.getSession().getProtocol().equals("TLSv1.3")) { + if (shouldFail) { + socket.getInputStream().read(); + } else { + socket.setSoTimeout(1000); // 1 second timeout + expectThrows(SocketTimeoutException.class, () -> socket.getInputStream().read()); + } + } } } diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks index d6dc21c1bd5ff..b4cf27dfd26a8 100644 Binary files a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks and b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks differ diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks index 832a23d2dbf09..ebe6146124e8f 100644 Binary files a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks and b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks differ diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12 b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12 index 2ac8125b58d31..0e6bcaa4f8b76 100644 Binary files a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12 and b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12 differ diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt new file mode 100644 index 0000000000000..f4c9a6a7aaca8 --- /dev/null +++ 
b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt @@ -0,0 +1,13 @@ +-----BEGIN CERTIFICATE----- +MIIB7zCCAZOgAwIBAgIEcmggOzAMBggqhkjOPQQDAgUAMCIxIDAeBgNVBAMTF0Vs +YXN0aWNzZWFyY2ggVGVzdCBOb2RlMB4XDTE4MDUxNzA5MzYxMFoXDTQ1MTAwMjA5 +MzYxMFowIjEgMB4GA1UEAxMXRWxhc3RpY3NlYXJjaCBUZXN0IE5vZGUwWTATBgcq +hkjOPQIBBggqhkjOPQMBBwNCAATuZRlXGn/ROcO7yFJJ50b20YvgV3U+FpRx0nx/ +yigWj6xiEMKnWbbUnM0mKF8c3GHGk5g8OXPnbK96uj6tpMB5o4G0MIGxMB0GA1Ud +DgQWBBRNAGO77mUhG6SQvIXQTbpcFwlf2TCBjwYDVR0RBIGHMIGEgglsb2NhbGhv +c3SCFWxvY2FsaG9zdC5sb2NhbGRvbWFpboIKbG9jYWxob3N0NIIXbG9jYWxob3N0 +NC5sb2NhbGRvbWFpbjSCCmxvY2FsaG9zdDaCF2xvY2FsaG9zdDYubG9jYWxkb21h +aW42hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABMAwGCCqGSM49BAMCBQADSAAwRQIg +Z3IvdmY5LFdbxoVSs6pV2tJ5+U833Chu0+ZzPo77IVUCIQDRx1FVitVuzBpqwhSW ++Zprt2RLPllC4s4BCApGDh8i1g== +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.pem b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.pem new file mode 100644 index 0000000000000..f8f29e47e0d35 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.pem @@ -0,0 +1,8 @@ +Bag Attributes + friendlyName: testnode_ec + localKeyID: 54 69 6D 65 20 31 35 35 36 30 33 32 36 30 37 32 33 30 +Key Attributes: +-----BEGIN PRIVATE KEY----- +MEECAQAwEwYHKoZIzj0CAQYIKoZIzj0DAQcEJzAlAgEBBCCxFwoKqfcGailZhuh0 +xEj3gssdjuEw6BasiC8+zhqHBA== +-----END PRIVATE KEY----- diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle index c4719aef04ab8..1d13df3b2c32e 100644 --- a/x-pack/plugin/sql/build.gradle +++ b/x-pack/plugin/sql/build.gradle @@ -12,10 +12,11 @@ ext { // SQL dependency versions jlineVersion="3.10.0" antlrVersion="4.5.3" - + // SQL test dependency versions csvjdbcVersion="1.0.34" h2Version="1.4.197" + h2gisVersion="1.5.0" } configurations { @@ -48,6 +49,9 @@ dependencies { compile "org.antlr:antlr4-runtime:4.5.3" testCompile "org.elasticsearch.test:framework:${version}" testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: ':modules:reindex', configuration: 'runtime') testCompile project(path: ':modules:parent-join', configuration: 'runtime') diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index 3c7eb6b804b5a..37e0baf00aa71 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -21,10 +21,16 @@ dependencies { compile (project(':libs:x-content')) { transitive = false } + compile (project(':libs:elasticsearch-geo')) { + transitive = false + } compile project(':libs:core') runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" testCompile "org.elasticsearch.test:framework:${version}" testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } } dependencyLicenses { diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java index 52aff352ac182..51a03dad70b55 
100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java @@ -44,7 +44,9 @@ public enum EsType implements SQLType { INTERVAL_DAY_TO_SECOND( ExtraTypes.INTERVAL_DAY_SECOND), INTERVAL_HOUR_TO_MINUTE( ExtraTypes.INTERVAL_HOUR_MINUTE), INTERVAL_HOUR_TO_SECOND( ExtraTypes.INTERVAL_HOUR_SECOND), - INTERVAL_MINUTE_TO_SECOND(ExtraTypes.INTERVAL_MINUTE_SECOND); + INTERVAL_MINUTE_TO_SECOND(ExtraTypes.INTERVAL_MINUTE_SECOND), + GEO_POINT( ExtraTypes.GEOMETRY), + GEO_SHAPE( ExtraTypes.GEOMETRY); private final Integer type; diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/ExtraTypes.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/ExtraTypes.java index 3df70f8e1d956..b8f09ece2f3be 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/ExtraTypes.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/ExtraTypes.java @@ -29,5 +29,6 @@ private ExtraTypes() {} static final int INTERVAL_HOUR_MINUTE = 111; static final int INTERVAL_HOUR_SECOND = 112; static final int INTERVAL_MINUTE_SECOND = 113; + static final int GEOMETRY = 114; } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcColumnInfo.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcColumnInfo.java index 9b1ff87596798..5f2f0773ff17a 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcColumnInfo.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcColumnInfo.java @@ -3,6 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ + package org.elasticsearch.xpack.sql.jdbc; import java.util.Objects; @@ -89,4 +90,4 @@ public boolean equals(Object obj) { public int hashCode() { return Objects.hash(name, type, table, catalog, schema, label, displaySize); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java index 7a9154c10ac4e..c9480dbcb1c2b 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java @@ -35,7 +35,7 @@ / Additional properties can be specified either through the Properties object or in the URL. In case of duplicates, the URL wins. */ //TODO: beef this up for Security/SSL -class JdbcConfiguration extends ConnectionConfiguration { +public class JdbcConfiguration extends ConnectionConfiguration { static final String URL_PREFIX = "jdbc:es://"; public static URI DEFAULT_URI = URI.create("http://localhost:9200/"); @@ -47,7 +47,7 @@ class JdbcConfiguration extends ConnectionConfiguration { // can be out/err/url static final String DEBUG_OUTPUT_DEFAULT = "err"; - static final String TIME_ZONE = "timezone"; + public static final String TIME_ZONE = "timezone"; // follow the JDBC spec and use the JVM default... 
// to avoid inconsistency, the default is picked up once at startup and reused across connections // to cater to the principle of least surprise @@ -57,10 +57,13 @@ class JdbcConfiguration extends ConnectionConfiguration { static final String FIELD_MULTI_VALUE_LENIENCY = "field.multi.value.leniency"; static final String FIELD_MULTI_VALUE_LENIENCY_DEFAULT = "true"; + static final String INDEX_INCLUDE_FROZEN = "index.include.frozen"; + static final String INDEX_INCLUDE_FROZEN_DEFAULT = "false"; + // options that don't change at runtime private static final Set OPTION_NAMES = new LinkedHashSet<>( - Arrays.asList(TIME_ZONE, FIELD_MULTI_VALUE_LENIENCY, DEBUG, DEBUG_OUTPUT)); + Arrays.asList(TIME_ZONE, FIELD_MULTI_VALUE_LENIENCY, INDEX_INCLUDE_FROZEN, DEBUG, DEBUG_OUTPUT)); static { // trigger version initialization @@ -77,6 +80,7 @@ class JdbcConfiguration extends ConnectionConfiguration { // mutable ones private ZoneId zoneId; private boolean fieldMultiValueLeniency; + private boolean includeFrozen; public static JdbcConfiguration create(String u, Properties props, int loginTimeoutSeconds) throws JdbcSQLException { URI uri = parseUrl(u); @@ -159,6 +163,8 @@ private JdbcConfiguration(URI baseURI, String u, Properties props) throws JdbcSQ s -> TimeZone.getTimeZone(s).toZoneId().normalized()); this.fieldMultiValueLeniency = parseValue(FIELD_MULTI_VALUE_LENIENCY, props.getProperty(FIELD_MULTI_VALUE_LENIENCY, FIELD_MULTI_VALUE_LENIENCY_DEFAULT), Boolean::parseBoolean); + this.includeFrozen = parseValue(INDEX_INCLUDE_FROZEN, props.getProperty(INDEX_INCLUDE_FROZEN, INDEX_INCLUDE_FROZEN_DEFAULT), + Boolean::parseBoolean); } @Override @@ -186,6 +192,10 @@ public boolean fieldMultiValueLeniency() { return fieldMultiValueLeniency; } + public boolean indexIncludeFrozen() { + return includeFrozen; + } + public static boolean canAccept(String url) { return (StringUtils.hasText(url) && url.trim().startsWith(JdbcConfiguration.URL_PREFIX)); } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcHttpClient.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcHttpClient.java index 8bf3811ecb742..55d96afaf7105 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcHttpClient.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcHttpClient.java @@ -63,7 +63,8 @@ Cursor query(String sql, List params, RequestMeta meta) thro Boolean.FALSE, null, new RequestInfo(Mode.JDBC), - conCfg.fieldMultiValueLeniency()); + conCfg.fieldMultiValueLeniency(), + conCfg.indexIncludeFrozen()); SqlQueryResponse response = httpClient.query(sqlRequest); return new DefaultCursor(this, response.cursor(), toJdbcColumnInfo(response.columns()), response.rows(), meta); } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java index 041c457d91b3d..39d942362d731 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java @@ -190,7 +190,7 @@ public void setObject(int parameterIndex, Object x) throws SQLException { setParam(parameterIndex, null, EsType.NULL); return; } - + // check also here the unsupported types so that any unsupported interfaces ({@code java.sql.Struct}, // {@code java.sql.Array} etc) will generate the 
correct exception message. Otherwise, the method call // {@code TypeConverter.fromJavaToJDBC(x.getClass())} will report the implementing class as not being supported. @@ -330,7 +330,7 @@ public void setNClob(int parameterIndex, Reader reader, long length) throws SQLE public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException { setObject(parameterIndex, xmlObject); } - + @Override public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { setObject(parameterIndex, x, TypeUtils.asSqlType(targetSqlType), scaleOrLength); @@ -343,13 +343,12 @@ public void setObject(int parameterIndex, Object x, SQLType targetSqlType, int s private void setObject(int parameterIndex, Object x, EsType dataType, String typeString) throws SQLException { checkOpen(); - // set the null value on the type and exit if (x == null) { setParam(parameterIndex, null, dataType); return; } - + checkKnownUnsupportedTypes(x); if (x instanceof byte[]) { if (dataType != EsType.BINARY) { @@ -359,7 +358,7 @@ private void setObject(int parameterIndex, Object x, EsType dataType, String typ setParam(parameterIndex, x, EsType.BINARY); return; } - + if (x instanceof Timestamp || x instanceof Calendar || x instanceof Date @@ -380,7 +379,7 @@ private void setObject(int parameterIndex, Object x, EsType dataType, String typ LocalDateTime ldt = (LocalDateTime) x; Calendar cal = getDefaultCalendar(); cal.set(ldt.getYear(), ldt.getMonthValue() - 1, ldt.getDayOfMonth(), ldt.getHour(), ldt.getMinute(), ldt.getSecond()); - + dateToSet = cal.getTime(); } else if (x instanceof Time) { dateToSet = new java.util.Date(((Time) x).getTime()); @@ -398,7 +397,7 @@ private void setObject(int parameterIndex, Object x, EsType dataType, String typ throw new SQLFeatureNotSupportedException( "Conversion from type [" + x.getClass().getName() + "] to [" + typeString + "] not supported"); } - + if (x instanceof Boolean || x instanceof Byte || x instanceof Short @@ -412,7 +411,7 @@ private void setObject(int parameterIndex, Object x, EsType dataType, String typ dataType); return; } - + throw new SQLFeatureNotSupportedException( "Conversion from type [" + x.getClass().getName() + "] to [" + typeString + "] not supported"); } @@ -421,14 +420,14 @@ private void checkKnownUnsupportedTypes(Object x) throws SQLFeatureNotSupportedE List> unsupportedTypes = new ArrayList<>(Arrays.asList(Struct.class, Array.class, SQLXML.class, RowId.class, Ref.class, Blob.class, NClob.class, Clob.class, LocalDate.class, LocalTime.class, OffsetTime.class, OffsetDateTime.class, URL.class, BigDecimal.class)); - + for (Class clazz:unsupportedTypes) { if (clazz.isAssignableFrom(x.getClass())) { throw new SQLFeatureNotSupportedException("Objects of type [" + clazz.getName() + "] are not supported"); } } } - + private Calendar getDefaultCalendar() { return Calendar.getInstance(cfg.timeZone(), Locale.ROOT); } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java index 9c30241ccbdb1..7e21f2206b1e9 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java @@ -5,13 +5,16 @@ */ package org.elasticsearch.xpack.sql.jdbc; +import org.elasticsearch.geo.utils.WellKnownText; import org.elasticsearch.xpack.sql.proto.StringUtils; +import java.io.IOException; import 
java.sql.Date; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.Time; import java.sql.Timestamp; +import java.text.ParseException; import java.time.Duration; import java.time.LocalDate; import java.time.LocalDateTime; @@ -100,6 +103,7 @@ private static T dateTimeConvert(Long millis, Calendar c, Function tables, String user) throws Exception String tablesOutput = cli.command("SHOW TABLES"); assertThat(tablesOutput, containsString("name")); assertThat(tablesOutput, containsString("type")); - assertEquals("---------------+---------------", cli.readLine()); + assertThat(tablesOutput, containsString("kind")); + assertEquals("---------------+---------------+---------------", cli.readLine()); for (String table : tables) { String line = null; /* diff --git a/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java b/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java index 6a4a2662810e3..0829eed2da32b 100644 --- a/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java +++ b/x-pack/plugin/sql/qa/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java @@ -122,6 +122,7 @@ public void expectShowTables(List tables, String user) throws Exception List columns = new ArrayList<>(); columns.add(columnInfo(mode, "name", "keyword", JDBCType.VARCHAR, 32766)); columns.add(columnInfo(mode, "type", "keyword", JDBCType.VARCHAR, 32766)); + columns.add(columnInfo(mode, "kind", "keyword", JDBCType.VARCHAR, 32766)); Map expected = new HashMap<>(); expected.put("columns", columns); List> rows = new ArrayList<>(); @@ -129,6 +130,7 @@ public void expectShowTables(List tables, String user) throws Exception List fields = new ArrayList<>(); fields.add(table); fields.add("BASE TABLE"); + fields.add("INDEX"); rows.add(fields); } expected.put("rows", rows); diff --git a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java new file mode 100644 index 0000000000000..8f5352304ed50 --- /dev/null +++ b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcCsvSpecIT.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.qa.single_node; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.xpack.sql.qa.geo.GeoCsvSpecTestCase; +import org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.CsvTestCase; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.specParser; + +public class GeoJdbcCsvSpecIT extends GeoCsvSpecTestCase { + + @ParametersFactory(argumentFormatting = PARAM_FORMATTING) + public static List readScriptSpec() throws Exception { + List list = new ArrayList<>(); + list.addAll(GeoCsvSpecTestCase.readScriptSpec()); + list.addAll(readScriptSpec("/single-node-only/command-sys-geo.csv-spec", specParser())); + return list; + } + + public GeoJdbcCsvSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { + super(fileName, groupName, testName, lineNumber, testCase); + } +} diff --git a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java new file mode 100644 index 0000000000000..2a9a1592c71d0 --- /dev/null +++ b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/GeoJdbcSqlSpecIT.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.qa.single_node; + +import org.elasticsearch.xpack.sql.qa.geo.GeoSqlSpecTestCase; + +public class GeoJdbcSqlSpecIT extends GeoSqlSpecTestCase { + public GeoJdbcSqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, String query) { + super(fileName, groupName, testName, lineNumber, query); + } +} diff --git a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java index f742b1304a79e..428bc1c21ef8f 100644 --- a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java +++ b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcCsvSpecIT.java @@ -17,12 +17,12 @@ public class JdbcCsvSpecIT extends CsvSpecTestCase { - @ParametersFactory(argumentFormatting = PARAM_FORMATTING) public static List readScriptSpec() throws Exception { List list = new ArrayList<>(); list.addAll(CsvSpecTestCase.readScriptSpec()); - return readScriptSpec("/single-node-only/command-sys.csv-spec", specParser()); + list.addAll(readScriptSpec("/single-node-only/command-sys.csv-spec", specParser())); + return list; } public JdbcCsvSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { diff --git a/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java new file mode 100644 index 0000000000000..b8506cc5e1e86 --- /dev/null +++ b/x-pack/plugin/sql/qa/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcFrozenCsvSpecIT.java @@ -0,0 +1,42 @@ +/* 
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.qa.single_node; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.sql.qa.jdbc.CsvSpecTestCase; +import org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.CsvTestCase; + +import java.util.List; +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.specParser; + +public class JdbcFrozenCsvSpecIT extends CsvSpecTestCase { + + @ParametersFactory(argumentFormatting = PARAM_FORMATTING) + public static List readScriptSpec() throws Exception { + return readScriptSpec("/slow/frozen.csv-spec", specParser()); + } + + @Override + protected Properties connectionProperties() { + Properties props = new Properties(super.connectionProperties()); + String timeout = String.valueOf(TimeUnit.MINUTES.toMillis(5)); + props.setProperty("connect.timeout", timeout); + props.setProperty("network.timeout", timeout); + props.setProperty("query.timeout", timeout); + props.setProperty("page.timeout", timeout); + + return props; + } + + + public JdbcFrozenCsvSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { + super(fileName, groupName, testName, lineNumber, testCase); + } +} diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoCsvSpecTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoCsvSpecTestCase.java new file mode 100644 index 0000000000000..e40e6de9e3a9c --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoCsvSpecTestCase.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.qa.geo; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.client.Request; +import org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.CsvTestCase; +import org.elasticsearch.xpack.sql.qa.jdbc.SpecBaseIntegrationTestCase; +import org.elasticsearch.xpack.sql.jdbc.JdbcConfiguration; +import org.junit.Before; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; + +import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.csvConnection; +import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.executeCsvQuery; +import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.specParser; + +/** + * Tests comparing sql queries executed against our jdbc client + * with hard coded result sets. 
+ */ +public abstract class GeoCsvSpecTestCase extends SpecBaseIntegrationTestCase { + private final CsvTestCase testCase; + + @ParametersFactory(argumentFormatting = PARAM_FORMATTING) + public static List readScriptSpec() throws Exception { + Parser parser = specParser(); + List tests = new ArrayList<>(); + tests.addAll(readScriptSpec("/ogc/ogc.csv-spec", parser)); + tests.addAll(readScriptSpec("/geo/geosql.csv-spec", parser)); + tests.addAll(readScriptSpec("/docs/geo.csv-spec", parser)); + return tests; + } + + public GeoCsvSpecTestCase(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { + super(fileName, groupName, testName, lineNumber); + this.testCase = testCase; + } + + + @Before + public void setupTestGeoDataIfNeeded() throws Exception { + if (client().performRequest(new Request("HEAD", "/ogc")).getStatusLine().getStatusCode() == 404) { + GeoDataLoader.loadOGCDatasetIntoEs(client(), "ogc"); + } + if (client().performRequest(new Request("HEAD", "/geo")).getStatusLine().getStatusCode() == 404) { + GeoDataLoader.loadGeoDatasetIntoEs(client(), "geo"); + } + } + + @Override + protected final void doTest() throws Throwable { + try (Connection csv = csvConnection(testCase); + Connection es = esJdbc()) { + + // pass the testName as table for debugging purposes (in case the underlying reader is missing) + ResultSet expected = executeCsvQuery(csv, testName); + ResultSet elasticResults = executeJdbcQuery(es, testCase.query); + assertResults(expected, elasticResults); + } + } + + // make sure ES uses UTC (otherwise JDBC driver picks up the JVM timezone per spec/convention) + @Override + protected Properties connectionProperties() { + Properties connectionProperties = new Properties(); + connectionProperties.setProperty(JdbcConfiguration.TIME_ZONE, "UTC"); + return connectionProperties; + } + +} diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoDataLoader.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoDataLoader.java new file mode 100644 index 0000000000000..40e8f64be87cc --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoDataLoader.java @@ -0,0 +1,158 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.qa.geo; + +import org.apache.http.HttpHost; +import org.apache.http.HttpStatus; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.sql.qa.jdbc.SqlSpecTestCase; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.Map; + +import static org.elasticsearch.xpack.sql.qa.jdbc.DataLoader.createString; +import static org.elasticsearch.xpack.sql.qa.jdbc.DataLoader.readFromJarUrl; + +public class GeoDataLoader { + + public static void main(String[] args) throws Exception { + try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) { + loadOGCDatasetIntoEs(client, "ogc"); + loadGeoDatasetIntoEs(client, "geo"); + Loggers.getLogger(GeoDataLoader.class).info("Geo data loaded"); + } + } + + protected static void loadOGCDatasetIntoEs(RestClient client, String index) throws Exception { + createIndex(client, index, createOGCIndexRequest()); + loadData(client, index, readResource("/ogc/ogc.json")); + makeFilteredAlias(client, "lakes", index, "\"term\" : { \"ogc_type\" : \"lakes\" }"); + makeFilteredAlias(client, "road_segments", index, "\"term\" : { \"ogc_type\" : \"road_segments\" }"); + makeFilteredAlias(client, "divided_routes", index, "\"term\" : { \"ogc_type\" : \"divided_routes\" }"); + makeFilteredAlias(client, "forests", index, "\"term\" : { \"ogc_type\" : \"forests\" }"); + makeFilteredAlias(client, "bridges", index, "\"term\" : { \"ogc_type\" : \"bridges\" }"); + makeFilteredAlias(client, "streams", index, "\"term\" : { \"ogc_type\" : \"streams\" }"); + makeFilteredAlias(client, "buildings", index, "\"term\" : { \"ogc_type\" : \"buildings\" }"); + makeFilteredAlias(client, "ponds", index, "\"term\" : { \"ogc_type\" : \"ponds\" }"); + makeFilteredAlias(client, "named_places", index, "\"term\" : { \"ogc_type\" : \"named_places\" }"); + makeFilteredAlias(client, "map_neatlines", index, "\"term\" : { \"ogc_type\" : \"map_neatlines\" }"); + } + + private static String createOGCIndexRequest() throws Exception { + XContentBuilder createIndex = JsonXContent.contentBuilder().startObject(); + createIndex.startObject("settings"); + { + createIndex.field("number_of_shards", 1); + } + createIndex.endObject(); + createIndex.startObject("mappings"); + { + createIndex.startObject("properties"); + { + // Common + createIndex.startObject("ogc_type").field("type", "keyword").endObject(); + createIndex.startObject("fid").field("type", "integer").endObject(); + createString("name", createIndex); + + // Type specific + createIndex.startObject("shore").field("type", "geo_shape").endObject(); // lakes + + createString("aliases", createIndex); // road_segments + createIndex.startObject("num_lanes").field("type", "integer").endObject(); // road_segments, divided_routes + createIndex.startObject("centerline").field("type", "geo_shape").endObject(); // road_segments, streams + + 
createIndex.startObject("centerlines").field("type", "geo_shape").endObject(); // divided_routes + + createIndex.startObject("boundary").field("type", "geo_shape").endObject(); // forests, named_places + + createIndex.startObject("position").field("type", "geo_shape").endObject(); // bridges, buildings + + createString("address", createIndex); // buildings + createIndex.startObject("footprint").field("type", "geo_shape").endObject(); // buildings + + createIndex.startObject("type").field("type", "keyword").endObject(); // ponds + createIndex.startObject("shores").field("type", "geo_shape").endObject(); // ponds + + createIndex.startObject("neatline").field("type", "geo_shape").endObject(); // map_neatlines + + } + createIndex.endObject(); + } + createIndex.endObject().endObject(); + return Strings.toString(createIndex); + } + + private static void createIndex(RestClient client, String index, String settingsMappings) throws IOException { + Request createIndexRequest = new Request("PUT", "/" + index); + createIndexRequest.setEntity(new StringEntity(settingsMappings, ContentType.APPLICATION_JSON)); + client.performRequest(createIndexRequest); + } + + static void loadGeoDatasetIntoEs(RestClient client, String index) throws Exception { + createIndex(client, index, readResource("/geo/geosql.json")); + loadData(client, index, readResource("/geo/geosql-bulk.json")); + } + + private static void loadData(RestClient client, String index, String bulk) throws IOException { + Request request = new Request("POST", "/" + index + "/_bulk"); + request.addParameter("refresh", "true"); + request.setJsonEntity(bulk); + Response response = client.performRequest(request); + + if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) { + throw new RuntimeException("Cannot load data " + response.getStatusLine()); + } + + String bulkResponseStr = EntityUtils.toString(response.getEntity()); + Map bulkResponseMap = XContentHelper.convertToMap(JsonXContent.jsonXContent, bulkResponseStr, false); + + if ((boolean) bulkResponseMap.get("errors")) { + throw new RuntimeException("Failed to load bulk data " + bulkResponseStr); + } + } + + + public static void makeFilteredAlias(RestClient client, String aliasName, String index, String filter) throws Exception { + Request request = new Request("POST", "/" + index + "/_alias/" + aliasName); + request.setJsonEntity("{\"filter\" : { " + filter + " } }"); + client.performRequest(request); + } + + private static String readResource(String location) throws IOException { + URL dataSet = SqlSpecTestCase.class.getResource(location); + if (dataSet == null) { + throw new IllegalArgumentException("Can't find [" + location + "]"); + } + StringBuilder builder = new StringBuilder(); + try (BufferedReader reader = new BufferedReader(new InputStreamReader(readFromJarUrl(dataSet), StandardCharsets.UTF_8))) { + String line = reader.readLine(); + while(line != null) { + if (line.trim().startsWith("//") == false) { + builder.append(line); + builder.append('\n'); + } + line = reader.readLine(); + } + return builder.toString(); + } + } + +} diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoSqlSpecTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoSqlSpecTestCase.java new file mode 100644 index 0000000000000..ec97cab6f10b1 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/geo/GeoSqlSpecTestCase.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.qa.geo; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.client.Request; +import org.elasticsearch.xpack.sql.qa.jdbc.LocalH2; +import org.elasticsearch.xpack.sql.qa.jdbc.SpecBaseIntegrationTestCase; +import org.elasticsearch.xpack.sql.jdbc.JdbcConfiguration; +import org.h2gis.functions.factory.H2GISFunctions; +import org.junit.Before; +import org.junit.ClassRule; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.text.NumberFormat; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Properties; + +/** + * Tests comparing geo sql queries executed against our jdbc client + * with those executed against H2GIS's jdbc client. + */ +public abstract class GeoSqlSpecTestCase extends SpecBaseIntegrationTestCase { + private String query; + + @ClassRule + public static LocalH2 H2 = new LocalH2((c) -> { + assumeTrue("JTS inside H2 is using the default locale for toUpperCase() in string comparison making it fail to parse WKT on certain" + + " locales", "point".toUpperCase(Locale.getDefault()).equals("POINT")); + // Load GIS extensions + H2GISFunctions.load(c); + c.createStatement().execute("RUNSCRIPT FROM 'classpath:/ogc/sqltsch.sql'"); + c.createStatement().execute("RUNSCRIPT FROM 'classpath:/geo/setup_test_geo.sql'"); + }); + + @ParametersFactory(argumentFormatting = PARAM_FORMATTING) + public static List readScriptSpec() throws Exception { + Parser parser = new SqlSpecParser(); + List tests = new ArrayList<>(); + tests.addAll(readScriptSpec("/ogc/ogc.sql-spec", parser)); + tests.addAll(readScriptSpec("/geo/geosql.sql-spec", parser)); + return tests; + } + + @Before + public void setupTestGeoDataIfNeeded() throws Exception { + assumeTrue("Cannot support locales that don't use Hindu-Arabic numerals and non-ascii - sign due to H2", + "-42".equals(NumberFormat.getInstance(Locale.getDefault()).format(-42))); + if (client().performRequest(new Request("HEAD", "/ogc")).getStatusLine().getStatusCode() == 404) { + GeoDataLoader.loadOGCDatasetIntoEs(client(), "ogc"); + } + if (client().performRequest(new Request("HEAD", "/geo")).getStatusLine().getStatusCode() == 404) { + GeoDataLoader.loadGeoDatasetIntoEs(client(), "geo"); + } + } + + + private static class SqlSpecParser implements Parser { + @Override + public Object parse(String line) { + return line.endsWith(";") ?
line.substring(0, line.length() - 1) : line; + } + } + + public GeoSqlSpecTestCase(String fileName, String groupName, String testName, Integer lineNumber, String query) { + super(fileName, groupName, testName, lineNumber); + this.query = query; + } + + @Override + protected final void doTest() throws Throwable { + try (Connection h2 = H2.get(); + Connection es = esJdbc()) { + + ResultSet expected, elasticResults; + expected = executeJdbcQuery(h2, query); + elasticResults = executeJdbcQuery(es, query); + + assertResults(expected, elasticResults); + } + } + + // TODO: use UTC for now until deciding on a strategy for handling date extraction + @Override + protected Properties connectionProperties() { + Properties connectionProperties = new Properties(); + connectionProperties.setProperty(JdbcConfiguration.TIME_ZONE, "UTC"); + return connectionProperties; + } +} diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java index 40ce2416c249c..00bf9030dbd7a 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvSpecTestCase.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.qa.jdbc; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.logging.log4j.Logger; import org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.CsvTestCase; @@ -42,14 +43,8 @@ public CsvSpecTestCase(String fileName, String groupName, String testName, Integ protected final void doTest() throws Throwable { // Run the time tests always in UTC // TODO: https://github.com/elastic/elasticsearch/issues/40779 - if ("time".equals(groupName)) { - try (Connection csv = csvConnection(testCase); Connection es = esJdbc(connectionProperties())) { - executeAndAssert(csv, es); - } - } else { - try (Connection csv = csvConnection(testCase); Connection es = esJdbc()) { - executeAndAssert(csv, es); - } + try (Connection csv = csvConnection(testCase); Connection es = esJdbc()) { + executeAndAssert(csv, es); } } diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvTestUtils.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvTestUtils.java index 6376bd13308d6..daa4e5b4d0c87 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvTestUtils.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/CsvTestUtils.java @@ -46,7 +46,7 @@ private CsvTestUtils() { */ public static ResultSet executeCsvQuery(Connection csv, String csvTableName) throws SQLException { ResultSet expected = csv.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY) - .executeQuery("SELECT * FROM " + csvTableName); + .executeQuery("SELECT * FROM " + csvTableName); // trigger data loading for type inference expected.beforeFirst(); return expected; @@ -187,13 +187,13 @@ public Object parse(String line) { } else { if (line.endsWith(";")) { - // pick up the query - testCase = new CsvTestCase(); - query.append(line.substring(0, line.length() - 1).trim()); - testCase.query = query.toString(); - testCase.earlySchema = earlySchema.toString(); - earlySchema.setLength(0); - query.setLength(0); + // pick up the query + testCase = new CsvTestCase(); + query.append(line.substring(0, line.length() - 1).trim()); + testCase.query = query.toString(); + 
testCase.earlySchema = earlySchema.toString(); + earlySchema.setLength(0); + query.setLength(0); } // keep reading the query else { diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java index b12203294c158..ff50a33a0afe8 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/DataLoader.java @@ -49,15 +49,21 @@ protected static void loadEmpDatasetIntoEs(RestClient client) throws Exception { loadLogsDatasetIntoEs(client, "logs", "logs"); makeAlias(client, "test_alias", "test_emp", "test_emp_copy"); makeAlias(client, "test_alias_emp", "test_emp", "test_emp_copy"); + // frozen index + loadEmpDatasetIntoEs(client, "frozen_emp", "employees"); + freeze(client, "frozen_emp"); } public static void loadDocsDatasetIntoEs(RestClient client) throws Exception { loadEmpDatasetIntoEs(client, "emp", "employees"); loadLibDatasetIntoEs(client, "library"); makeAlias(client, "employees", "emp"); + // frozen index + loadLibDatasetIntoEs(client, "archive"); + freeze(client, "archive"); } - private static void createString(String name, XContentBuilder builder) throws Exception { + public static void createString(String name, XContentBuilder builder) throws Exception { builder.startObject(name).field("type", "text") .startObject("fields") .startObject("keyword").field("type", "keyword").endObject() @@ -286,12 +292,18 @@ protected static void loadLibDatasetIntoEs(RestClient client, String index) thro Response response = client.performRequest(request); } - protected static void makeAlias(RestClient client, String aliasName, String... indices) throws Exception { + public static void makeAlias(RestClient client, String aliasName, String... indices) throws Exception { for (String index : indices) { client.performRequest(new Request("POST", "/" + index + "/_alias/" + aliasName)); } } + protected static void freeze(RestClient client, String... 
indices) throws Exception { + for (String index : indices) { + client.performRequest(new Request("POST", "/" + index + "/_freeze")); + } + } + private static void csvToLines(String name, CheckedBiConsumer<List<String>, List<String>, Exception> consumeLine) throws Exception { String location = "/" + name + ".csv"; URL dataSet = SqlSpecTestCase.class.getResource(location); diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java index 8931fe0264e9d..76894fc5a53d5 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcAssert.java @@ -8,18 +8,25 @@ import com.carrotsearch.hppc.IntObjectHashMap; import org.apache.logging.log4j.Logger; +import org.elasticsearch.geo.geometry.Geometry; +import org.elasticsearch.geo.geometry.Point; +import org.elasticsearch.geo.utils.WellKnownText; import org.elasticsearch.xpack.sql.jdbc.EsType; import org.elasticsearch.xpack.sql.proto.StringUtils; import org.relique.jdbc.csv.CsvResultSet; +import java.io.IOException; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Types; +import java.text.ParseException; import java.time.temporal.TemporalAmount; import java.util.ArrayList; +import java.util.Calendar; import java.util.List; import java.util.Locale; +import java.util.TimeZone; import static java.lang.String.format; import static java.sql.Types.BIGINT; @@ -29,6 +36,8 @@ import static java.sql.Types.REAL; import static java.sql.Types.SMALLINT; import static java.sql.Types.TINYINT; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; @@ -38,6 +47,7 @@ * Utility class for doing JUnit-style asserts over JDBC. */ public class JdbcAssert { + private static final Calendar UTC_CALENDAR = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT); private static final IntObjectHashMap<EsType> SQL_TO_TYPE = new IntObjectHashMap<>(); @@ -139,6 +149,11 @@ public static void assertResultSetMetadata(ResultSet expected, ResultSet actual, expectedType = Types.TIMESTAMP; } + // H2 treats GEOMETRY as OTHER + if (expectedType == Types.OTHER && nameOf(actualType).startsWith("GEO_") ) { + actualType = Types.OTHER; + } + // since csv doesn't support real, we use float instead..... if (expectedType == Types.FLOAT && expected instanceof CsvResultSet) { expectedType = Types.REAL; @@ -251,6 +266,24 @@ else if (type == Types.DOUBLE) { assertEquals(msg, (double) expectedObject, (double) actualObject, lenientFloatingNumbers ? 1d : 0.0d); } else if (type == Types.FLOAT) { assertEquals(msg, (float) expectedObject, (float) actualObject, lenientFloatingNumbers ?
1f : 0.0f); + } else if (type == Types.OTHER) { + if (actualObject instanceof Geometry) { + // We need to convert the expected object to libs/geo Geometry for comparison + try { + expectedObject = WellKnownText.fromWKT(expectedObject.toString()); + } catch (IOException | ParseException ex) { + fail(ex.getMessage()); + } + } + if (actualObject instanceof Point) { + // geo points are loaded from doc values where they are stored as long-encoded values, leading + // to a loss of precision + assertThat(expectedObject, instanceOf(Point.class)); + assertEquals(((Point) expectedObject).getLat(), ((Point) actualObject).getLat(), 0.000001d); + assertEquals(((Point) expectedObject).getLon(), ((Point) actualObject).getLon(), 0.000001d); + } else { + assertEquals(msg, expectedObject, actualObject); + } } // intervals else if (type == Types.VARCHAR && actualObject instanceof TemporalAmount) { diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/LocalH2.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/LocalH2.java index e6295985cf519..2f3ce7eaddd88 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/LocalH2.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/LocalH2.java @@ -81,4 +81,4 @@ protected void after() { public Connection get() throws SQLException { return DriverManager.getConnection(url, DEFAULTS); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ShowTablesTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ShowTablesTestCase.java index ab2ddc9a7fe24..4a6882685a928 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ShowTablesTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ShowTablesTestCase.java @@ -30,7 +30,7 @@ public void testShowTablesWithManyIndices() throws Exception { for (int i = 0; i < indices; i++) { String index = String.format(Locale.ROOT, "test%02d", i); index(index, builder -> builder.field("name", "bob")); - h2.createStatement().executeUpdate("INSERT INTO mock VALUES ('" + index + "', 'BASE TABLE');"); + h2.createStatement().executeUpdate("INSERT INTO mock VALUES ('" + index + "', 'BASE TABLE', 'INDEX');"); } ResultSet expected = h2.createStatement().executeQuery("SELECT * FROM mock ORDER BY name"); diff --git a/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec index 4134db187c9a6..e18a5ab7efcce 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec @@ -82,20 +82,20 @@ salary |INTEGER |integer showAlias SHOW TABLES LIKE 'test\_alias' ESCAPE '\'; -name:s | type:s +name:s | type:s | kind:s -test_alias | VIEW +test_alias | VIEW | ALIAS ; showPattern SHOW TABLES LIKE 'test_%'; -name:s | type:s +name:s | type:s | kind :s -test_alias | VIEW -test_alias_emp | VIEW -test_emp | BASE TABLE -test_emp_copy | BASE TABLE +test_alias | VIEW | ALIAS +test_alias_emp | VIEW | ALIAS +test_emp | BASE TABLE | INDEX +test_emp_copy | BASE TABLE | INDEX ; groupByOnAlias diff --git a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec index f60f686cb8a3d..073788511d0f0 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec @@ -129,11
+129,20 @@ RIGHT |SCALAR RTRIM |SCALAR SPACE |SCALAR SUBSTRING |SCALAR -UCASE |SCALAR +UCASE |SCALAR CAST |SCALAR CONVERT |SCALAR DATABASE |SCALAR USER |SCALAR +ST_ASTEXT |SCALAR +ST_ASWKT |SCALAR +ST_DISTANCE |SCALAR +ST_GEOMETRYTYPE |SCALAR +ST_GEOMFROMTEXT |SCALAR +ST_WKTTOSQL |SCALAR +ST_X |SCALAR +ST_Y |SCALAR +ST_Z |SCALAR SCORE |SCORE ; @@ -189,49 +198,49 @@ TODAY |SCALAR showTables SHOW TABLES; - name | type -logs |BASE TABLE -test_alias |VIEW -test_alias_emp |VIEW -test_emp |BASE TABLE -test_emp_copy |BASE TABLE + name | type | kind +logs |BASE TABLE |INDEX +test_alias |VIEW |ALIAS +test_alias_emp |VIEW |ALIAS +test_emp |BASE TABLE |INDEX +test_emp_copy |BASE TABLE |INDEX ; showTablesSimpleLike SHOW TABLES LIKE 'test_emp'; - name:s | type:s -test_emp |BASE TABLE + name:s | type:s | kind:s +test_emp |BASE TABLE |INDEX ; showTablesMultiLike SHOW TABLES LIKE 'test_emp%'; - name:s | type:s -test_emp |BASE TABLE -test_emp_copy |BASE TABLE + name:s | type:s |kind:s +test_emp |BASE TABLE |INDEX +test_emp_copy |BASE TABLE |INDEX ; showTablesIdentifier SHOW TABLES "test_emp"; - name:s | type:s -test_emp |BASE TABLE + name:s | type:s |kind:s +test_emp |BASE TABLE |INDEX ; showTablesIdentifierPattern SHOW TABLES "test_e*,-test_emp"; - name:s | type:s -test_emp_copy |BASE TABLE + name:s | type:s |kind:s +test_emp_copy |BASE TABLE |INDEX ; showTablesIdentifierPatternOnAliases SHOW TABLES "test*,-test_emp*"; - name:s | type:s -test_alias |VIEW -test_alias_emp |VIEW + name:s | type:s | kind:s +test_alias |VIEW |ALIAS +test_alias_emp |VIEW |ALIAS ; // DESCRIBE diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec index 8d9a65d1b85b6..bfb28775bc3b6 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec @@ -182,6 +182,26 @@ SELECT -2 * INTERVAL '1 23:45' DAY TO MINUTES AS result; -3 23:30:00.0 ; +intervalHoursMultiply +SELECT 4 * -INTERVAL '2' HOURS AS result1, -5 * -INTERVAL '3' HOURS AS result2; + result1 | result2 +---------------+-------------- +-0 08:00:00.0 | +0 15:00:00.0 +; + +intervalAndFieldMultiply +schema::languages:byte|result:string +SELECT languages, CAST (languages * INTERVAL '1 10:30' DAY TO MINUTES AS string) AS result FROM test_emp ORDER BY emp_no LIMIT 5; + + languages | result +---------------+--------------------------------------------- +2 | +2 21:00:00.0 +5 | +7 04:30:00.0 +4 | +5 18:00:00.0 +5 | +7 04:30:00.0 +1 | +1 10:30:00.0 +; + dateMinusInterval SELECT CAST('2018-05-13T12:34:56' AS DATETIME) - INTERVAL '2-8' YEAR TO MONTH AS result; diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec index c181023db03eb..936c7eef88191 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/docs.csv-spec @@ -93,11 +93,11 @@ showTables // tag::showTables SHOW TABLES; - name | type ----------------+--------------- -emp |BASE TABLE -employees |VIEW -library |BASE TABLE + name | type | kind +---------------+---------------+--------------- +emp |BASE TABLE |INDEX +employees |VIEW |ALIAS +library |BASE TABLE |INDEX // end::showTables ; @@ -106,9 +106,9 @@ showTablesLikeExact // tag::showTablesLikeExact SHOW TABLES LIKE 'emp'; - name | type ----------------+--------------- -emp |BASE TABLE + name | type | kind +---------------+---------------+--------------- 
+emp |BASE TABLE |INDEX // end::showTablesLikeExact ; @@ -117,10 +117,10 @@ showTablesLikeWildcard // tag::showTablesLikeWildcard SHOW TABLES LIKE 'emp%'; - name | type ----------------+--------------- -emp |BASE TABLE -employees |VIEW + name | type | kind +---------------+---------------+--------------- +emp |BASE TABLE |INDEX +employees |VIEW |ALIAS // end::showTablesLikeWildcard ; @@ -130,9 +130,9 @@ showTablesLikeOneChar // tag::showTablesLikeOneChar SHOW TABLES LIKE 'em_'; - name | type ----------------+--------------- -emp |BASE TABLE + name | type | kind +---------------+---------------+--------------- +emp |BASE TABLE |INDEX // end::showTablesLikeOneChar ; @@ -141,20 +141,20 @@ showTablesLikeMixed // tag::showTablesLikeMixed SHOW TABLES LIKE '%em_'; - name | type ----------------+--------------- -emp |BASE TABLE + name | type | kind +---------------+---------------+--------------- +emp |BASE TABLE |INDEX // end::showTablesLikeMixed ; showTablesLikeEscape -schema::name:s|type:s +schema::name:s|type:s|kind:s // tag::showTablesLikeEscape SHOW TABLES LIKE 'emp!%' ESCAPE '!'; - name | type ----------------+--------------- + name | type | kind +---------------+---------------+--------------- // end::showTablesLikeEscape ; @@ -164,14 +164,33 @@ showTablesEsMultiIndex // tag::showTablesEsMultiIndex SHOW TABLES "*,-l*"; - name | type ----------------+--------------- -emp |BASE TABLE -employees |VIEW + name | type | kind +---------------+---------------+--------------- +emp |BASE TABLE |INDEX +employees |VIEW |ALIAS // end::showTablesEsMultiIndex ; + +// +// include FROZEN +// +showTablesIncludeFrozen +// tag::showTablesIncludeFrozen +SHOW TABLES INCLUDE FROZEN; + + name | type | kind +---------------+---------------+--------------- +archive |BASE TABLE |FROZEN INDEX +emp |BASE TABLE |INDEX +employees |VIEW |ALIAS +library |BASE TABLE |INDEX + +// end::showTablesIncludeFrozen +; + + /////////////////////////////// // // Show Functions @@ -182,7 +201,7 @@ showFunctions // tag::showFunctions SHOW FUNCTIONS; - name | type + name | type -----------------+--------------- AVG |AGGREGATE COUNT |AGGREGATE @@ -306,13 +325,21 @@ RIGHT |SCALAR RTRIM |SCALAR SPACE |SCALAR SUBSTRING |SCALAR -UCASE |SCALAR +UCASE |SCALAR CAST |SCALAR CONVERT |SCALAR DATABASE |SCALAR USER |SCALAR +ST_ASTEXT |SCALAR +ST_ASWKT |SCALAR +ST_DISTANCE |SCALAR +ST_GEOMETRYTYPE |SCALAR +ST_GEOMFROMTEXT |SCALAR +ST_WKTTOSQL |SCALAR +ST_X |SCALAR +ST_Y |SCALAR +ST_Z |SCALAR SCORE |SCORE - // end::showFunctions ; @@ -463,6 +490,17 @@ SELECT * FROM "emp" LIMIT 1; // end::fromTableQuoted ; +fromTableIncludeFrozen +// tag::fromTableIncludeFrozen +SELECT * FROM FROZEN archive LIMIT 1; + + author | name | page_count | release_date +-----------------+--------------------+---------------+-------------------- +James S.A. 
Corey |Leviathan Wakes |561 |2011-06-02T00:00:00Z + +// end::fromTableIncludeFrozen +; + fromTableQuoted // tag::fromTablePatternQuoted SELECT emp_no FROM "e*p" LIMIT 1; diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs/geo.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs/geo.csv-spec new file mode 100644 index 0000000000000..60fbebfc13950 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/docs/geo.csv-spec @@ -0,0 +1,79 @@ +// +// CSV spec used by the geo docs +// + +/////////////////////////////// +// +// ST_AsWKT() +// +/////////////////////////////// + +selectAsWKT +// tag::aswkt +SELECT city, ST_AsWKT(location) location FROM "geo" WHERE city = 'Amsterdam'; + + city:s | location:s +Amsterdam |point (4.850311987102032 52.347556999884546) +// end::aswkt +; + +selectWKTToSQL +// tag::wkttosql +SELECT CAST(ST_WKTToSQL('POINT (10 20)') AS STRING) location; + + location:s +point (10.0 20.0) +// end::wkttosql +; + + +selectDistance +// tag::distance +SELECT ST_Distance(ST_WKTToSQL('POINT (10 20)'), ST_WKTToSQL('POINT (20 30)')) distance; + + distance:d +1499101.2889383635 +// end::distance +; + +/////////////////////////////// +// +// Geometry Properties +// +/////////////////////////////// + +selectGeometryType +// tag::geometrytype +SELECT ST_GeometryType(ST_WKTToSQL('POINT (10 20)')) type; + + type:s +POINT +// end::geometrytype +; + +selectX +// tag::x +SELECT ST_X(ST_WKTToSQL('POINT (10 20)')) x; + + x:d +10.0 +// end::x +; + +selectY +// tag::y +SELECT ST_Y(ST_WKTToSQL('POINT (10 20)')) y; + + y:d +20.0 +// end::y +; + +selectZ +// tag::z +SELECT ST_Z(ST_WKTToSQL('POINT (10 20 30)')) z; + + z:d +30.0 +// end::z +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geo.csv b/x-pack/plugin/sql/qa/src/main/resources/geo/geo.csv new file mode 100644 index 0000000000000..8275bd7c884ef --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geo.csv @@ -0,0 +1,16 @@ +city,region,region_point,location,shape +Mountain View,Americas,POINT(-105.2551 54.5260),point (-122.083843 37.386483),point (-122.083843 37.386483) +Chicago,Americas,POINT(-105.2551 54.5260),point (-87.637874 41.888783),point (-87.637874 41.888783) +New York,Americas,POINT(-105.2551 54.5260),point (-73.990027 40.745171),point (-73.990027 40.745171) +San Francisco,Americas,POINT(-105.2551 54.5260),point (-122.394228 37.789541),point (-122.394228 37.789541) +Phoenix,Americas,POINT(-105.2551 54.5260),point (-111.973505 33.376242),point (-111.973505 33.376242) +Amsterdam,Europe,POINT(15.2551 54.5260),point (4.850312 52.347557),point (4.850312 52.347557) +Berlin,Europe,POINT(15.2551 54.5260),point (13.390889 52.486701),point (13.390889 52.486701) +Munich,Europe,POINT(15.2551 54.5260),point (11.537505 48.146321),point (11.537505 48.146321) +London,Europe,POINT(15.2551 54.5260),point (-0.121672 51.510871),point (-0.121672 51.510871) +Paris,Europe,POINT(15.2551 54.5260),point (2.351773 48.845538),point (2.351773 48.845538) +Singapore,Asia,POINT(100.6197 34.0479),point (103.855535 1.295868),point (103.855535 1.295868) +Hong Kong,Asia,POINT(100.6197 34.0479),point (114.183925 22.281397),point (114.183925 22.281397) +Seoul,Asia,POINT(100.6197 34.0479),point (127.060851 37.509132),point (127.060851 37.509132) +Tokyo,Asia,POINT(100.6197 34.0479),point (139.76402225 35.669616),point (139.76402225 35.669616) +Sydney,Asia,POINT(100.6197 34.0479),point (151.208629 -33.863385),point (151.208629 -33.863385) diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql-bulk.json 
b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql-bulk.json new file mode 100644 index 0000000000000..8c65742aac063 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql-bulk.json @@ -0,0 +1,33 @@ +{"index":{"_id": "1"}} +{"region": "Americas", "city": "Mountain View", "location": {"lat":"37.386483", "lon":"-122.083843"}, "location_no_dv": {"lat":"37.386483", "lon":"-122.083843"}, "shape": "POINT (-122.083843 37.386483 30)", "region_point": "POINT(-105.2551 54.5260)"} +{"index":{"_id": "2"}} +{"region": "Americas", "city": "Chicago", "location": [-87.637874, 41.888783], "location_no_dv": [-87.637874, 41.888783], "shape": {"type" : "point", "coordinates" : [-87.637874, 41.888783, 181]}, "region_point": "POINT(-105.2551 54.5260)"} +{"index":{"_id": "3"}} +{"region": "Americas", "city": "New York", "location": "40.745171,-73.990027", "location_no_dv": "40.745171,-73.990027", "shape": "POINT (-73.990027 40.745171 10)", "region_point": "POINT(-105.2551 54.5260)"} +{"index":{"_id": "4"}} +{"region": "Americas", "city": "San Francisco", "location": "37.789541,-122.394228", "location_no_dv": "37.789541,-122.394228", "shape": "POINT (-122.394228 37.789541 16)", "region_point": "POINT(-105.2551 54.5260)"} +{"index":{"_id": "5"}} +{"region": "Americas", "city": "Phoenix", "location": "33.376242,-111.973505", "location_no_dv": "33.376242,-111.973505", "shape": "POINT (-111.973505 33.376242 331)", "region_point": "POINT(-105.2551 54.5260)"} +{"index":{"_id": "6"}} +{"region": "Europe", "city": "Amsterdam", "location": "52.347557,4.850312", "location_no_dv": "52.347557,4.850312", "shape": "POINT (4.850312 52.347557 2)", "region_point": "POINT(15.2551 54.5260)"} +{"index":{"_id": "7"}} +{"region": "Europe", "city": "Berlin", "location": "52.486701,13.390889", "location_no_dv": "52.486701,13.390889", "shape": "POINT (13.390889 52.486701 34)", "region_point": "POINT(15.2551 54.5260)"} +{"index":{"_id": "8"}} +{"region": "Europe", "city": "Munich", "location": "48.146321,11.537505", "location_no_dv": "48.146321,11.537505", "shape": "POINT (11.537505 48.146321 519)", "region_point": "POINT(15.2551 54.5260)"} +{"index":{"_id": "9"}} +{"region": "Europe", "city": "London", "location": "51.510871,-0.121672", "location_no_dv": "51.510871,-0.121672", "shape": "POINT (-0.121672 51.510871 11)", "region_point": "POINT(15.2551 54.5260)"} +{"index":{"_id": "10"}} +{"region": "Europe", "city": "Paris", "location": "48.845538,2.351773", "location_no_dv": "48.845538,2.351773", "shape": "POINT (2.351773 48.845538 35)", "region_point": "POINT(15.2551 54.5260)"} +{"index":{"_id": "11"}} +{"region": "Asia", "city": "Singapore", "location": "1.295868,103.855535", "location_no_dv": "1.295868,103.855535", "shape": "POINT (103.855535 1.295868 15)", "region_point": "POINT(100.6197 34.0479)"} +{"index":{"_id": "12"}} +{"region": "Asia", "city": "Hong Kong", "location": "22.281397,114.183925", "location_no_dv": "22.281397,114.183925", "shape": "POINT (114.183925 22.281397 552)", "region_point": "POINT(100.6197 34.0479)"} +{"index":{"_id": "13"}} +{"region": "Asia", "city": "Seoul", "location": "37.509132,127.060851", "location_no_dv": "37.509132,127.060851", "shape": "POINT (127.060851 37.509132 38)", "region_point": "POINT(100.6197 34.0479)"} +{"index":{"_id": "14"}} +{"region": "Asia", "city": "Tokyo", "location": "35.669616,139.76402225", "location_no_dv": "35.669616,139.76402225", "shape": "POINT (139.76402225 35.669616 40)", "region_point": "POINT(100.6197 34.0479)"} +{"index":{"_id": "15"}} +{"region": 
"Asia", "city": "Sydney", "location": "-33.863385,151.208629", "location_no_dv": "-33.863385,151.208629", "shape": "POINT (151.208629 -33.863385 100)", "region_point": "POINT(100.6197 34.0479)"} + + + diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.csv-spec new file mode 100644 index 0000000000000..31f3857216c0b --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.csv-spec @@ -0,0 +1,288 @@ +// +// Commands on geo test data +// + +showTables +SHOW TABLES "geo"; + + name:s | type:s | kind:s +geo |BASE TABLE |INDEX +; + +// DESCRIBE + +describe +DESCRIBE "geo"; + + column:s | type:s | mapping:s +city | VARCHAR | keyword +location | GEOMETRY | geo_point +location_no_dv | GEOMETRY | geo_point +region | VARCHAR | keyword +region_point | VARCHAR | keyword +shape | GEOMETRY | geo_shape +; + +// SELECT ALL +// TODO: For now we just get geopoint formatted as is and we also need to convert it to STRING to work with CSV + +selectAllPointsAsStrings +SELECT city, CAST(location AS STRING) location, CAST(location_no_dv AS STRING) location_no_dv, CAST(shape AS STRING) shape, region FROM "geo" ORDER BY "city"; + + city:s | location:s | location_no_dv:s | shape:s | region:s +Amsterdam |point (4.850311987102032 52.347556999884546) |point (4.850312 52.347557) |point (4.850312 52.347557 2.0) |Europe +Berlin |point (13.390888944268227 52.48670099303126) |point (13.390889 52.486701) |point (13.390889 52.486701 34.0) |Europe +Chicago |point (-87.63787407428026 41.888782968744636) |point (-87.637874 41.888783) |point (-87.637874 41.888783 181.0) |Americas +Hong Kong |point (114.18392493389547 22.28139698971063) |point (114.183925 22.281397) |point (114.183925 22.281397 552.0) |Asia +London |point (-0.12167204171419144 51.51087098289281)|point (-0.121672 51.510871) |point (-0.121672 51.510871 11.0) |Europe +Mountain View |point (-122.08384302444756 37.38648299127817) |point (-122.083843 37.386483) |point (-122.083843 37.386483 30.0) |Americas +Munich |point (11.537504978477955 48.14632098656148) |point (11.537505 48.146321) |point (11.537505 48.146321 519.0) |Europe +New York |point (-73.9900270756334 40.74517097789794) |point (-73.990027 40.745171) |point (-73.990027 40.745171 10.0) |Americas +Paris |point (2.3517729341983795 48.84553796611726) |point (2.351773 48.845538) |point (2.351773 48.845538 35.0) |Europe +Phoenix |point (-111.97350500151515 33.37624196894467) |point (-111.973505 33.376242) |point (-111.973505 33.376242 331.0) |Americas +San Francisco |point (-122.39422800019383 37.789540970698) |point (-122.394228 37.789541) |point (-122.394228 37.789541 16.0) |Americas +Seoul |point (127.06085099838674 37.50913198571652) |point (127.060851 37.509132) |point (127.060851 37.509132 38.0) |Asia +Singapore |point (103.8555349688977 1.2958679627627134) |point (103.855535 1.295868) |point (103.855535 1.295868 15.0) |Asia +Sydney |point (151.20862897485495 -33.863385021686554)|point (151.208629 -33.863385) |point (151.208629 -33.863385 100.0) |Asia +Tokyo |point (139.76402222178876 35.66961596254259) |point (139.76402225 35.669616)|point (139.76402225 35.669616 40.0) |Asia +; + +// TODO: Both shape and location contain the same data for now, we should change it later to make things more interesting +selectAllPointsAsWKT +SELECT city, ST_ASWKT(location) location_wkt, ST_ASWKT(shape) shape_wkt, region FROM "geo" ORDER BY "city"; + + city:s | location_wkt:s | shape_wkt:s | region:s +Amsterdam |point (4.850311987102032 
52.347556999884546) |point (4.850312 52.347557 2.0) |Europe +Berlin |point (13.390888944268227 52.48670099303126) |point (13.390889 52.486701 34.0) |Europe +Chicago |point (-87.63787407428026 41.888782968744636) |point (-87.637874 41.888783 181.0) |Americas +Hong Kong |point (114.18392493389547 22.28139698971063) |point (114.183925 22.281397 552.0) |Asia +London |point (-0.12167204171419144 51.51087098289281)|point (-0.121672 51.510871 11.0) |Europe +Mountain View |point (-122.08384302444756 37.38648299127817) |point (-122.083843 37.386483 30.0) |Americas +Munich |point (11.537504978477955 48.14632098656148) |point (11.537505 48.146321 519.0) |Europe +New York |point (-73.9900270756334 40.74517097789794) |point (-73.990027 40.745171 10.0) |Americas +Paris |point (2.3517729341983795 48.84553796611726) |point (2.351773 48.845538 35.0) |Europe +Phoenix |point (-111.97350500151515 33.37624196894467) |point (-111.973505 33.376242 331.0) |Americas +San Francisco |point (-122.39422800019383 37.789540970698) |point (-122.394228 37.789541 16.0) |Americas +Seoul |point (127.06085099838674 37.50913198571652) |point (127.060851 37.509132 38.0) |Asia +Singapore |point (103.8555349688977 1.2958679627627134) |point (103.855535 1.295868 15.0) |Asia +Sydney |point (151.20862897485495 -33.863385021686554)|point (151.208629 -33.863385 100.0) |Asia +Tokyo |point (139.76402222178876 35.66961596254259) |point (139.76402225 35.669616 40.0) |Asia +; + +selectWithAsWKTInWhere +SELECT city, ST_ASWKT(location) location_wkt, region FROM "geo" WHERE LOCATE('114', ST_ASWKT(location)) > 0 ORDER BY "city"; + + city:s | location_wkt:s | region:s +Hong Kong |point (114.18392493389547 22.28139698971063)|Asia +; + +selectAllPointsOrderByLonFromAsWKT +SELECT city, SUBSTRING(ST_ASWKT(location), 8, LOCATE(' ', ST_ASWKT(location), 8) - 8) lon FROM "geo" ORDER BY lon; + + city:s | lon:s +London |-0.12167204171419144 +Phoenix |-111.97350500151515 +Mountain View |-122.08384302444756 +San Francisco |-122.39422800019383 +New York |-73.9900270756334 +Chicago |-87.63787407428026 +Singapore |103.8555349688977 +Munich |11.537504978477955 +Hong Kong |114.18392493389547 +Seoul |127.06085099838674 +Berlin |13.390888944268227 +Tokyo |139.76402222178876 +Sydney |151.20862897485495 +Paris |2.3517729341983795 +Amsterdam |4.850311987102032 +; + +selectAllPointsGroupByHemisphereFromAsWKT +SELECT COUNT(city) count, CAST(SUBSTRING(ST_ASWKT(location), 8, 1) = '-' AS STRING) west FROM "geo" GROUP BY west ORDER BY west; + + count:l | west:s +9 |false +6 |true +; + +selectRegionUsingWktToSql +SELECT region, city, ST_ASWKT(ST_WKTTOSQL(region_point)) region_wkt FROM geo ORDER BY region, city; + + region:s | city:s | region_wkt:s +Americas |Chicago |point (-105.2551 54.526) +Americas |Mountain View |point (-105.2551 54.526) +Americas |New York |point (-105.2551 54.526) +Americas |Phoenix |point (-105.2551 54.526) +Americas |San Francisco |point (-105.2551 54.526) +Asia |Hong Kong |point (100.6197 34.0479) +Asia |Seoul |point (100.6197 34.0479) +Asia |Singapore |point (100.6197 34.0479) +Asia |Sydney |point (100.6197 34.0479) +Asia |Tokyo |point (100.6197 34.0479) +Europe |Amsterdam |point (15.2551 54.526) +Europe |Berlin |point (15.2551 54.526) +Europe |London |point (15.2551 54.526) +Europe |Munich |point (15.2551 54.526) +Europe |Paris |point (15.2551 54.526) +; + +selectCitiesWithAGroupByWktToSql +SELECT COUNT(city) city_by_region, CAST(ST_WKTTOSQL(region_point) AS STRING) region FROM geo WHERE city LIKE '%a%' GROUP BY ST_WKTTOSQL(region_point) ORDER 
BY ST_WKTTOSQL(region_point); + + city_by_region:l | region:s +3 |point (-105.2551 54.526) +1 |point (100.6197 34.0479) +2 |point (15.2551 54.526) +; + +selectCitiesWithEOrderByWktToSql +SELECT region, city FROM geo WHERE city LIKE '%e%' ORDER BY ST_WKTTOSQL(region_point), city; + + region:s | city:s +Americas |Mountain View +Americas |New York +Americas |Phoenix +Asia |Seoul +Asia |Singapore +Asia |Sydney +Europe |Amsterdam +Europe |Berlin +; + + +selectCitiesByDistance +SELECT region, city, ST_Distance(location, ST_WktToSQL('POINT (-71 42)')) distance FROM geo WHERE distance < 5000000 ORDER BY region, city; + + region:s | city:s | distance:d +Americas |Chicago |1373941.5140200066 +Americas |Mountain View |4335936.909375596 +Americas |New York |285839.6579622518 +Americas |Phoenix |3692895.0346903414 +Americas |San Francisco |4343565.010996301 +; + +selectCitiesByDistanceFloored +SELECT region, city, FLOOR(ST_Distance(location, ST_WktToSQL('POINT (-71 42)'))) distance FROM geo WHERE distance < 5000000 ORDER BY region, city; + + region:s | city:s | distance:l +Americas |Chicago |1373941 +Americas |Mountain View |4335936 +Americas |New York |285839 +Americas |Phoenix |3692895 +Americas |San Francisco |4343565 +; + +selectCitiesOrderByDistance +SELECT region, city FROM geo ORDER BY ST_Distance(location, ST_WktToSQL('POINT (-71 42)')) ; + + region:s | city:s +Americas |New York +Americas |Chicago +Americas |Phoenix +Americas |Mountain View +Americas |San Francisco +Europe |London +Europe |Paris +Europe |Amsterdam +Europe |Berlin +Europe |Munich +Asia |Tokyo +Asia |Seoul +Asia |Hong Kong +Asia |Singapore +Asia |Sydney +; + +groupCitiesByDistance +SELECT COUNT(*) count, FIRST(region) region FROM geo GROUP BY FLOOR(ST_Distance(location, ST_WktToSQL('POINT (-71 42)'))/5000000); + + count:l | region:s +5 |Americas +5 |Europe +3 |Asia +2 |Asia +; + +selectWktToSqlOfNull +SELECT ST_ASWKT(ST_WktToSql(NULL)) shape; + shape:s +null +; + +selectWktToSqlOfNull +SELECT ST_Distance(ST_WktToSql(NULL), ST_WktToSQL('POINT (-71 42)')) shape; + shape:d +null +; + +groupByGeometryType +SELECT COUNT(*) cnt, ST_GeometryType(location) gt FROM geo GROUP BY ST_GeometryType(location); + + cnt:l | gt:s +15 |POINT +; + + +groupAndOrderByGeometryType +SELECT COUNT(*) cnt, ST_GeometryType(location) gt FROM geo GROUP BY gt ORDER BY gt; + + cnt:l | gt:s +15 |POINT +; + +groupByEastWest +SELECT COUNT(*) cnt, FLOOR(ST_X(location)/90) east FROM geo GROUP BY east ORDER BY east; + + cnt:l | east:l +3 |-2 +3 |-1 +4 |0 +5 |1 +; + +groupByNorthSouth +SELECT COUNT(*) cnt, FLOOR(ST_Y(location)/45) north FROM geo GROUP BY north ORDER BY north; + + cnt:l | north:l +1 |-1 +9 |0 +5 |1 +; + +groupByNorthEastSortByEastNorth +SELECT COUNT(*) cnt, FLOOR(ST_Y(location)/45) north, FLOOR(ST_X(location)/90) east FROM geo GROUP BY north, east ORDER BY east, north; + + cnt:l | north:l | east:l +3 |0 |-2 +2 |0 |-1 +1 |1 |-1 +4 |1 |0 +1 |-1 |1 +4 |0 |1 +; + +selectFilterByXOfLocation +SELECT city, ST_X(shape) x, ST_Y(shape) y, ST_Z(shape) z, ST_X(location) lx, ST_Y(location) ly FROM geo WHERE lx > 0 ORDER BY ly; + + city:s | x:d | y:d | z:d | lx:d | ly:d +Sydney |151.208629 |-33.863385 |100.0 |151.20862897485495|-33.863385021686554 +Singapore |103.855535 |1.295868 |15.0 |103.8555349688977 |1.2958679627627134 +Hong Kong |114.183925 |22.281397 |552.0 |114.18392493389547|22.28139698971063 +Tokyo |139.76402225 |35.669616 |40.0 |139.76402222178876|35.66961596254259 +Seoul |127.060851 |37.509132 |38.0 |127.06085099838674|37.50913198571652 +Munich 
|11.537505 |48.146321 |519.0 |11.537504978477955|48.14632098656148 +Paris |2.351773 |48.845538 |35.0 |2.3517729341983795|48.84553796611726 +Amsterdam |4.850312 |52.347557 |2.0 |4.850311987102032 |52.347556999884546 +Berlin |13.390889 |52.486701 |34.0 |13.390888944268227|52.48670099303126 +; + +selectFilterByRegionPoint +SELECT city, region, ST_X(location) x FROM geo WHERE ST_X(ST_WKTTOSQL(region_point)) < 0 ORDER BY x; + + city:s | region:s | x:d +San Francisco |Americas |-122.39422800019383 +Mountain View |Americas |-122.08384302444756 +Phoenix |Americas |-111.97350500151515 +Chicago |Americas |-87.63787407428026 +New York |Americas |-73.9900270756334 +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json new file mode 100644 index 0000000000000..56007a0284c43 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json @@ -0,0 +1,28 @@ +{ + "settings": { + "number_of_shards": 1 + }, + "mappings": { + "properties": { + "region": { + "type": "keyword" + }, + "city": { + "type": "keyword" + }, + "location": { + "type": "geo_point" + }, + "location_no_dv": { + "type": "geo_point", + "doc_values": "false" + }, + "shape": { + "type": "geo_shape" + }, + "region_point": { + "type": "keyword" + } + } + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.sql-spec new file mode 100644 index 0000000000000..e801d8477f6bf --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.sql-spec @@ -0,0 +1,24 @@ +// +// Commands on geo test data +// + +selectAllShapesAsGeometries +SELECT city, shape, region FROM "geo" ORDER BY "city"; + +selectAllShapesAsWKT +SELECT city, ST_GEOMFROMTEXT(ST_ASWKT(shape)) shape_wkt, region FROM "geo" ORDER BY "city"; + +selectAllPointsAsGeometries +SELECT city, location, region FROM "geo" ORDER BY "city"; + +selectAllPointsAsWKT +SELECT city, ST_GEOMFROMTEXT(ST_ASWKT(location)) shape_wkt, region FROM "geo" ORDER BY "city"; + +selectRegionUsingWktToSqlWithoutConvertion +SELECT region, city, shape, ST_GEOMFROMTEXT(region_point) region_wkt FROM geo ORDER BY region, city; + +selectCitiesWithGroupByWktToSql +SELECT COUNT(city) city_by_region, ST_GEOMFROMTEXT(region_point) region_geom FROM geo WHERE city LIKE '%a%' GROUP BY region_geom ORDER BY city_by_region; + +selectCitiesWithOrderByWktToSql +SELECT region, city, UCASE(ST_ASWKT(ST_GEOMFROMTEXT(region_point))) region_wkt FROM geo WHERE city LIKE '%e%' ORDER BY region_wkt, city; diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql b/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql new file mode 100644 index 0000000000000..b8b8d4e36f453 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS "geo"; +CREATE TABLE "geo" ( + "city" VARCHAR(50), + "region" VARCHAR(50), + "region_point" VARCHAR(50), + "location" POINT, + "shape" GEOMETRY +) + AS SELECT * FROM CSVREAD('classpath:/geo/geo.csv'); diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/OGC-NOTICE.txt b/x-pack/plugin/sql/qa/src/main/resources/ogc/OGC-NOTICE.txt new file mode 100644 index 0000000000000..ac061f5cc4493 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/OGC-NOTICE.txt @@ -0,0 +1,41 @@ +Software Notice + +This OGC work (including software, documents, or other related items) is being +provided by the copyright holders under the following 
license. By obtaining, +using and/or copying this work, you (the licensee) agree that you have read, +understood, and will comply with the following terms and conditions: + +Permission to use, copy, and modify this software and its documentation, with +or without modification, for any purpose and without fee or royalty is hereby +granted, provided that you include the following on ALL copies of the software +and documentation or portions thereof, including modifications, that you make: + +1. The full text of this NOTICE in a location viewable to users of the +redistributed or derivative work. + +2. Any pre-existing intellectual property disclaimers, notices, or terms and +conditions. If none exist, a short notice of the following form (hypertext is +preferred, text is permitted) should be used within the body of any +redistributed or derivative code: "Copyright © [$date-of-document] Open +Geospatial Consortium, Inc. All Rights Reserved. +http://www.opengeospatial.org/ogc/legal (Hypertext is preferred, but a textual +representation is permitted.) + +3. Notice of any changes or modifications to the OGC files, including the date +changes were made. (We recommend you provide URIs to the location from which +the code is derived.) + + +THIS SOFTWARE AND DOCUMENTATION IS PROVIDED "AS IS," AND COPYRIGHT HOLDERS MAKE +NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO, WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT +THE USE OF THE SOFTWARE OR DOCUMENTATION WILL NOT INFRINGE ANY THIRD PARTY +PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS. + +COPYRIGHT HOLDERS WILL NOT BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF ANY USE OF THE SOFTWARE OR DOCUMENTATION. + +The name and trademarks of copyright holders may NOT be used in advertising or +publicity pertaining to the software without specific, written prior permission. +Title to copyright in this software and any associated documentation will at all +times remain with copyright holders.
\ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec new file mode 100644 index 0000000000000..f1941161697d2 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec @@ -0,0 +1,36 @@ +// +// Commands on OGC data +// + +showTables +SHOW TABLES "ogc"; + + name:s | type:s | kind:s +ogc |BASE TABLE |INDEX +; + +// DESCRIBE + +describe +DESCRIBE "ogc"; + + column:s | type:s | mapping:s +address | VARCHAR | text +address.keyword | VARCHAR | keyword +aliases | VARCHAR | text +aliases.keyword | VARCHAR | keyword +boundary | GEOMETRY | geo_shape +centerline | GEOMETRY | geo_shape +centerlines | GEOMETRY | geo_shape +fid | INTEGER | integer +footprint | GEOMETRY | geo_shape +name | VARCHAR | text +name.keyword | VARCHAR | keyword +neatline | GEOMETRY | geo_shape +num_lanes | INTEGER | integer +ogc_type | VARCHAR | keyword +position | GEOMETRY | geo_shape +shore | GEOMETRY | geo_shape +shores | GEOMETRY | geo_shape +type | VARCHAR | keyword +; diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.json b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.json new file mode 100644 index 0000000000000..afdf2f5d61ac6 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.json @@ -0,0 +1,58 @@ +// This dataset is derived from OpenGIS Simple Features for SQL (Types and Functions) Test Suite on Apr 1, 2018 +// +// Copyright © 2018 Open Geospatial Consortium, Inc. All Rights Reserved. +// http://www.opengeospatial.org/ogc/legal +// +// lakes +{"index":{"_id": "101"}} +{"ogc_type":"lakes", "fid": 101, "name": "BLUE LAKE", "shore": "POLYGON ((52 18, 66 23, 73 9, 48 6, 52 18), (59 18, 67 18, 67 13, 59 13, 59 18))"} +// +// road segments +{"index":{"_id": "102"}} +{"ogc_type":"road_segments", "fid": 102, "name": "Route 5", "num_lanes": 2, "centerline": "LINESTRING (0 18, 10 21, 16 23, 28 26, 44 31)"} +{"index":{"_id": "103"}} +{"ogc_type":"road_segments", "fid": 103, "name": "Route 5", "aliases": "Main Street", "num_lanes": 4, "centerline": "LINESTRING (44 31, 56 34, 70 38)"} +{"index":{"_id": "104"}} +{"ogc_type":"road_segments", "fid": 104, "name": "Route 5", "num_lanes": 2, "centerline": "LINESTRING (70 38, 72 48)"} +{"index":{"_id": "105"}} +{"ogc_type":"road_segments", "fid": 105, "name": "Main Street", "num_lanes": 4, "centerline": "LINESTRING (70 38, 84 42)"} +{"index":{"_id": "106"}} +{"ogc_type":"road_segments", "fid": 106, "name": "Dirt Road by Green Forest", "num_lanes": 1, "centerline": "LINESTRING (28 26, 28 0)"} +// +// divided routes +{"index":{"_id": "119"}} +{"ogc_type":"divided_routes", "fid": 119, "name": "Route 75", "num_lanes": 4, "centerlines": "MULTILINESTRING ((10 48, 10 21, 10 0), (16 0, 16 23, 16 48))"} +// +// forests +{"index":{"_id": "109"}} +{"ogc_type":"forests", "fid": 109, "name": "Green Forest", "boundary": "MULTIPOLYGON (((28 26, 28 0, 84 0, 84 42, 28 26), (52 18, 66 23, 73 9, 48 6, 52 18)), ((59 18, 67 18, 67 13, 59 13, 59 18)))"} +// +// forests +{"index":{"_id": "110"}} +{"ogc_type":"bridges", "fid": 110, "name": "Cam Bridge", "position": "POINT (44 31)"} +// +// streams +{"index":{"_id": "111"}} +{"ogc_type":"streams", "fid": 111, "name": "Cam Stream", "centerline": "LINESTRING (38 48, 44 41, 41 36, 44 31, 52 18)"} +{"index":{"_id": "112"}} +{"ogc_type":"streams", "fid": 112, "centerline": "LINESTRING (76 0, 78 4, 73 9)"} +// +// buildings +{"index":{"_id": "113"}} +{"ogc_type":"buildings", "fid": 113, "address": "123 Main Street", 
"position": "POINT (52 30)", "footprint": "POLYGON ((50 31, 54 31, 54 29, 50 29, 50 31))"} +{"index":{"_id": "114"}} +{"ogc_type":"buildings", "fid": 114, "address": "215 Main Street", "position": "POINT (64 33)", "footprint": "POLYGON ((66 34, 62 34, 62 32, 66 32, 66 34))"} +// +// ponds +{"index":{"_id": "120"}} +{"ogc_type":"ponds", "fid": 120, "type": "Stock Pond", "shores": "MULTIPOLYGON (((24 44, 22 42, 24 40, 24 44)), ((26 44, 26 40, 28 42, 26 44)))"} +// +// named places +{"index":{"_id": "117"}} +{"ogc_type":"named_places", "fid": 117, "name": "Ashton", "boundary": "POLYGON ((62 48, 84 48, 84 30, 56 30, 56 34, 62 48))"} +{"index":{"_id": "118"}} +{"ogc_type":"named_places", "fid": 118, "name": "Goose Island", "boundary": "POLYGON ((67 13, 67 18, 59 18, 59 13, 67 13))"} +// +// map neat lines +{"index":{"_id": "115"}} +{"ogc_type":"map_neatlines", "fid": 115, "neatline": "POLYGON ((0 0, 0 48, 84 48, 84 0, 0 0))"} diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.sql-spec new file mode 100644 index 0000000000000..3976c5a8b181e --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.sql-spec @@ -0,0 +1,85 @@ +// +// Basic GEO SELECT +// + +selectLakes +SELECT fid, name, shore FROM lakes ORDER BY fid; +selectRoadSegments +SELECT fid, name, num_lanes, aliases, centerline FROM road_segments ORDER BY fid; +selectDividedRoutes +SELECT fid, name, num_lanes, centerlines FROM divided_routes ORDER BY fid; +selectForests +SELECT fid, name, boundary FROM forests ORDER BY fid; +selectBridges +SELECT fid, name, position FROM bridges ORDER BY fid; +selectStreams +SELECT fid, name, centerline FROM streams ORDER BY fid; +selectBuildings +SELECT fid, address, position, footprint FROM buildings ORDER BY fid; +selectPonds +SELECT fid, type, name, shores FROM ponds ORDER BY fid; +selectNamedPlaces +SELECT fid, name, boundary FROM named_places ORDER BY fid; +selectMapNeatLines +SELECT fid, neatline FROM map_neatlines ORDER BY fid; + +// +// Type conversion functions +// + +// The string serialization is slightly different between ES and H2, so we need to tweak it a bit by uppercasing both +// and removing floating point +selectRoadSegmentsAsWkt +SELECT fid, name, num_lanes, aliases, REPLACE(UCASE(ST_AsText(centerline)), '.0', '') centerline_wkt FROM road_segments ORDER BY fid; + +selectSinglePoint +SELECT ST_GeomFromText('point (10.0 12.0)') point; + + +// +// Geometry Property Functions +// +// H2GIS doesn't follow the standard here that mandates ST_Dimension returns SMALLINT +selectLakesProps +SELECT fid, UCASE(ST_GeometryType(shore)) type FROM lakes ORDER BY fid; +selectRoadSegmentsProps +SELECT fid, UCASE(ST_GeometryType(centerline)) type FROM road_segments ORDER BY fid; +selectDividedRoutesProps +SELECT fid, UCASE(ST_GeometryType(centerlines)) type FROM divided_routes ORDER BY fid; +selectForestsProps +SELECT fid, UCASE(ST_GeometryType(boundary)) type FROM forests ORDER BY fid; +selectBridgesProps +SELECT fid, UCASE(ST_GeometryType(position)) type FROM bridges ORDER BY fid; +selectStreamsProps +SELECT fid, UCASE(ST_GeometryType(centerline)) type FROM streams ORDER BY fid; +selectBuildingsProps +SELECT fid, UCASE(ST_GeometryType(position)) type1, UCASE(ST_GeometryType(footprint)) type2 FROM buildings ORDER BY fid; +selectPondsProps +SELECT fid, UCASE(ST_GeometryType(shores)) type FROM ponds ORDER BY fid; +selectNamedPlacesProps +SELECT fid, UCASE(ST_GeometryType(boundary)) type FROM named_places ORDER BY fid; 
+selectMapNeatLinesProps +SELECT fid, UCASE(ST_GeometryType(neatline)) type FROM map_neatlines ORDER BY fid; + +selectLakesXY +SELECT fid, ST_X(shore) x, ST_Y(shore) y FROM lakes ORDER BY fid; +selectRoadSegmentsXY +SELECT fid, ST_X(centerline) x, ST_Y(centerline) y FROM road_segments ORDER BY fid; +selectDividedRoutesXY +SELECT fid, ST_X(centerlines) x, ST_Y(centerlines) y FROM divided_routes ORDER BY fid; +selectForestsXY +SELECT fid, ST_X(boundary) x, ST_Y(boundary) y FROM forests ORDER BY fid; +selectBridgesPositionsXY +SELECT fid, ST_X(position) x, ST_Y(position) y FROM bridges ORDER BY fid; +selectStreamsXY +SELECT fid, ST_X(centerline) x, ST_Y(centerline) y FROM streams ORDER BY fid; +selectBuildingsXY +SELECT fid, ST_X(position) x, ST_Y(position) y FROM buildings ORDER BY fid; +selectBuildingsFootprintsXY +SELECT fid, ST_X(footprint) x, ST_Y(footprint) y FROM buildings ORDER BY fid; +selectPondsXY +SELECT fid, ST_X(shores) x, ST_Y(shores) y FROM ponds ORDER BY fid; +selectNamedPlacesXY +SELECT fid, ST_X(boundary) x, ST_Y(boundary) y FROM named_places ORDER BY fid; +selectMapNeatLinesXY +SELECT fid, ST_X(neatline) x, ST_Y(neatline) y FROM map_neatlines ORDER BY fid; diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/sqltsch.sql b/x-pack/plugin/sql/qa/src/main/resources/ogc/sqltsch.sql new file mode 100644 index 0000000000000..6d1322ecd3690 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/sqltsch.sql @@ -0,0 +1,672 @@ +-- FILE: sqltsch.sql 10/01/98 +-- +-- 1 2 3 4 5 6 7 8 +--345678901234567890123456789012345678901234567890123456789012345678901234567890 +--////////////////////////////////////////////////////////////////////////////// +-- +-- Copyright 1998, Open GIS Consortium, Inc. +-- +-- The material in this document details an Open GIS Consortium Test Suite in +-- accordance with a license that your organization has signed. Please refer +-- to http://www.opengeospatial.org/testing/ to obtain a copy of the general license +-- (it is part of the Conformance Testing Agreement). +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- OpenGIS Simple Features for SQL (Types and Functions) Test Suite Software +-- +-- This file "sqltsch.sql" is part 1 of a two part standardized test +-- suite in SQL script form. The other file that is required for this test +-- suite, "sqltque.sql", one additional script is provided ("sqltcle.sql") that +-- performs cleanup operations between test runs, and other documents that +-- describe the OGC Conformance Test Program are available via the WWW at +-- http://www.opengeospatial.org/testing/index.htm +-- +-- NOTE CONCERNING INFORMATION ON CONFORMANCE TESTING AND THIS TEST SUITE +-- ---------------------------------------------------------------------- +-- +-- Organizations wishing to submit product for conformance testing should +-- access the above WWW site to discover the proper procedure for obtaining +-- a license to use the OpenGIS(R) certification mark associated with this +-- test suite. +-- +-- +-- NOTE CONCERNING TEST SUITE ADAPTATION +-- ------------------------------------- +-- +-- OGC recognizes that many products will have to adapt this test suite to +-- make it work properly. OGC has documented the allowable adaptations within +-- this test suite where possible. Other information about adaptations may be +-- discovered in the Test Suite Guidelines document for this test suite. 
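Editorial aside on the two spec formats added above (not part of the patch itself): the *.csv-spec files embed the expected result table inline — header columns carry a type suffix such as :s (string) or :i (integer) and each block ends with a semicolon — while the *.sql-spec files carry only the query and are checked by running the same statement against the H2GIS schema defined in this sqltsch.sql file. A purely hypothetical csv-spec entry, with the test name, query, and expected value all illustrative, might look like:

geoLakeShapeType
SELECT UCASE(ST_GeometryType(shore)) AS type FROM ogc WHERE ogc_type = 'lakes';

 type:s
POLYGON
;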
+-- +-- PLEASE NOTE THE OGC REQUIRES THAT ADAPTATIONS ARE FULLY DOCUMENTED USING +-- LIBERAL COMMENT BLOCKS CONFORMING TO THE FOLLOWING FORMAT: +-- +-- -- !#@ ADAPTATION BEGIN +-- explanatory text goes here +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- original sql goes here +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +-- adated sql goes here +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- BEGIN TEST SUITE CODE +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- Create the neccessary feature and geometry tables(views) and metadata tables +-- (views) to load and query the "Blue Lake" test data for OpenGIS Simple +-- Features for SQL (Types and Functions) test. +-- +-- Required feature tables (views) are: +-- Lakes +-- Road Segments +-- Divided Routes +-- Buildings +-- Forests +-- Bridges +-- Named Places +-- Streams +-- Ponds +-- Map Neatlines +-- +-- Please refer to the Test Suite Guidelines for this test suite for further +-- information concerning this test data. +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- CREATE SPATIAL_REF_SYS METADATA TABLE +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- +-- *** ADAPTATION ALERT **** +-- Implementations do not need to execute this statement if they already +-- create the spatial_ref_sys table or view via another mechanism. +-- The size of the srtext VARCHAR exceeds that allowed on some systems. +-- +-- CREATE TABLE spatial_ref_sys ( +-- srid INTEGER NOT NULL PRIMARY KEY, +-- auth_name VARCHAR(256), +-- auth_srid INTEGER, +-- -- srtext VARCHAR(2048) +-- srtext VARCHAR(2000) +-- ); +-- -- +-- INSERT INTO spatial_ref_sys VALUES(101, 'POSC', 32214, +-- 'PROJCS["UTM_ZONE_14N", GEOGCS["World Geodetic System 72", +-- DATUM["WGS_72", SPHEROID["NWL_10D", 6378135, 298.26]], +-- PRIMEM["Greenwich", 0], UNIT["Meter", 1.0]], +-- PROJECTION["Transverse_Mercator"], +-- PARAMETER["False_Easting", 500000.0], +-- PARAMETER["False_Northing", 0.0], +-- PARAMETER["Central_Meridian", -99.0], +-- PARAMETER["Scale_Factor", 0.9996], +-- PARAMETER["Latitude_of_origin", 0.0], +-- UNIT["Meter", 1.0]]' +-- ); +-- +-- +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- CREATE FEATURE SCHEMA +-- +-- *** ADAPTATION ALERT *** +-- The following schema is created using CREATE TABLE statements. +-- Furthermore, it DOES NOT create the GEOMETRY_COLUMNS metadata table. 
+-- Implementer's should replace the CREATE TABLES below with the mechanism +-- that it uses to create feature tables and the GEOMETRY_COLUMNS table/view +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-------------------------------------------------------------------------------- +-- +-- Create feature tables +-- +-------------------------------------------------------------------------------- +-- +-- Lakes +-- +-- +-- +-- +CREATE TABLE lakes ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + shore POLYGON +); +-- +-- Road Segments +-- +-- +-- +-- +CREATE TABLE road_segments ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + aliases VARCHAR(64), + num_lanes INTEGER, + centerline LINESTRING +); +-- +-- Divided Routes +-- +-- +-- +-- +CREATE TABLE divided_routes ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + num_lanes INTEGER, + centerlines MULTILINESTRING +); +-- +-- Forests +-- +-- +-- +-- +CREATE TABLE forests ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + boundary MULTIPOLYGON +); +-- +-- Bridges +-- +-- +-- +-- +CREATE TABLE bridges ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + position POINT +); +-- +-- Streams +-- +-- +-- +-- +CREATE TABLE streams ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + centerline LINESTRING +); +-- +-- Buildings +-- +--*** ADAPTATION ALERT *** +-- A view could be used to provide the below semantics without multiple geometry +-- columns in a table. In other words, create two tables. One table would +-- contain the POINT position and the other would create the POLYGON footprint. +-- Then create a view with the semantics of the buildings table below. +-- +-- +-- +CREATE TABLE buildings ( + fid INTEGER NOT NULL PRIMARY KEY, + address VARCHAR(64), + position POINT, + footprint POLYGON +); +-- +-- Ponds +-- +-- +-- +-- +-- -- !#@ ADAPTATION BEGIN +-- Fixes typo in the MULTIPOYLGON type +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- CREATE TABLE ponds ( +-- fid INTEGER NOT NULL PRIMARY KEY, +-- name VARCHAR(64), +-- type VARCHAR(64), +-- shores MULTIPOYLGON +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +CREATE TABLE ponds ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + type VARCHAR(64), + shores MULTIPOLYGON +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END + +-- +-- Named Places +-- +-- +-- +-- +CREATE TABLE named_places ( + fid INTEGER NOT NULL PRIMARY KEY, + name VARCHAR(64), + boundary POLYGON +); +-- +-- Map Neatline +-- +-- +-- +-- +CREATE TABLE map_neatlines ( + fid INTEGER NOT NULL PRIMARY KEY, + neatline POLYGON +); +-- +-- +-- +--////////////////////////////////////////////////////////////////////////////// +-- +-- POPULATE GEOMETRY AND FEATURE TABLES +-- +-- *** ADAPTATION ALERT *** +-- This script DOES NOT make any inserts into a GEOMTERY_COLUMNS table/view. +-- Implementers should insert whatever makes this happen in their implementation +-- below. Furthermore, the inserts below may be replaced by whatever mechanism +-- may be provided by implementers to insert rows in feature tables such that +-- metadata (and other mechanisms) are updated properly. 
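A brief orientation note (editorial, not part of the OGC script): the CREATE TABLE statements above give H2GIS one relational table per feature class, whereas the Elasticsearch side of these tests loads every feature into the single ogc index added earlier in this patch (ogc.json), using the ogc_type keyword field as the discriminator. Assuming that layout, a minimal illustrative query against the flattened index — not one of the actual spec queries — could be:

SELECT fid, name, ST_X(position) AS x, ST_Y(position) AS y
FROM ogc
WHERE ogc_type = 'bridges'
ORDER BY fid;

The sql-spec tests themselves reference the logical table names (lakes, bridges, and so on), presumably resolved by the test harness on each backend; the sketch only shows how the flattened index relates to the relational schema being created here.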
+-- +--////////////////////////////////////////////////////////////////////////////// +-- +--============================================================================== +-- Lakes +-- +-- We have one lake, Blue Lake. It is a polygon with a hole. Its geometry is +-- described in WKT format as: +-- 'POLYGON( (52 18, 66 23, 73 9, 48 6, 52 18), +-- (59 18, 67 18, 67 13, 59 13, 59 18) )' +--============================================================================== +-- +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO lakes VALUES (101, 'BLUE LAKE', +-- PolygonFromText('POLYGON((52 18,66 23,73 9,48 6,52 18),(59 18,67 18,67 13,59 13,59 18))', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO lakes VALUES (101, 'BLUE LAKE', + ST_PolyFromText('POLYGON((52 18,66 23,73 9,48 6,52 18),(59 18,67 18,67 13,59 13,59 18))', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Road segments +-- +-- We have five road segments. Their geometries are all linestrings. +-- The geometries are described in WKT format as: +-- name 'Route 5', fid 102 +-- 'LINESTRING( 0 18, 10 21, 16 23, 28 26, 44 31 )' +-- name 'Route 5', fid 103 +-- 'LINESTRING( 44 31, 56 34, 70 38 )' +-- name 'Route 5', fid 104 +-- 'LINESTRING( 70 38, 72 48 )' +-- name 'Main Street', fid 105 +-- 'LINESTRING( 70 38, 84 42 )' +-- name 'Dirt Road by Green Forest', fid 106 +-- 'LINESTRING( 28 26, 28 0 )' +-- +--================== +-- +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO road_segments VALUES(102, 'Route 5', NULL, 2, +-- LineStringFromText('LINESTRING( 0 18, 10 21, 16 23, 28 26, 44 31 )' ,101) +-- ); +-- INSERT INTO road_segments VALUES(103, 'Route 5', 'Main Street', 4, +-- LineStringFromText('LINESTRING( 44 31, 56 34, 70 38 )' ,101) +-- ); +-- INSERT INTO road_segments VALUES(104, 'Route 5', NULL, 2, +-- LineStringFromText('LINESTRING( 70 38, 72 48 )' ,101) +-- ); +-- INSERT INTO road_segments VALUES(105, 'Main Street', NULL, 4, +-- LineStringFromText('LINESTRING( 70 38, 84 42 )' ,101) +-- ); +-- INSERT INTO road_segments VALUES(106, 'Dirt Road by Green Forest', NULL, 1, +-- LineStringFromText('LINESTRING( 28 26, 28 0 )',101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO road_segments VALUES(102, 'Route 5', NULL, 2, + ST_LineFromText('LINESTRING( 0 18, 10 21, 16 23, 28 26, 44 31 )' ,101) +); +INSERT INTO road_segments VALUES(103, 'Route 5', 'Main Street', 4, + ST_LineFromText('LINESTRING( 44 31, 56 34, 70 38 )' ,101) +); +INSERT INTO road_segments VALUES(104, 'Route 5', NULL, 2, + ST_LineFromText('LINESTRING( 70 38, 72 48 )' ,101) +); +INSERT INTO road_segments VALUES(105, 'Main Street', NULL, 4, + ST_LineFromText('LINESTRING( 70 38, 84 42 )' ,101) +); +INSERT INTO road_segments VALUES(106, 'Dirt Road by Green Forest', NULL, 1, + ST_LineFromText('LINESTRING( 28 26, 28 0 )',101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END + +-- +--================== +-- DividedRoutes +-- +-- We have one divided route. Its geometry is a multilinestring. 
+-- The geometry is described in WKT format as: +-- 'MULTILINESTRING( (10 48, 10 21, 10 0), (16 0, 10 23, 16 48) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO divided_routes VALUES(119, 'Route 75', 4, +-- MultiLineStringFromText('MULTILINESTRING((10 48,10 21,10 0),(16 0,16 23,16 48))', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO divided_routes VALUES(119, 'Route 75', 4, + ST_MLineFromText('MULTILINESTRING((10 48,10 21,10 0),(16 0,16 23,16 48))', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Forests +-- +-- We have one forest. Its geometry is a multipolygon. +-- The geometry is described in WKT format as: +-- 'MULTIPOLYGON( ( (28 26, 28 0, 84 0, 84 42, 28 26), +-- (52 18, 66 23, 73 9, 48 6, 52 18) ), +-- ( (59 18, 67 18, 67 13, 59 13, 59 18) ) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO forests VALUES(109, 'Green Forest', +-- MultiPolygonFromText('MULTIPOLYGON(((28 26,28 0,84 0,84 42,28 26),(52 18,66 23,73 9,48 6,52 18)),((59 18,67 18,67 13,59 13,59 18)))', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO forests VALUES(109, 'Green Forest', + ST_MPolyFromText('MULTIPOLYGON(((28 26,28 0,84 0,84 42,28 26),(52 18,66 23,73 9,48 6,52 18)),((59 18,67 18,67 13,59 13,59 18)))', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END + +-- +--================== +-- Bridges +-- +-- We have one bridge. Its geometry is a point. +-- The geometry is described in WKT format as: +-- 'POINT( 44 31 )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO bridges VALUES(110, 'Cam Bridge', +-- PointFromText('POINT( 44 31 )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO bridges VALUES(110, 'Cam Bridge', + ST_PointFromText('POINT( 44 31 )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Streams +-- +-- We have two streams. Their geometries are linestrings. 
+-- The geometries are described in WKT format as: +-- 'LINESTRING( 38 48, 44 41, 41 36, 44 31, 52 18 )' +-- 'LINESTRING( 76 0, 78 4, 73 9 )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO streams VALUES(111, 'Cam Stream', +-- LineStringFromText('LINESTRING( 38 48, 44 41, 41 36, 44 31, 52 18 )', 101) +-- ); +-- INSERT INTO streams VALUES(112, NULL, +-- LineStringFromText('LINESTRING( 76 0, 78 4, 73 9 )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO streams VALUES(111, 'Cam Stream', + ST_LineFromText('LINESTRING( 38 48, 44 41, 41 36, 44 31, 52 18 )', 101) +); +INSERT INTO streams VALUES(112, NULL, + ST_LineFromText('LINESTRING( 76 0, 78 4, 73 9 )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Buildings +-- +-- We have two buildings. Their geometries are points and polygons. +-- The geometries are described in WKT format as: +-- address '123 Main Street' fid 113 +-- 'POINT( 52 30 )' and +-- 'POLYGON( ( 50 31, 54 31, 54 29, 50 29, 50 31) )' +-- address '215 Main Street' fid 114 +-- 'POINT( 64 33 )' and +-- 'POLYGON( ( 66 34, 62 34, 62 32, 66 32, 66 34) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO buildings VALUES(113, '123 Main Street', +-- PointFromText('POINT( 52 30 )', 101), +-- PolygonFromText('POLYGON( ( 50 31, 54 31, 54 29, 50 29, 50 31) )', 101) +-- ); +-- INSERT INTO buildings VALUES(114, '215 Main Street', +-- PointFromText('POINT( 64 33 )', 101), +-- PolygonFromText('POLYGON( ( 66 34, 62 34, 62 32, 66 32, 66 34) )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO buildings VALUES(113, '123 Main Street', + ST_PointFromText('POINT( 52 30 )', 101), + ST_PolyFromText('POLYGON( ( 50 31, 54 31, 54 29, 50 29, 50 31) )', 101) +); +INSERT INTO buildings VALUES(114, '215 Main Street', + ST_PointFromText('POINT( 64 33 )', 101), + ST_PolyFromText('POLYGON( ( 66 34, 62 34, 62 32, 66 32, 66 34) )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Ponds +-- +-- We have one pond. Its geometry is a multipolygon. 
+-- The geometry is described in WKT format as: +-- 'MULTIPOLYGON( ( ( 24 44, 22 42, 24 40, 24 44) ), ( ( 26 44, 26 40, 28 42, 26 44) ) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO ponds VALUES(120, NULL, 'Stock Pond', +-- MultiPolygonFromText('MULTIPOLYGON( ( ( 24 44, 22 42, 24 40, 24 44) ), ( ( 26 44, 26 40, 28 42, 26 44) ) )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO ponds VALUES(120, NULL, 'Stock Pond', + ST_MPolyFromText('MULTIPOLYGON( ( ( 24 44, 22 42, 24 40, 24 44) ), ( ( 26 44, 26 40, 28 42, 26 44) ) )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END + +-- +--================== +-- Named Places +-- +-- We have two named places. Their geometries are polygons. +-- The geometries are described in WKT format as: +-- name 'Ashton' fid 117 +-- 'POLYGON( ( 62 48, 84 48, 84 30, 56 30, 56 34, 62 48) )' +-- address 'Goose Island' fid 118 +-- 'POLYGON( ( 67 13, 67 18, 59 18, 59 13, 67 13) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO named_places VALUES(117, 'Ashton', +-- PolygonFromText('POLYGON( ( 62 48, 84 48, 84 30, 56 30, 56 34, 62 48) )', 101) +-- ); +-- INSERT INTO named_places VALUES(118, 'Goose Island', +-- PolygonFromText('POLYGON( ( 67 13, 67 18, 59 18, 59 13, 67 13) )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO named_places VALUES(117, 'Ashton', + ST_PolyFromText('POLYGON( ( 62 48, 84 48, 84 30, 56 30, 56 34, 62 48) )', 101) +); +INSERT INTO named_places VALUES(118, 'Goose Island', + ST_PolyFromText('POLYGON( ( 67 13, 67 18, 59 18, 59 13, 67 13) )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +--================== +-- Map Neatlines +-- +-- We have one map neatline. Its geometry is a polygon. 
+-- The geometry is described in WKT format as: +-- 'POLYGON( ( 0 0, 0 48, 84 48, 84 0, 0 0 ) )' +-- +--================== +-- +-- -- !#@ ADAPTATION BEGIN +-- Adds ST_ prefix to routing names +-- --------------------- +-- -- BEGIN ORIGINAL SQL +-- --------------------- +-- INSERT INTO map_neatlines VALUES(115, +-- PolygonFromText('POLYGON( ( 0 0, 0 48, 84 48, 84 0, 0 0 ) )', 101) +-- ); +-- --------------------- +-- -- END ORIGINAL SQL +-- --------------------- +-- -- BEGIN ADAPTED SQL +-- --------------------- +INSERT INTO map_neatlines VALUES(115, + ST_PolyFromText('POLYGON( ( 0 0, 0 48, 84 48, 84 0, 0 0 ) )', 101) +); +-- --------------------- +-- -- END ADAPTED SQL +-- --------------------- +-- -- !#@ ADAPTATION END +-- +-- +-- +-- end sqltsch.sql \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/setup_mock_show_tables.sql b/x-pack/plugin/sql/qa/src/main/resources/setup_mock_show_tables.sql index b65be73066e41..53286eadc5261 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/setup_mock_show_tables.sql +++ b/x-pack/plugin/sql/qa/src/main/resources/setup_mock_show_tables.sql @@ -1,4 +1,5 @@ CREATE TABLE mock ( "name" VARCHAR, - "type" VARCHAR + "type" VARCHAR, + "kind" VARCHAR ); diff --git a/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec new file mode 100644 index 0000000000000..c9380fae2809e --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec @@ -0,0 +1,15 @@ +// +// Geo-specific Sys Commands +// + +geoSysColumns +SYS COLUMNS TABLE LIKE 'geo'; + + TABLE_CAT:s | TABLE_SCHEM:s| TABLE_NAME:s | COLUMN_NAME:s | DATA_TYPE:i | TYPE_NAME:s | COLUMN_SIZE:i|BUFFER_LENGTH:i|DECIMAL_DIGITS:i|NUM_PREC_RADIX:i| NULLABLE:i| REMARKS:s | COLUMN_DEF:s |SQL_DATA_TYPE:i|SQL_DATETIME_SUB:i|CHAR_OCTET_LENGTH:i|ORDINAL_POSITION:i|IS_NULLABLE:s|SCOPE_CATALOG:s|SCOPE_SCHEMA:s|SCOPE_TABLE:s|SOURCE_DATA_TYPE:sh|IS_AUTOINCREMENT:s|IS_GENERATEDCOLUMN:s +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |city |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |1 |YES |null |null |null |null |NO |NO +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |location |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |2 |YES |null |null |null |null |NO |NO +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |location_no_dv |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |3 |YES |null |null |null |null |NO |NO +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |region |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |4 |YES |null |null |null |null |NO |NO +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |region_point |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |5 |YES |null |null |null |null |NO |NO +x-pack_plugin_sql_qa_single-node_integTestCluster|null |geo |shape |114 |GEO_SHAPE |2147483647 |2147483647 |null |null |1 |null |null |114 |0 |null |6 |YES |null |null |null |null |NO |NO +; \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/slow/frozen.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/slow/frozen.csv-spec new file mode 100644 index 0000000000000..6175bea6034cd --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/slow/frozen.csv-spec @@ -0,0 +1,59 @@ +// +// Frozen indices tests +// + +showTables +SHOW 
TABLES INCLUDE FROZEN; + + name | type | kind +frozen_emp |BASE TABLE |FROZEN INDEX +logs |BASE TABLE |INDEX +test_alias |VIEW |ALIAS +test_alias_emp |VIEW |ALIAS +test_emp |BASE TABLE |INDEX +test_emp_copy |BASE TABLE |INDEX +; + +columnFromFrozen +SELECT gender FROM FROZEN frozen_emp ORDER BY gender LIMIT 5; + +gender:s +F +F +F +F +F +; + +percentileFrozen +SELECT gender, PERCENTILE(emp_no, 92.45) p1 FROM FROZEN frozen_emp GROUP BY gender; + +gender:s | p1:d +null |10018.745 +F |10098.0085 +M |10091.393 +; + +countFromFrozen +SELECT gender, COUNT(*) AS c FROM FROZEN frozen_emp GROUP BY gender; + +gender:s | c:l +null |10 +F |33 +M |57 +; + +sum +SELECT SUM(salary) FROM FROZEN frozen_emp; + + SUM(salary) +--------------- +4824855 +; + +kurtosisAndSkewnessNoGroup +SELECT KURTOSIS(emp_no) k, SKEWNESS(salary) s FROM FROZEN frozen_emp; + +k:d | s:d +1.7997599759975997 | 0.2707722118423227 +; \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/slow/readme.txt b/x-pack/plugin/sql/qa/src/main/resources/slow/readme.txt new file mode 100644 index 0000000000000..a95c75b7665ba --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/slow/readme.txt @@ -0,0 +1,3 @@ +Slow tests are placed in this folder so that they don't get picked up accidently through the classpath discovery. +A good example are frozen tests, which by their nature, take a LOT more time to execute and thus would cause +the other 'normal' tests to time-out. \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.1.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.1.jar.sha1 deleted file mode 100644 index 75e809754ecee..0000000000000 --- a/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9ac3dbf89dbf2ee385185dd0cd3064fe789efee0 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.2.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.2.jar.sha1 new file mode 100644 index 0000000000000..9cbac57161c8e --- /dev/null +++ b/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.2.jar.sha1 @@ -0,0 +1 @@ +a079fc39ccc3de02acdeb7117443e5d9bd431687 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java index 77f4a4994ed86..06abde5cef4df 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java @@ -33,12 +33,14 @@ public class SqlQueryRequest extends AbstractSqlQueryRequest { private static final ObjectParser PARSER = objectParser(SqlQueryRequest::new); static final ParseField COLUMNAR = new ParseField("columnar"); static final ParseField FIELD_MULTI_VALUE_LENIENCY = new ParseField("field_multi_value_leniency"); + static final ParseField INDEX_INCLUDE_FROZEN = new ParseField("index_include_frozen"); static { PARSER.declareString(SqlQueryRequest::cursor, CURSOR); PARSER.declareBoolean(SqlQueryRequest::columnar, COLUMNAR); PARSER.declareBoolean(SqlQueryRequest::fieldMultiValueLeniency, FIELD_MULTI_VALUE_LENIENCY); + PARSER.declareBoolean(SqlQueryRequest::indexIncludeFrozen, INDEX_INCLUDE_FROZEN); } private String cursor = ""; @@ -48,6 +50,7 @@ public class SqlQueryRequest extends AbstractSqlQueryRequest { */ private Boolean columnar 
= Boolean.FALSE; private boolean fieldMultiValueLeniency = Protocol.FIELD_MULTI_VALUE_LENIENCY; + private boolean indexIncludeFrozen = Protocol.INDEX_INCLUDE_FROZEN; public SqlQueryRequest() { super(); @@ -55,11 +58,12 @@ public SqlQueryRequest() { public SqlQueryRequest(String query, List params, QueryBuilder filter, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, Boolean columnar, - String cursor, RequestInfo requestInfo, boolean fieldMultiValueLeniency) { + String cursor, RequestInfo requestInfo, boolean fieldMultiValueLeniency, boolean indexIncludeFrozen) { super(query, params, filter, zoneId, fetchSize, requestTimeout, pageTimeout, requestInfo); this.cursor = cursor; this.columnar = columnar; this.fieldMultiValueLeniency = fieldMultiValueLeniency; + this.indexIncludeFrozen = indexIncludeFrozen; } @Override @@ -115,11 +119,21 @@ public boolean fieldMultiValueLeniency() { return fieldMultiValueLeniency; } + public SqlQueryRequest indexIncludeFrozen(boolean include) { + this.indexIncludeFrozen = include; + return this; + } + + public boolean indexIncludeFrozen() { + return indexIncludeFrozen; + } + public SqlQueryRequest(StreamInput in) throws IOException { super(in); cursor = in.readString(); columnar = in.readOptionalBoolean(); fieldMultiValueLeniency = in.readBoolean(); + indexIncludeFrozen = in.readBoolean(); } @Override @@ -128,11 +142,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(cursor); out.writeOptionalBoolean(columnar); out.writeBoolean(fieldMultiValueLeniency); + out.writeBoolean(indexIncludeFrozen); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), cursor, columnar); + return Objects.hash(super.hashCode(), cursor, columnar, fieldMultiValueLeniency, indexIncludeFrozen); } @Override @@ -140,7 +155,8 @@ public boolean equals(Object obj) { return super.equals(obj) && Objects.equals(cursor, ((SqlQueryRequest) obj).cursor) && Objects.equals(columnar, ((SqlQueryRequest) obj).columnar) - && fieldMultiValueLeniency == ((SqlQueryRequest) obj).fieldMultiValueLeniency; + && fieldMultiValueLeniency == ((SqlQueryRequest) obj).fieldMultiValueLeniency + && indexIncludeFrozen == ((SqlQueryRequest) obj).indexIncludeFrozen; } @Override @@ -152,7 +168,7 @@ public String getDescription() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { // This is needed just to test round-trip compatibility with proto.SqlQueryRequest return new org.elasticsearch.xpack.sql.proto.SqlQueryRequest(query(), params(), zoneId(), fetchSize(), requestTimeout(), - pageTimeout(), filter(), columnar(), cursor(), requestInfo(), fieldMultiValueLeniency()) + pageTimeout(), filter(), columnar(), cursor(), requestInfo(), fieldMultiValueLeniency(), indexIncludeFrozen()) .toXContent(builder, params); } diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java index 71f2774def97e..f2ede4b463cdf 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java @@ -25,15 +25,16 @@ public class SqlQueryRequestBuilder extends ActionRequestBuilder params, QueryBuilder filter, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, boolean columnar, 
String nextPageInfo, RequestInfo requestInfo, - boolean multiValueFieldLeniency) { + boolean multiValueFieldLeniency, boolean indexIncludeFrozen) { super(client, action, new SqlQueryRequest(query, params, filter, zoneId, fetchSize, requestTimeout, pageTimeout, columnar, - nextPageInfo, requestInfo, multiValueFieldLeniency)); + nextPageInfo, requestInfo, multiValueFieldLeniency, indexIncludeFrozen)); } public SqlQueryRequestBuilder query(String query) { diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequest.java index 009d0823d7c8a..0679283411667 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequest.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequest.java @@ -69,7 +69,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws null, null, requestInfo(), + false, false).toXContent(builder, params); - } } diff --git a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestTests.java index ce33f9a42e90d..992f55e5a3b00 100644 --- a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestTests.java +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestTests.java @@ -56,7 +56,7 @@ protected SqlQueryRequest createTestInstance() { return new SqlQueryRequest(randomAlphaOfLength(10), randomParameters(), SqlTestUtils.randomFilterOrNull(random()), randomZone(), between(1, Integer.MAX_VALUE), randomTV(), randomTV(), randomBoolean(), randomAlphaOfLength(10), requestInfo, - randomBoolean() + randomBoolean(), randomBoolean() ); } @@ -115,7 +115,7 @@ protected SqlQueryRequest mutateInstance(SqlQueryRequest instance) { ); SqlQueryRequest newRequest = new SqlQueryRequest(instance.query(), instance.params(), instance.filter(), instance.zoneId(), instance.fetchSize(), instance.requestTimeout(), instance.pageTimeout(), instance.columnar(), - instance.cursor(), instance.requestInfo(), instance.fieldMultiValueLeniency()); + instance.cursor(), instance.requestInfo(), instance.fieldMultiValueLeniency(), instance.indexIncludeFrozen()); mutator.accept(newRequest); return newRequest; } diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java index 24dfcf76ef11e..f93a3042da337 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java @@ -64,13 +64,14 @@ public SqlQueryResponse basicQuery(String query, int fetchSize) throws SQLExcept // TODO allow customizing the time zone - this is what session set/reset/get should be about // method called only from CLI SqlQueryRequest sqlRequest = new SqlQueryRequest(query, Collections.emptyList(), Protocol.TIME_ZONE, - fetchSize, - TimeValue.timeValueMillis(cfg.queryTimeout()), + fetchSize, + TimeValue.timeValueMillis(cfg.queryTimeout()), TimeValue.timeValueMillis(cfg.pageTimeout()), null, Boolean.FALSE, null, new RequestInfo(Mode.CLI), + false, false); return 
query(sqlRequest); } diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java index 13471afe2212f..29238bf4064a5 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java @@ -23,6 +23,7 @@ public final class Protocol { public static final TimeValue REQUEST_TIMEOUT = TimeValue.timeValueSeconds(90); public static final TimeValue PAGE_TIMEOUT = TimeValue.timeValueSeconds(45); public static final boolean FIELD_MULTI_VALUE_LENIENCY = false; + public static final boolean INDEX_INCLUDE_FROZEN = false; /** * SQL-related endpoints diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java index ec027a55a9365..15aa9566a489f 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java @@ -33,12 +33,11 @@ public class SqlQueryRequest extends AbstractSqlRequest { private final Boolean columnar; private final List params; private final boolean fieldMultiValueLeniency; - + private final boolean indexIncludeFrozen; public SqlQueryRequest(String query, List params, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, ToXContent filter, Boolean columnar, - String cursor, RequestInfo requestInfo, - boolean fieldMultiValueLeniency) { + String cursor, RequestInfo requestInfo, boolean fieldMultiValueLeniency, boolean indexIncludeFrozen) { super(requestInfo); this.query = query; this.params = params; @@ -50,11 +49,12 @@ public SqlQueryRequest(String query, List params, ZoneId zon this.columnar = columnar; this.cursor = cursor; this.fieldMultiValueLeniency = fieldMultiValueLeniency; + this.indexIncludeFrozen = indexIncludeFrozen; } public SqlQueryRequest(String cursor, TimeValue requestTimeout, TimeValue pageTimeout, RequestInfo requestInfo) { this("", Collections.emptyList(), Protocol.TIME_ZONE, Protocol.FETCH_SIZE, requestTimeout, pageTimeout, - null, false, cursor, requestInfo, Protocol.FIELD_MULTI_VALUE_LENIENCY); + null, false, cursor, requestInfo, Protocol.FIELD_MULTI_VALUE_LENIENCY, Protocol.INDEX_INCLUDE_FROZEN); } /** @@ -127,6 +127,10 @@ public boolean fieldMultiValueLeniency() { return fieldMultiValueLeniency; } + public boolean indexIncludeFrozen() { + return indexIncludeFrozen; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -148,13 +152,14 @@ public boolean equals(Object o) { Objects.equals(filter, that.filter) && Objects.equals(columnar, that.columnar) && Objects.equals(cursor, that.cursor) && - fieldMultiValueLeniency == that.fieldMultiValueLeniency; + fieldMultiValueLeniency == that.fieldMultiValueLeniency && + indexIncludeFrozen == that.indexIncludeFrozen; } @Override public int hashCode() { return Objects.hash(super.hashCode(), query, zoneId, fetchSize, requestTimeout, pageTimeout, - filter, columnar, cursor, fieldMultiValueLeniency); + filter, columnar, cursor, fieldMultiValueLeniency, indexIncludeFrozen); } @Override @@ -195,6 +200,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (fieldMultiValueLeniency) { 
builder.field("field_multi_value_leniency", fieldMultiValueLeniency); } + if (indexIncludeFrozen) { + builder.field("index_include_frozen", indexIncludeFrozen); + } if (cursor != null) { builder.field("cursor", cursor); } diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 index f60610fc75119..76af159be902f 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 @@ -50,9 +50,9 @@ statement )* ')')? statement #debug - | SHOW TABLES (tableLike=likePattern | tableIdent=tableIdentifier)? #showTables - | SHOW COLUMNS (FROM | IN) (tableLike=likePattern | tableIdent=tableIdentifier) #showColumns - | (DESCRIBE | DESC) (tableLike=likePattern | tableIdent=tableIdentifier) #showColumns + | SHOW TABLES (INCLUDE FROZEN)? (tableLike=likePattern | tableIdent=tableIdentifier)? #showTables + | SHOW COLUMNS (INCLUDE FROZEN)? (FROM | IN) (tableLike=likePattern | tableIdent=tableIdentifier) #showColumns + | (DESCRIBE | DESC) (INCLUDE FROZEN)? (tableLike=likePattern | tableIdent=tableIdentifier) #showColumns | SHOW FUNCTIONS (likePattern)? #showFunctions | SHOW SCHEMAS #showSchemas | SYS TABLES (CATALOG clusterLike=likePattern)? @@ -149,7 +149,7 @@ joinCriteria ; relationPrimary - : tableIdentifier (AS? qualifiedName)? #tableName + : FROZEN? tableIdentifier (AS? qualifiedName)? #tableName | '(' queryNoWith ')' (AS? qualifiedName)? #aliasedQuery | '(' relation ')' (AS? qualifiedName)? #aliasedRelation ; @@ -399,6 +399,7 @@ FALSE: 'FALSE'; FIRST: 'FIRST'; FORMAT: 'FORMAT'; FROM: 'FROM'; +FROZEN: 'FROZEN'; FULL: 'FULL'; FUNCTIONS: 'FUNCTIONS'; GRAPHVIZ: 'GRAPHVIZ'; @@ -407,6 +408,7 @@ HAVING: 'HAVING'; HOUR: 'HOUR'; HOURS: 'HOURS'; IN: 'IN'; +INCLUDE: 'INCLUDE'; INNER: 'INNER'; INTERVAL: 'INTERVAL'; IS: 'IS'; diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens index 0b24423bbee54..7eeec75f9c928 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens @@ -37,101 +37,103 @@ FALSE=36 FIRST=37 FORMAT=38 FROM=39 -FULL=40 -FUNCTIONS=41 -GRAPHVIZ=42 -GROUP=43 -HAVING=44 -HOUR=45 -HOURS=46 -IN=47 -INNER=48 -INTERVAL=49 -IS=50 -JOIN=51 -LAST=52 -LEFT=53 -LIKE=54 -LIMIT=55 -MAPPED=56 -MATCH=57 -MINUTE=58 -MINUTES=59 -MONTH=60 -MONTHS=61 -NATURAL=62 -NOT=63 -NULL=64 -NULLS=65 -ON=66 -OPTIMIZED=67 -OR=68 -ORDER=69 -OUTER=70 -PARSED=71 -PHYSICAL=72 -PLAN=73 -RIGHT=74 -RLIKE=75 -QUERY=76 -SCHEMAS=77 -SECOND=78 -SECONDS=79 -SELECT=80 -SHOW=81 -SYS=82 -TABLE=83 -TABLES=84 -TEXT=85 -THEN=86 -TRUE=87 -TO=88 -TYPE=89 -TYPES=90 -USING=91 -VERIFY=92 -WHEN=93 -WHERE=94 -WITH=95 -YEAR=96 -YEARS=97 -ESCAPE_ESC=98 -FUNCTION_ESC=99 -LIMIT_ESC=100 -DATE_ESC=101 -TIME_ESC=102 -TIMESTAMP_ESC=103 -GUID_ESC=104 -ESC_END=105 -EQ=106 -NULLEQ=107 -NEQ=108 -LT=109 -LTE=110 -GT=111 -GTE=112 -PLUS=113 -MINUS=114 -ASTERISK=115 -SLASH=116 -PERCENT=117 -CAST_OP=118 -CONCAT=119 -DOT=120 -PARAM=121 -STRING=122 -INTEGER_VALUE=123 -DECIMAL_VALUE=124 -IDENTIFIER=125 -DIGIT_IDENTIFIER=126 -TABLE_IDENTIFIER=127 -QUOTED_IDENTIFIER=128 -BACKQUOTED_IDENTIFIER=129 -SIMPLE_COMMENT=130 -BRACKETED_COMMENT=131 -WS=132 -UNRECOGNIZED=133 -DELIMITER=134 +FROZEN=40 +FULL=41 +FUNCTIONS=42 +GRAPHVIZ=43 +GROUP=44 +HAVING=45 +HOUR=46 +HOURS=47 +IN=48 +INCLUDE=49 +INNER=50 +INTERVAL=51 +IS=52 +JOIN=53 +LAST=54 +LEFT=55 +LIKE=56 +LIMIT=57 +MAPPED=58 +MATCH=59 +MINUTE=60 +MINUTES=61 +MONTH=62 +MONTHS=63 +NATURAL=64 +NOT=65 +NULL=66 +NULLS=67 +ON=68 +OPTIMIZED=69 
+OR=70 +ORDER=71 +OUTER=72 +PARSED=73 +PHYSICAL=74 +PLAN=75 +RIGHT=76 +RLIKE=77 +QUERY=78 +SCHEMAS=79 +SECOND=80 +SECONDS=81 +SELECT=82 +SHOW=83 +SYS=84 +TABLE=85 +TABLES=86 +TEXT=87 +THEN=88 +TRUE=89 +TO=90 +TYPE=91 +TYPES=92 +USING=93 +VERIFY=94 +WHEN=95 +WHERE=96 +WITH=97 +YEAR=98 +YEARS=99 +ESCAPE_ESC=100 +FUNCTION_ESC=101 +LIMIT_ESC=102 +DATE_ESC=103 +TIME_ESC=104 +TIMESTAMP_ESC=105 +GUID_ESC=106 +ESC_END=107 +EQ=108 +NULLEQ=109 +NEQ=110 +LT=111 +LTE=112 +GT=113 +GTE=114 +PLUS=115 +MINUS=116 +ASTERISK=117 +SLASH=118 +PERCENT=119 +CAST_OP=120 +CONCAT=121 +DOT=122 +PARAM=123 +STRING=124 +INTEGER_VALUE=125 +DECIMAL_VALUE=126 +IDENTIFIER=127 +DIGIT_IDENTIFIER=128 +TABLE_IDENTIFIER=129 +QUOTED_IDENTIFIER=130 +BACKQUOTED_IDENTIFIER=131 +SIMPLE_COMMENT=132 +BRACKETED_COMMENT=133 +WS=134 +UNRECOGNIZED=135 +DELIMITER=136 '('=1 ')'=2 ','=3 @@ -171,84 +173,86 @@ DELIMITER=134 'FIRST'=37 'FORMAT'=38 'FROM'=39 -'FULL'=40 -'FUNCTIONS'=41 -'GRAPHVIZ'=42 -'GROUP'=43 -'HAVING'=44 -'HOUR'=45 -'HOURS'=46 -'IN'=47 -'INNER'=48 -'INTERVAL'=49 -'IS'=50 -'JOIN'=51 -'LAST'=52 -'LEFT'=53 -'LIKE'=54 -'LIMIT'=55 -'MAPPED'=56 -'MATCH'=57 -'MINUTE'=58 -'MINUTES'=59 -'MONTH'=60 -'MONTHS'=61 -'NATURAL'=62 -'NOT'=63 -'NULL'=64 -'NULLS'=65 -'ON'=66 -'OPTIMIZED'=67 -'OR'=68 -'ORDER'=69 -'OUTER'=70 -'PARSED'=71 -'PHYSICAL'=72 -'PLAN'=73 -'RIGHT'=74 -'RLIKE'=75 -'QUERY'=76 -'SCHEMAS'=77 -'SECOND'=78 -'SECONDS'=79 -'SELECT'=80 -'SHOW'=81 -'SYS'=82 -'TABLE'=83 -'TABLES'=84 -'TEXT'=85 -'THEN'=86 -'TRUE'=87 -'TO'=88 -'TYPE'=89 -'TYPES'=90 -'USING'=91 -'VERIFY'=92 -'WHEN'=93 -'WHERE'=94 -'WITH'=95 -'YEAR'=96 -'YEARS'=97 -'{ESCAPE'=98 -'{FN'=99 -'{LIMIT'=100 -'{D'=101 -'{T'=102 -'{TS'=103 -'{GUID'=104 -'}'=105 -'='=106 -'<=>'=107 -'<'=109 -'<='=110 -'>'=111 -'>='=112 -'+'=113 -'-'=114 -'*'=115 -'/'=116 -'%'=117 -'::'=118 -'||'=119 -'.'=120 -'?'=121 +'FROZEN'=40 +'FULL'=41 +'FUNCTIONS'=42 +'GRAPHVIZ'=43 +'GROUP'=44 +'HAVING'=45 +'HOUR'=46 +'HOURS'=47 +'IN'=48 +'INCLUDE'=49 +'INNER'=50 +'INTERVAL'=51 +'IS'=52 +'JOIN'=53 +'LAST'=54 +'LEFT'=55 +'LIKE'=56 +'LIMIT'=57 +'MAPPED'=58 +'MATCH'=59 +'MINUTE'=60 +'MINUTES'=61 +'MONTH'=62 +'MONTHS'=63 +'NATURAL'=64 +'NOT'=65 +'NULL'=66 +'NULLS'=67 +'ON'=68 +'OPTIMIZED'=69 +'OR'=70 +'ORDER'=71 +'OUTER'=72 +'PARSED'=73 +'PHYSICAL'=74 +'PLAN'=75 +'RIGHT'=76 +'RLIKE'=77 +'QUERY'=78 +'SCHEMAS'=79 +'SECOND'=80 +'SECONDS'=81 +'SELECT'=82 +'SHOW'=83 +'SYS'=84 +'TABLE'=85 +'TABLES'=86 +'TEXT'=87 +'THEN'=88 +'TRUE'=89 +'TO'=90 +'TYPE'=91 +'TYPES'=92 +'USING'=93 +'VERIFY'=94 +'WHEN'=95 +'WHERE'=96 +'WITH'=97 +'YEAR'=98 +'YEARS'=99 +'{ESCAPE'=100 +'{FN'=101 +'{LIMIT'=102 +'{D'=103 +'{T'=104 +'{TS'=105 +'{GUID'=106 +'}'=107 +'='=108 +'<=>'=109 +'<'=111 +'<='=112 +'>'=113 +'>='=114 +'+'=115 +'-'=116 +'*'=117 +'/'=118 +'%'=119 +'::'=120 +'||'=121 +'.'=122 +'?'=123 diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens index 21925952a2e34..603e67fec88c1 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens @@ -37,100 +37,102 @@ FALSE=36 FIRST=37 FORMAT=38 FROM=39 -FULL=40 -FUNCTIONS=41 -GRAPHVIZ=42 -GROUP=43 -HAVING=44 -HOUR=45 -HOURS=46 -IN=47 -INNER=48 -INTERVAL=49 -IS=50 -JOIN=51 -LAST=52 -LEFT=53 -LIKE=54 -LIMIT=55 -MAPPED=56 -MATCH=57 -MINUTE=58 -MINUTES=59 -MONTH=60 -MONTHS=61 -NATURAL=62 -NOT=63 -NULL=64 -NULLS=65 -ON=66 -OPTIMIZED=67 -OR=68 -ORDER=69 -OUTER=70 -PARSED=71 -PHYSICAL=72 -PLAN=73 -RIGHT=74 -RLIKE=75 -QUERY=76 -SCHEMAS=77 -SECOND=78 -SECONDS=79 
-SELECT=80 -SHOW=81 -SYS=82 -TABLE=83 -TABLES=84 -TEXT=85 -THEN=86 -TRUE=87 -TO=88 -TYPE=89 -TYPES=90 -USING=91 -VERIFY=92 -WHEN=93 -WHERE=94 -WITH=95 -YEAR=96 -YEARS=97 -ESCAPE_ESC=98 -FUNCTION_ESC=99 -LIMIT_ESC=100 -DATE_ESC=101 -TIME_ESC=102 -TIMESTAMP_ESC=103 -GUID_ESC=104 -ESC_END=105 -EQ=106 -NULLEQ=107 -NEQ=108 -LT=109 -LTE=110 -GT=111 -GTE=112 -PLUS=113 -MINUS=114 -ASTERISK=115 -SLASH=116 -PERCENT=117 -CAST_OP=118 -CONCAT=119 -DOT=120 -PARAM=121 -STRING=122 -INTEGER_VALUE=123 -DECIMAL_VALUE=124 -IDENTIFIER=125 -DIGIT_IDENTIFIER=126 -TABLE_IDENTIFIER=127 -QUOTED_IDENTIFIER=128 -BACKQUOTED_IDENTIFIER=129 -SIMPLE_COMMENT=130 -BRACKETED_COMMENT=131 -WS=132 -UNRECOGNIZED=133 +FROZEN=40 +FULL=41 +FUNCTIONS=42 +GRAPHVIZ=43 +GROUP=44 +HAVING=45 +HOUR=46 +HOURS=47 +IN=48 +INCLUDE=49 +INNER=50 +INTERVAL=51 +IS=52 +JOIN=53 +LAST=54 +LEFT=55 +LIKE=56 +LIMIT=57 +MAPPED=58 +MATCH=59 +MINUTE=60 +MINUTES=61 +MONTH=62 +MONTHS=63 +NATURAL=64 +NOT=65 +NULL=66 +NULLS=67 +ON=68 +OPTIMIZED=69 +OR=70 +ORDER=71 +OUTER=72 +PARSED=73 +PHYSICAL=74 +PLAN=75 +RIGHT=76 +RLIKE=77 +QUERY=78 +SCHEMAS=79 +SECOND=80 +SECONDS=81 +SELECT=82 +SHOW=83 +SYS=84 +TABLE=85 +TABLES=86 +TEXT=87 +THEN=88 +TRUE=89 +TO=90 +TYPE=91 +TYPES=92 +USING=93 +VERIFY=94 +WHEN=95 +WHERE=96 +WITH=97 +YEAR=98 +YEARS=99 +ESCAPE_ESC=100 +FUNCTION_ESC=101 +LIMIT_ESC=102 +DATE_ESC=103 +TIME_ESC=104 +TIMESTAMP_ESC=105 +GUID_ESC=106 +ESC_END=107 +EQ=108 +NULLEQ=109 +NEQ=110 +LT=111 +LTE=112 +GT=113 +GTE=114 +PLUS=115 +MINUS=116 +ASTERISK=117 +SLASH=118 +PERCENT=119 +CAST_OP=120 +CONCAT=121 +DOT=122 +PARAM=123 +STRING=124 +INTEGER_VALUE=125 +DECIMAL_VALUE=126 +IDENTIFIER=127 +DIGIT_IDENTIFIER=128 +TABLE_IDENTIFIER=129 +QUOTED_IDENTIFIER=130 +BACKQUOTED_IDENTIFIER=131 +SIMPLE_COMMENT=132 +BRACKETED_COMMENT=133 +WS=134 +UNRECOGNIZED=135 '('=1 ')'=2 ','=3 @@ -170,84 +172,86 @@ UNRECOGNIZED=133 'FIRST'=37 'FORMAT'=38 'FROM'=39 -'FULL'=40 -'FUNCTIONS'=41 -'GRAPHVIZ'=42 -'GROUP'=43 -'HAVING'=44 -'HOUR'=45 -'HOURS'=46 -'IN'=47 -'INNER'=48 -'INTERVAL'=49 -'IS'=50 -'JOIN'=51 -'LAST'=52 -'LEFT'=53 -'LIKE'=54 -'LIMIT'=55 -'MAPPED'=56 -'MATCH'=57 -'MINUTE'=58 -'MINUTES'=59 -'MONTH'=60 -'MONTHS'=61 -'NATURAL'=62 -'NOT'=63 -'NULL'=64 -'NULLS'=65 -'ON'=66 -'OPTIMIZED'=67 -'OR'=68 -'ORDER'=69 -'OUTER'=70 -'PARSED'=71 -'PHYSICAL'=72 -'PLAN'=73 -'RIGHT'=74 -'RLIKE'=75 -'QUERY'=76 -'SCHEMAS'=77 -'SECOND'=78 -'SECONDS'=79 -'SELECT'=80 -'SHOW'=81 -'SYS'=82 -'TABLE'=83 -'TABLES'=84 -'TEXT'=85 -'THEN'=86 -'TRUE'=87 -'TO'=88 -'TYPE'=89 -'TYPES'=90 -'USING'=91 -'VERIFY'=92 -'WHEN'=93 -'WHERE'=94 -'WITH'=95 -'YEAR'=96 -'YEARS'=97 -'{ESCAPE'=98 -'{FN'=99 -'{LIMIT'=100 -'{D'=101 -'{T'=102 -'{TS'=103 -'{GUID'=104 -'}'=105 -'='=106 -'<=>'=107 -'<'=109 -'<='=110 -'>'=111 -'>='=112 -'+'=113 -'-'=114 -'*'=115 -'/'=116 -'%'=117 -'::'=118 -'||'=119 -'.'=120 -'?'=121 +'FROZEN'=40 +'FULL'=41 +'FUNCTIONS'=42 +'GRAPHVIZ'=43 +'GROUP'=44 +'HAVING'=45 +'HOUR'=46 +'HOURS'=47 +'IN'=48 +'INCLUDE'=49 +'INNER'=50 +'INTERVAL'=51 +'IS'=52 +'JOIN'=53 +'LAST'=54 +'LEFT'=55 +'LIKE'=56 +'LIMIT'=57 +'MAPPED'=58 +'MATCH'=59 +'MINUTE'=60 +'MINUTES'=61 +'MONTH'=62 +'MONTHS'=63 +'NATURAL'=64 +'NOT'=65 +'NULL'=66 +'NULLS'=67 +'ON'=68 +'OPTIMIZED'=69 +'OR'=70 +'ORDER'=71 +'OUTER'=72 +'PARSED'=73 +'PHYSICAL'=74 +'PLAN'=75 +'RIGHT'=76 +'RLIKE'=77 +'QUERY'=78 +'SCHEMAS'=79 +'SECOND'=80 +'SECONDS'=81 +'SELECT'=82 +'SHOW'=83 +'SYS'=84 +'TABLE'=85 +'TABLES'=86 +'TEXT'=87 +'THEN'=88 +'TRUE'=89 +'TO'=90 +'TYPE'=91 +'TYPES'=92 +'USING'=93 +'VERIFY'=94 +'WHEN'=95 +'WHERE'=96 +'WITH'=97 +'YEAR'=98 +'YEARS'=99 
+'{ESCAPE'=100 +'{FN'=101 +'{LIMIT'=102 +'{D'=103 +'{T'=104 +'{TS'=105 +'{GUID'=106 +'}'=107 +'='=108 +'<=>'=109 +'<'=111 +'<='=112 +'>'=113 +'>='=114 +'+'=115 +'-'=116 +'*'=117 +'/'=118 +'%'=119 +'::'=120 +'||'=121 +'.'=122 +'?'=123 diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index 65a9410941b17..40a34dcf006b0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -288,11 +288,11 @@ private class ResolveTable extends AnalyzeRule { protected LogicalPlan rule(UnresolvedRelation plan) { TableIdentifier table = plan.table(); if (indexResolution.isValid() == false) { - return plan.unresolvedMessage().equals(indexResolution.toString()) ? plan : new UnresolvedRelation(plan.source(), - plan.table(), plan.alias(), indexResolution.toString()); + return plan.unresolvedMessage().equals(indexResolution.toString()) ? plan : + new UnresolvedRelation(plan.source(), plan.table(), plan.alias(), plan.frozen(), indexResolution.toString()); } assert indexResolution.matches(table.index()); - LogicalPlan logicalPlan = new EsRelation(plan.source(), indexResolution.get()); + LogicalPlan logicalPlan = new EsRelation(plan.source(), indexResolution.get(), plan.frozen()); SubQueryAlias sa = new SubQueryAlias(plan.source(), logicalPlan, table.index()); if (plan.alias() != null) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzer.java index 68b73cf3a019f..16e35cd8638a2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzer.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.sql.analysis.analyzer; -import org.elasticsearch.xpack.sql.plan.TableIdentifier; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.UnresolvedRelation; @@ -23,9 +22,9 @@ public class PreAnalyzer { public static class PreAnalysis { public static final PreAnalysis EMPTY = new PreAnalysis(emptyList()); - public final List indices; + public final List indices; - PreAnalysis(List indices) { + PreAnalysis(List indices) { this.indices = indices; } } @@ -39,9 +38,9 @@ public PreAnalysis preAnalyze(LogicalPlan plan) { } private PreAnalysis doPreAnalyze(LogicalPlan plan) { - List indices = new ArrayList<>(); + List indices = new ArrayList<>(); - plan.forEachUp(p -> indices.add(p.table()), UnresolvedRelation.class); + plan.forEachUp(p -> indices.add(new TableInfo(p.table(), p.frozen())), UnresolvedRelation.class); // mark plan as preAnalyzed (if it were marked, there would be no analysis) plan.forEachUp(LogicalPlan::setPreAnalyzed); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/TableInfo.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/TableInfo.java new file mode 100644 index 0000000000000..479b094fad5bc --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/TableInfo.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.analysis.analyzer; + +import org.elasticsearch.xpack.sql.plan.TableIdentifier; + +public class TableInfo { + + private final TableIdentifier id; + private final boolean isFrozen; + + TableInfo(TableIdentifier id, boolean isFrozen) { + this.id = id; + this.isFrozen = isFrozen; + } + + public TableIdentifier id() { + return id; + } + + public boolean isFrozen() { + return isFrozen; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index db84a444f5794..d5a4cb436e6a5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -63,6 +63,7 @@ import static org.elasticsearch.xpack.sql.stats.FeatureMetric.LOCAL; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.ORDERBY; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.WHERE; +import static org.elasticsearch.xpack.sql.type.DataType.GEO_SHAPE; /** * The verifier has the role of checking the analyzed tree for failures and build a list of failures following this check. @@ -131,7 +132,6 @@ Collection verify(LogicalPlan plan) { // start bottom-up plan.forEachUp(p -> { - if (p.analyzed()) { return; } @@ -236,6 +236,7 @@ Collection verify(LogicalPlan plan) { checkForScoreInsideFunctions(p, localFailures); checkNestedUsedInGroupByOrHaving(p, localFailures); + checkForGeoFunctionsOnDocValues(p, localFailures); // everything checks out // mark the plan as analyzed @@ -719,4 +720,33 @@ private static void checkNestedUsedInGroupByOrHaving(LogicalPlan p, Set fail(nested.get(0), "HAVING isn't (yet) compatible with nested fields " + new AttributeSet(nested).names())); } } + + /** + * Makes sure that geo shapes do not appear in filter, aggregation and sorting contexts + */ + private static void checkForGeoFunctionsOnDocValues(LogicalPlan p, Set localFailures) { + + p.forEachDown(f -> { + f.condition().forEachUp(fa -> { + if (fa.field().getDataType() == GEO_SHAPE) { + localFailures.add(fail(fa, "geo shapes cannot be used for filtering")); + } + }, FieldAttribute.class); + }, Filter.class); + + // geo shape fields shouldn't be used in aggregates or having (yet) + p.forEachDown(a -> a.groupings().forEach(agg -> agg.forEachUp(fa -> { + if (fa.field().getDataType() == GEO_SHAPE) { + localFailures.add(fail(fa, "geo shapes cannot be used in grouping")); + } + }, FieldAttribute.class)), Aggregate.class); + + + // geo shape fields shouldn't be used in order by clauses + p.forEachDown(o -> o.order().forEach(agg -> agg.forEachUp(fa -> { + if (fa.field().getDataType() == GEO_SHAPE) { + localFailures.add(fail(fa, "geo shapes cannot be used for sorting")); + } + }, FieldAttribute.class)), OrderBy.class); + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java index 8f51fa65b7463..2fb5028e987e8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java @@ 
-23,6 +23,7 @@ import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DateEsField; @@ -42,7 +43,6 @@ import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; @@ -57,38 +57,39 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; +import static org.elasticsearch.action.ActionListener.wrap; public class IndexResolver { public enum IndexType { - - INDEX("BASE TABLE"), - ALIAS("VIEW"), + STANDARD_INDEX("BASE TABLE", "INDEX"), + ALIAS("VIEW", "ALIAS"), + FROZEN_INDEX("BASE TABLE", "FROZEN INDEX"), // value for user types unrecognized - UNKNOWN("UNKNOWN"); + UNKNOWN("UNKNOWN", "UNKNOWN"); + + public static final String SQL_BASE_TABLE = "BASE TABLE"; + public static final String SQL_TABLE = "TABLE"; + public static final String SQL_VIEW = "VIEW"; - public static final EnumSet VALID = EnumSet.of(INDEX, ALIAS); + public static final EnumSet VALID_INCLUDE_FROZEN = EnumSet.of(STANDARD_INDEX, ALIAS, FROZEN_INDEX); + public static final EnumSet VALID_REGULAR = EnumSet.of(STANDARD_INDEX, ALIAS); + public static final EnumSet INDICES_ONLY = EnumSet.of(STANDARD_INDEX, FROZEN_INDEX); private final String toSql; + private final String toNative; - IndexType(String sql) { + IndexType(String sql, String toNative) { this.toSql = sql; + this.toNative = toNative; } public String toSql() { return toSql; } - - public static IndexType from(String name) { - if (name != null) { - name = name.toUpperCase(Locale.ROOT); - for (IndexType type : IndexType.VALID) { - if (type.toSql.equals(name)) { - return type; - } - } - } - return IndexType.UNKNOWN; + + public String toNative() { + return toNative; } } @@ -136,7 +137,17 @@ public boolean equals(Object obj) { } private static final IndicesOptions INDICES_ONLY_OPTIONS = new IndicesOptions( + EnumSet.of(Option.ALLOW_NO_INDICES, Option.IGNORE_UNAVAILABLE, Option.IGNORE_ALIASES, Option.IGNORE_THROTTLED), + EnumSet.of(WildcardStates.OPEN)); + private static final IndicesOptions FROZEN_INDICES_OPTIONS = new IndicesOptions( EnumSet.of(Option.ALLOW_NO_INDICES, Option.IGNORE_UNAVAILABLE, Option.IGNORE_ALIASES), EnumSet.of(WildcardStates.OPEN)); + + public static final IndicesOptions FIELD_CAPS_INDICES_OPTIONS = new IndicesOptions( + EnumSet.of(Option.ALLOW_NO_INDICES, Option.IGNORE_UNAVAILABLE, Option.IGNORE_THROTTLED), EnumSet.of(WildcardStates.OPEN)); + public static final IndicesOptions FIELD_CAPS_FROZEN_INDICES_OPTIONS = new IndicesOptions( + EnumSet.of(Option.ALLOW_NO_INDICES, Option.IGNORE_UNAVAILABLE), EnumSet.of(WildcardStates.OPEN)); + + private static final List FIELD_NAMES_BLACKLIST = Arrays.asList("_size"); private static final String UNMAPPED = "unmapped"; @@ -161,7 +172,8 @@ public void resolveNames(String indexWildcard, String javaRegex, EnumSet - resolveIndices(indices, javaRegex, aliases, retrieveIndices, listener), + client.admin().indices().getAliases(aliasRequest, wrap(aliases -> + resolveIndices(indices, javaRegex, aliases, retrieveIndices, retrieveFrozenIndices, listener), ex -> { // with security, two exception can be thrown: // INFE - if no alias matches 
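For context, the reworked IndexType constants give callers a direct way to opt frozen indices in or out when resolving names. A minimal sketch, assuming only the EnumSet constants introduced in the hunk above; the helper class and method name below are hypothetical and not part of this changeset:

    import java.util.EnumSet;
    import org.elasticsearch.xpack.sql.analysis.index.IndexResolver.IndexType;

    // Hypothetical helper: choose which index types to resolve based on a
    // user-supplied "include frozen" flag (constants come from the hunk above).
    class IndexTypeSelection {
        static EnumSet<IndexType> typesFor(boolean includeFrozen) {
            return includeFrozen ? IndexType.VALID_INCLUDE_FROZEN : IndexType.VALID_REGULAR;
        }
    }
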
@@ -179,36 +191,46 @@ public void resolveNames(String indexWildcard, String javaRegex, EnumSet> listener) { + boolean retrieveIndices, boolean retrieveFrozenIndices, ActionListener> listener) { - if (retrieveIndices) { + if (retrieveIndices || retrieveFrozenIndices) { + GetIndexRequest indexRequest = new GetIndexRequest() .local(true) .indices(indices) .features(Feature.SETTINGS) .includeDefaults(false) .indicesOptions(INDICES_ONLY_OPTIONS); + + // if frozen indices are requested, make sure to update the request accordingly + if (retrieveFrozenIndices) { + indexRequest.indicesOptions(FROZEN_INDICES_OPTIONS); + } client.admin().indices().getIndex(indexRequest, - ActionListener.wrap(response -> filterResults(javaRegex, aliases, response, listener), + wrap(response -> filterResults(javaRegex, aliases, response, retrieveIndices, retrieveFrozenIndices, listener), listener::onFailure)); + } else { - filterResults(javaRegex, aliases, null, listener); + filterResults(javaRegex, aliases, null, false, false, listener); } } private void filterResults(String javaRegex, GetAliasesResponse aliases, GetIndexResponse indices, + // these are needed to filter out the different results from the same index response + boolean retrieveIndices, + boolean retrieveFrozenIndices, ActionListener> listener) { // since the index name does not support ?, filter the results manually @@ -226,12 +248,16 @@ private void filterResults(String javaRegex, GetAliasesResponse aliases, GetInde } } } + // filter indices (if present) String[] indicesNames = indices != null ? indices.indices() : null; if (indicesNames != null) { for (String indexName : indicesNames) { + boolean isFrozen = retrieveFrozenIndices + && IndexSettings.INDEX_SEARCH_THROTTLED.get(indices.getSettings().get(indexName)) == Boolean.TRUE; + if (pattern == null || pattern.matcher(indexName).matches()) { - result.add(new IndexInfo(indexName, IndexType.INDEX)); + result.add(new IndexInfo(indexName, isFrozen ? IndexType.FROZEN_INDEX : IndexType.STANDARD_INDEX)); } } } @@ -242,8 +268,9 @@ private void filterResults(String javaRegex, GetAliasesResponse aliases, GetInde /** * Resolves a pattern to one (potentially compound meaning that spawns multiple indices) mapping. 
*/ - public void resolveAsMergedMapping(String indexWildcard, String javaRegex, ActionListener listener) { - FieldCapabilitiesRequest fieldRequest = createFieldCapsRequest(indexWildcard); + public void resolveAsMergedMapping(String indexWildcard, String javaRegex, boolean includeFrozen, + ActionListener listener) { + FieldCapabilitiesRequest fieldRequest = createFieldCapsRequest(indexWildcard, includeFrozen); client.fieldCaps(fieldRequest, ActionListener.wrap( response -> listener.onResponse(mergedMappings(indexWildcard, response.getIndices(), response.get())), @@ -287,7 +314,7 @@ static IndexResolution mergedMappings(String indexPattern, String[] indexNames, // type is okay, check aggregation else { FieldCapabilities fieldCap = types.values().iterator().next(); - + // validate search/agg-able if (fieldCap.isAggregatable() && fieldCap.nonAggregatableIndices() != null) { errorMessage.append("mapped as aggregatable except in "); @@ -305,16 +332,16 @@ static IndexResolution mergedMappings(String indexPattern, String[] indexNames, return new InvalidMappedField(n, errorMessage.toString()); } } - + // everything checks return null; }); - + if (indices.size() != 1) { throw new SqlIllegalArgumentException("Incorrect merging of mappings (likely due to a bug) - expect 1 but found [{}]", indices.size()); } - + return IndexResolution.valid(indices.get(0)); } @@ -356,7 +383,7 @@ private static EsField createField(String fieldName, Map> listener) { - FieldCapabilitiesRequest fieldRequest = createFieldCapsRequest(indexWildcard); + public void resolveAsSeparateMappings(String indexWildcard, String javaRegex, boolean includeFrozen, + ActionListener> listener) { + FieldCapabilitiesRequest fieldRequest = createFieldCapsRequest(indexWildcard, includeFrozen); client.fieldCaps(fieldRequest, ActionListener.wrap( response -> listener.onResponse(separateMappings(indexWildcard, javaRegex, response.getIndices(), response.get())), listener::onFailure)); - + } static List separateMappings(String indexPattern, String javaRegex, String[] indexNames, @@ -430,7 +467,7 @@ private static List buildIndices(String[] indexNames, String javaRegex, for (Entry> entry : sortedFields) { String fieldName = entry.getKey(); Map types = entry.getValue(); - + // ignore size added by the mapper plugin if (FIELD_NAMES_BLACKLIST.contains(fieldName)) { continue; @@ -438,7 +475,7 @@ private static List buildIndices(String[] indexNames, String javaRegex, // apply verification final InvalidMappedField invalidField = validityVerifier.apply(fieldName, types); - + // filter meta fields and unmapped FieldCapabilities unmapped = types.get(UNMAPPED); Set unmappedIndices = unmapped != null ? new HashSet<>(asList(unmapped.indices())) : emptySet(); @@ -447,7 +484,7 @@ private static List buildIndices(String[] indexNames, String javaRegex, for (Entry typeEntry : types.entrySet()) { FieldCapabilities typeCap = typeEntry.getValue(); String[] capIndices = typeCap.indices(); - + // Skip internal fields (name starting with underscore and its type reported by field_caps starts // with underscore as well). A meta field named "_version", for example, has the type named "_version". 
if (typeEntry.getKey().startsWith("_") && typeCap.getType().startsWith("_")) { @@ -483,9 +520,9 @@ private static List buildIndices(String[] indexNames, String javaRegex, } EsField field = indexFields.flattedMapping.get(fieldName); if (field == null || (invalidField != null && (field instanceof InvalidMappedField) == false)) { - createField(fieldName, fieldCaps, indexFields.hierarchicalMapping, indexFields.flattedMapping, s -> - invalidField != null ? invalidField : - createField(s, typeCap.getType(), emptyMap(), typeCap.isAggregatable())); + createField(fieldName, fieldCaps, indexFields.hierarchicalMapping, indexFields.flattedMapping, + s -> invalidField != null ? invalidField : createField(s, typeCap.getType(), emptyMap(), + typeCap.isAggregatable())); } } } @@ -500,14 +537,4 @@ private static List buildIndices(String[] indexNames, String javaRegex, foundIndices.sort(Comparator.comparing(EsIndex::name)); return foundIndices; } - - private static FieldCapabilitiesRequest createFieldCapsRequest(String index) { - return new FieldCapabilitiesRequest() - .indices(Strings.commaDelimitedListToStringArray(index)) - .fields("*") - .includeUnmapped(true) - //lenient because we throw our own errors looking at the response e.g. if something was not resolved - //also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable - .indicesOptions(IndicesOptions.lenientExpandOpen()); - } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java index b09e98d11c17d..34fab72ca1385 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java @@ -52,13 +52,16 @@ public class CompositeAggregationCursor implements Cursor { private final List extractors; private final BitSet mask; private final int limit; + private final boolean includeFrozen; - CompositeAggregationCursor(byte[] next, List exts, BitSet mask, int remainingLimit, String... indices) { + CompositeAggregationCursor(byte[] next, List exts, BitSet mask, int remainingLimit, boolean includeFrozen, + String... 
indices) { this.indices = indices; this.nextQuery = next; this.extractors = exts; this.mask = mask; this.limit = remainingLimit; + this.includeFrozen = includeFrozen; } public CompositeAggregationCursor(StreamInput in) throws IOException { @@ -68,6 +71,7 @@ public CompositeAggregationCursor(StreamInput in) throws IOException { extractors = in.readNamedWriteableList(BucketExtractor.class); mask = BitSet.valueOf(in.readByteArray()); + includeFrozen = in.readBoolean(); } @Override @@ -78,6 +82,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeNamedWriteableList(extractors); out.writeByteArray(mask.toByteArray()); + out.writeBoolean(includeFrozen); + } @Override @@ -105,6 +111,10 @@ int limit() { return limit; } + boolean includeFrozen() { + return includeFrozen; + } + @Override public void nextPage(Configuration cfg, Client client, NamedWriteableRegistry registry, ActionListener listener) { SearchSourceBuilder q; @@ -120,7 +130,7 @@ public void nextPage(Configuration cfg, Client client, NamedWriteableRegistry re log.trace("About to execute composite query {} on {}", StringUtils.toString(query), indices); } - SearchRequest search = Querier.prepareRequest(client, query, cfg.pageTimeout(), indices); + SearchRequest search = Querier.prepareRequest(client, query, cfg.pageTimeout(), includeFrozen, indices); client.search(search, new ActionListener() { @Override @@ -134,7 +144,8 @@ public void onResponse(SearchResponse r) { } updateCompositeAfterKey(r, query); - CompositeAggsRowSet rowSet = new CompositeAggsRowSet(extractors, mask, r, limit, serializeQuery(query), indices); + CompositeAggsRowSet rowSet = new CompositeAggsRowSet(extractors, mask, r, limit, serializeQuery(query), includeFrozen, + indices); listener.onResponse(rowSet); } catch (Exception ex) { listener.onFailure(ex); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggsRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggsRowSet.java index fbbc839fe1c76..f93e00eac5ac9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggsRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggsRowSet.java @@ -28,7 +28,8 @@ class CompositeAggsRowSet extends ResultRowSet { private final int size; private int row = 0; - CompositeAggsRowSet(List exts, BitSet mask, SearchResponse response, int limit, byte[] next, String... indices) { + CompositeAggsRowSet(List exts, BitSet mask, SearchResponse response, int limit, byte[] next, + boolean includeFrozen, String... 
indices) { super(exts, mask); CompositeAggregation composite = CompositeAggregationCursor.getComposite(response); @@ -53,7 +54,7 @@ class CompositeAggsRowSet extends ResultRowSet { if (next == null || size == 0 || remainingLimit == 0) { cursor = Cursor.EMPTY; } else { - cursor = new CompositeAggregationCursor(next, exts, mask, remainingLimit, indices); + cursor = new CompositeAggregationCursor(next, exts, mask, remainingLimit, includeFrozen, indices); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index fec7000a78780..17e5a79fa4a71 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -27,6 +27,7 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filters; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.analysis.index.IndexResolver; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.execution.search.extractor.BucketExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.CompositeKeyExtractor; @@ -108,7 +109,8 @@ public void query(List output, QueryContainer query, String index, Ac log.trace("About to execute query {} on {}", StringUtils.toString(sourceBuilder), index); } - SearchRequest search = prepareRequest(client, sourceBuilder, timeout, Strings.commaDelimitedListToStringArray(index)); + SearchRequest search = prepareRequest(client, sourceBuilder, timeout, query.shouldIncludeFrozen(), + Strings.commaDelimitedListToStringArray(index)); @SuppressWarnings("rawtypes") List> sortingColumns = query.sortingColumns(); @@ -130,13 +132,16 @@ public void query(List output, QueryContainer query, String index, Ac client.search(search, l); } - public static SearchRequest prepareRequest(Client client, SearchSourceBuilder source, TimeValue timeout, String... indices) { + public static SearchRequest prepareRequest(Client client, SearchSourceBuilder source, TimeValue timeout, boolean includeFrozen, + String... indices) { SearchRequest search = client.prepareSearch(indices) // always track total hits accurately .setTrackTotalHits(true) .setAllowPartialSearchResults(false) .setSource(source) .setTimeout(timeout) + .setIndicesOptions( + includeFrozen ? IndexResolver.FIELD_CAPS_FROZEN_INDICES_OPTIONS : IndexResolver.FIELD_CAPS_INDICES_OPTIONS) .request(); return search; } @@ -175,7 +180,7 @@ class LocalAggregationSorterListener implements ActionListener { } } - this.data = new PriorityQueue, Integer>>(size) { + this.data = new PriorityQueue<>(size) { // compare row based on the received attribute sort // if a sort item is not in the list, it is assumed the sorting happened in ES @@ -389,6 +394,7 @@ protected void handleResponse(SearchResponse response, ActionListener exts, BitSet mask, SearchResponse response, int limitAggs, byte[] next, + boolean includeFrozen, String... 
indices) { - super(exts, mask, response, limitAggs, next, indices); + super(exts, mask, response, limitAggs, next, includeFrozen, indices); this.schema = schema; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java index 652197473abf4..13294fbca221b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java @@ -5,13 +5,17 @@ */ package org.elasticsearch.xpack.sql.execution.search.extractor; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.DateUtils; @@ -128,13 +132,31 @@ private Object unwrapMultiValue(Object values) { if (list.isEmpty()) { return null; } else { - if (arrayLeniency || list.size() == 1) { - return unwrapMultiValue(list.get(0)); - } else { - throw new SqlIllegalArgumentException("Arrays (returned by [{}]) are not supported", fieldName); + // let's make sure first that we are not dealing with an geo_point represented as an array + if (isGeoPointArray(list) == false) { + if (list.size() == 1 || arrayLeniency) { + return unwrapMultiValue(list.get(0)); + } else { + throw new SqlIllegalArgumentException("Arrays (returned by [{}]) are not supported", fieldName); + } } } } + if (dataType == DataType.GEO_POINT) { + try { + GeoPoint geoPoint = GeoUtils.parseGeoPoint(values, true); + return new GeoShape(geoPoint.lon(), geoPoint.lat()); + } catch (ElasticsearchParseException ex) { + throw new SqlIllegalArgumentException("Cannot parse geo_point value [{}] (returned by [{}])", values, fieldName); + } + } + if (dataType == DataType.GEO_SHAPE) { + try { + return new GeoShape(values); + } catch (IOException ex) { + throw new SqlIllegalArgumentException("Cannot read geo_shape value [{}] (returned by [{}])", values, fieldName); + } + } if (values instanceof Map) { throw new SqlIllegalArgumentException("Objects (returned by [{}]) are not supported", fieldName); } @@ -149,6 +171,17 @@ private Object unwrapMultiValue(Object values) { throw new SqlIllegalArgumentException("Type {} (returned by [{}]) is not supported", values.getClass().getSimpleName(), fieldName); } + private boolean isGeoPointArray(List list) { + if (dataType != DataType.GEO_POINT) { + return false; + } + // we expect the point in [lon lat] or [lon lat alt] formats + if (list.size() > 3 || list.size() < 1) { + return false; + } + return list.get(0) instanceof Number; + } + @SuppressWarnings({ "unchecked", "rawtypes" }) Object extractFromSource(Map map) { Object value = null; @@ -173,7 +206,9 @@ Object extractFromSource(Map map) { if (node instanceof List) { List listOfValues = (List) node; - if (listOfValues.size() == 1 || arrayLeniency) { + // we can 
only do this optimization until the last element of our pass since geo points are using arrays + // and we don't want to blindly ignore the second element of array if arrayLeniency is enabled + if ((i < path.length - 1) && (listOfValues.size() == 1 || arrayLeniency)) { // this is a List with a size of 1 e.g.: {"a" : [{"b" : "value"}]} meaning the JSON is a list with one element // or a list of values with one element e.g.: {"a": {"b" : ["value"]}} // in case of being lenient about arrays, just extract the first value in the array diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java index f6e1e3ad8be69..d382dad83a19d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/TypeResolutions.java @@ -57,6 +57,11 @@ public static TypeResolution isNumericOrDateOrTime(Expression e, String operatio "date", "time", "datetime", "numeric"); } + + public static TypeResolution isGeo(Expression e, String operationName, ParamOrdinal paramOrd) { + return isType(e, DataType::isGeo, operationName, paramOrd, "geo_point", "geo_shape"); + } + public static TypeResolution isExact(Expression e, String message) { if (e instanceof FieldAttribute) { EsField.Exact exact = ((FieldAttribute) e).getExactInfo(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java index 0e9f07ef2132c..3a9ae06203476 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java @@ -46,6 +46,13 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.SecondOfMinute; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.WeekOfYear; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Year; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StAswkt; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistance; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StGeometryType; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StWkttosql; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StX; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StY; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StZ; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ACos; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ASin; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ATan; @@ -249,11 +256,23 @@ private void defineDefaultFunctions() { def(Space.class, Space::new, "SPACE"), def(Substring.class, Substring::new, "SUBSTRING"), def(UCase.class, UCase::new, "UCASE")); + // DataType conversion addToMap(def(Cast.class, Cast::new, "CAST", "CONVERT")); // Scalar "meta" functions addToMap(def(Database.class, Database::new, "DATABASE"), def(User.class, User::new, "USER")); + + // Geo Functions + addToMap(def(StAswkt.class, StAswkt::new, "ST_ASWKT", "ST_ASTEXT"), + def(StDistance.class, StDistance::new, "ST_DISTANCE"), + def(StWkttosql.class, StWkttosql::new, 
"ST_WKTTOSQL", "ST_GEOMFROMTEXT"), + def(StGeometryType.class, StGeometryType::new, "ST_GEOMETRYTYPE"), + def(StX.class, StX::new, "ST_X"), + def(StY.class, StY::new, "ST_Y"), + def(StZ.class, StZ::new, "ST_Z") + ); + // Special addToMap(def(Score.class, Score::new, "SCORE")); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java index d14aeea507f47..0b9bbd1094a44 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java @@ -11,6 +11,9 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistanceProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StWkttosqlProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryOptionalMathProcessor; @@ -98,6 +101,10 @@ public static List getNamedWriteables() { entries.add(new Entry(Processor.class, LocateFunctionProcessor.NAME, LocateFunctionProcessor::new)); entries.add(new Entry(Processor.class, ReplaceFunctionProcessor.NAME, ReplaceFunctionProcessor::new)); entries.add(new Entry(Processor.class, SubstringFunctionProcessor.NAME, SubstringFunctionProcessor::new)); + // geo + entries.add(new Entry(Processor.class, GeoProcessor.NAME, GeoProcessor::new)); + entries.add(new Entry(Processor.class, StWkttosqlProcessor.NAME, StWkttosqlProcessor::new)); + entries.add(new Entry(Processor.class, StDistanceProcessor.NAME, StDistanceProcessor::new)); return entries; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessor.java new file mode 100644 index 0000000000000..519e4c0c74092 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessor.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; + +import java.io.IOException; +import java.util.function.Function; + +public class GeoProcessor implements Processor { + + private interface GeoShapeFunction { + default R apply(Object o) { + if (o instanceof GeoShape) { + return doApply((GeoShape) o); + } else { + throw new SqlIllegalArgumentException("A geo_point or geo_shape is required; received [{}]", o); + } + } + + R doApply(GeoShape s); + } + + public enum GeoOperation { + ASWKT(GeoShape::toString), + GEOMETRY_TYPE(GeoShape::getGeometryType), + X(GeoShape::getX), + Y(GeoShape::getY), + Z(GeoShape::getZ); + + private final Function apply; + + GeoOperation(GeoShapeFunction apply) { + this.apply = l -> l == null ? null : apply.apply(l); + } + + public final Object apply(Object l) { + return apply.apply(l); + } + } + + public static final String NAME = "geo"; + + private final GeoOperation processor; + + public GeoProcessor(GeoOperation processor) { + this.processor = processor; + } + + public GeoProcessor(StreamInput in) throws IOException { + processor = in.readEnum(GeoOperation.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeEnum(processor); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public Object process(Object input) { + return processor.apply(input); + } + + GeoOperation processor() { + return processor; + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + GeoProcessor other = (GeoProcessor) obj; + return processor == other.processor; + } + + @Override + public int hashCode() { + return processor.hashCode(); + } + + @Override + public String toString() { + return processor.toString(); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java new file mode 100644 index 0000000000000..74b5c9646b853 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java @@ -0,0 +1,222 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.geo.GeometryParser; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.geo.geometry.Circle; +import org.elasticsearch.geo.geometry.Geometry; +import org.elasticsearch.geo.geometry.GeometryCollection; +import org.elasticsearch.geo.geometry.GeometryVisitor; +import org.elasticsearch.geo.geometry.Line; +import org.elasticsearch.geo.geometry.LinearRing; +import org.elasticsearch.geo.geometry.MultiLine; +import org.elasticsearch.geo.geometry.MultiPoint; +import org.elasticsearch.geo.geometry.MultiPolygon; +import org.elasticsearch.geo.geometry.Point; +import org.elasticsearch.geo.geometry.Polygon; +import org.elasticsearch.geo.geometry.Rectangle; +import org.elasticsearch.geo.utils.WellKnownText; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; + +import java.io.IOException; +import java.io.InputStream; +import java.text.ParseException; +import java.util.Objects; + +/** + * Wrapper class to represent a GeoShape in SQL + * + * It is required to override the XContent serialization. The ShapeBuilder serializes using GeoJSON by default, + * but in SQL we need the serialization to be WKT-based. + */ +public class GeoShape implements ToXContentFragment, NamedWriteable { + + public static final String NAME = "geo"; + + private final Geometry shape; + + public GeoShape(double lon, double lat) { + shape = new Point(lat, lon); + } + + public GeoShape(Object value) throws IOException { + try { + shape = parse(value); + } catch (ParseException ex) { + throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape value", ex); + } + } + + public GeoShape(StreamInput in) throws IOException { + String value = in.readString(); + try { + shape = parse(value); + } catch (ParseException ex) { + throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape value", ex); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(WellKnownText.toWKT(shape)); + } + + @Override + public String toString() { + return WellKnownText.toWKT(shape); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.value(WellKnownText.toWKT(shape)); + } + + public Geometry toGeometry() { + return shape; + } + + public Point firstPoint() { + return shape.visit(new GeometryVisitor() { + @Override + public Point visit(Circle circle) { + return new Point(circle.getLat(), circle.getLon(), circle.hasAlt() ? circle.getAlt() : Double.NaN); + } + + @Override + public Point visit(GeometryCollection collection) { + if (collection.size() > 0) { + return collection.get(0).visit(this); + } + return null; + } + + @Override + public Point visit(Line line) { + if (line.length() > 0) { + return new Point(line.getLat(0), line.getLon(0), line.hasAlt() ? 
line.getAlt(0) : Double.NaN); + } + return null; + } + + @Override + public Point visit(LinearRing ring) { + return visit((Line) ring); + } + + @Override + public Point visit(MultiLine multiLine) { + return visit((GeometryCollection) multiLine); + } + + @Override + public Point visit(MultiPoint multiPoint) { + return visit((GeometryCollection) multiPoint); + } + + @Override + public Point visit(MultiPolygon multiPolygon) { + return visit((GeometryCollection) multiPolygon); + } + + @Override + public Point visit(Point point) { + return point; + } + + @Override + public Point visit(Polygon polygon) { + return visit(polygon.getPolygon()); + } + + @Override + public Point visit(Rectangle rectangle) { + return new Point(rectangle.getMinLat(), rectangle.getMinLon(), rectangle.getMinAlt()); + } + }); + } + + public Double getX() { + Point firstPoint = firstPoint(); + return firstPoint != null ? firstPoint.getLon() : null; + } + + public Double getY() { + Point firstPoint = firstPoint(); + return firstPoint != null ? firstPoint.getLat() : null; + } + + public Double getZ() { + Point firstPoint = firstPoint(); + return firstPoint != null && firstPoint.hasAlt() ? firstPoint.getAlt() : null; + } + + public String getGeometryType() { + return toGeometry().type().name(); + } + + public static double distance(GeoShape shape1, GeoShape shape2) { + if (shape1.shape instanceof Point == false) { + throw new SqlIllegalArgumentException("distance calculation is only supported for points; received [{}]", shape1); + } + if (shape2.shape instanceof Point == false) { + throw new SqlIllegalArgumentException("distance calculation is only supported for points; received [{}]", shape2); + } + double srcLat = ((Point) shape1.shape).getLat(); + double srcLon = ((Point) shape1.shape).getLon(); + double dstLat = ((Point) shape2.shape).getLat(); + double dstLon = ((Point) shape2.shape).getLon(); + return GeoUtils.arcDistance(srcLat, srcLon, dstLat, dstLon); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GeoShape geoShape = (GeoShape) o; + return shape.equals(geoShape.shape); + } + + @Override + public int hashCode() { + return Objects.hash(shape); + } + + @Override + public String getWriteableName() { + return NAME; + } + + private static Geometry parse(Object value) throws IOException, ParseException { + XContentBuilder content = JsonXContent.contentBuilder(); + content.startObject(); + content.field("value", value); + content.endObject(); + + try (InputStream stream = BytesReference.bytes(content).streamInput(); + XContentParser parser = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + parser.nextToken(); // start object + parser.nextToken(); // field name + parser.nextToken(); // field value + return GeometryParser.parse(parser, true, true, true); + } + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StAswkt.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StAswkt.java new file mode 100644 index 0000000000000..5c4b6edbe87eb --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StAswkt.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
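A minimal usage sketch of the GeoShape/GeoProcessor pair defined above; the class and coordinate values are illustrative only:

    import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor;
    import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation;
    import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape;

    class GeoProcessorSketch {
        public static void main(String[] args) {
            GeoShape point = new GeoShape(-71.34, 41.12);                        // lon, lat
            Object wkt = new GeoProcessor(GeoOperation.ASWKT).process(point);
            System.out.println(wkt);                                             // WKT form, e.g. POINT (-71.34 41.12)
            System.out.println(new GeoProcessor(GeoOperation.X).process(point)); // -71.34 (longitude of the first point)
        }
    }
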
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * ST_AsWKT function that takes a geometry and returns its Well Known Text representation + */ +public class StAswkt extends UnaryGeoFunction { + + public StAswkt(Source source, Expression field) { + super(source, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StAswkt::new, field()); + } + + @Override + protected StAswkt replaceChild(Expression newChild) { + return new StAswkt(source(), newChild); + } + + @Override + protected GeoOperation operation() { + return GeoOperation.ASWKT; + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistance.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistance.java new file mode 100644 index 0000000000000..fd14e90dd9d93 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistance.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isGeo; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + +/** + * Calculates the distance between two points + */ +public class StDistance extends BinaryOperator { + + private static final StDistanceFunction FUNCTION = new StDistanceFunction(); + + public StDistance(Source source, Expression source1, Expression source2) { + super(source, source1, source2, FUNCTION); + } + + @Override + protected StDistance replaceChildren(Expression newLeft, Expression newRight) { + return new StDistance(source(), newLeft, newRight); + } + + @Override + public DataType dataType() { + return DataType.DOUBLE; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StDistance::new, left(), right()); + } + + @Override + public ScriptTemplate scriptWithField(FieldAttribute field) { + return new ScriptTemplate(processScript("{sql}.geoDocValue(doc,{})"), + paramsBuilder().variable(field.exactAttribute().name()).build(), + dataType()); + } + + @Override + protected TypeResolution resolveInputType(Expression e, Expressions.ParamOrdinal paramOrdinal) { + return isGeo(e, sourceText(), paramOrdinal); + } + + @Override + public StDistance swapLeftAndRight() { + return new StDistance(source(), right(), left()); + } + + @Override + protected Pipe makePipe() { + return new StDistancePipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right())); + } + + @Override + protected String scriptMethodName() { + return "stDistance"; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceFunction.java new file mode 100644 index 0000000000000..d1c15c1e2a1b2 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceFunction.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.xpack.sql.expression.predicate.PredicateBiFunction; + +class StDistanceFunction implements PredicateBiFunction { + + @Override + public String name() { + return "ST_DISTANCE"; + } + + @Override + public String symbol() { + return "ST_DISTANCE"; + } + + @Override + public Double doApply(Object s1, Object s2) { + return StDistanceProcessor.process(s1, s2); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistancePipe.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistancePipe.java new file mode 100644 index 0000000000000..c944266482651 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistancePipe.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe; +import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; + +import java.util.Objects; + +public class StDistancePipe extends BinaryPipe { + + public StDistancePipe(Source source, Expression expression, Pipe left, Pipe right) { + super(source, expression, left, right); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StDistancePipe::new, expression(), left(), right()); + } + + @Override + protected BinaryPipe replaceChildren(Pipe left, Pipe right) { + return new StDistancePipe(source(), expression(), left, right); + } + + @Override + public StDistanceProcessor asProcessor() { + return new StDistanceProcessor(left().asProcessor(), right().asProcessor()); + } + + @Override + public int hashCode() { + return Objects.hash(left(), right()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + StDistancePipe other = (StDistancePipe) obj; + return Objects.equals(left(), other.left()) + && Objects.equals(right(), other.right()); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessor.java new file mode 100644 index 0000000000000..d6c9026b982d9 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessor.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.gen.processor.BinaryProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; + +import java.io.IOException; +import java.util.Objects; + +public class StDistanceProcessor extends BinaryProcessor { + + public static final String NAME = "geo_distance"; + + public StDistanceProcessor(Processor source1, Processor source2) { + super(source1, source2); + } + + public StDistanceProcessor(StreamInput in) throws IOException { + super(in); + } + + @Override + protected void doWrite(StreamOutput out) throws IOException { + + } + + @Override + public Object process(Object input) { + Object l = left().process(input); + checkParameter(l); + Object r = right().process(input); + checkParameter(r); + return doProcess(l, r); + } + + @Override + protected Object doProcess(Object left, Object right) { + return process(left, right); + } + + public static Double process(Object source1, Object source2) { + if (source1 == null || source2 == null) { + return null; + } + + if (source1 instanceof GeoShape == false) { + throw new SqlIllegalArgumentException("A geo_point or geo_shape with type point is required; received [{}]", source1); + } + if (source2 instanceof GeoShape == false) { + throw new SqlIllegalArgumentException("A geo_point or geo_shape with type point is required; received [{}]", source2); + } + return GeoShape.distance((GeoShape) source1, (GeoShape) source2); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + StDistanceProcessor other = (StDistanceProcessor) obj; + return Objects.equals(left(), other.left()) + && Objects.equals(right(), other.right()); + } + + @Override + public int hashCode() { + return Objects.hash(left(), right()); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StGeometryType.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StGeometryType.java new file mode 100644 index 0000000000000..15215bd9201de --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StGeometryType.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
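A small sketch of the static distance helper introduced in StDistanceProcessor; the class and coordinates below are illustrative, and both arguments must be point shapes:

    import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape;
    import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistanceProcessor;

    class StDistanceSketch {
        public static void main(String[] args) {
            GeoShape berlin = new GeoShape(13.4050, 52.5200); // lon, lat
            GeoShape paris = new GeoShape(2.3522, 48.8566);
            // Delegates to GeoShape.distance -> GeoUtils.arcDistance; the result is in meters (roughly 8.8e5 here).
            System.out.println(StDistanceProcessor.process(berlin, paris));
        }
    }
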
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * ST_GEOMETRY_TYPE function that takes a geometry and returns its type + */ +public class StGeometryType extends UnaryGeoFunction { + + public StGeometryType(Source source, Expression field) { + super(source, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StGeometryType::new, field()); + } + + @Override + protected StGeometryType replaceChild(Expression newChild) { + return new StGeometryType(source(), newChild); + } + + @Override + protected GeoOperation operation() { + return GeoOperation.GEOMETRY_TYPE; + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosql.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosql.java new file mode 100644 index 0000000000000..3ebae55dec4f0 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosql.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.gen.script.Scripts; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isString; + +/** + * Constructs geometric objects from their WTK representations + */ +public class StWkttosql extends UnaryScalarFunction { + + public StWkttosql(Source source, Expression field) { + super(source, field); + } + + @Override + protected StWkttosql replaceChild(Expression newChild) { + return new StWkttosql(source(), newChild); + } + + @Override + protected TypeResolution resolveType() { + if (field().dataType().isString()) { + return TypeResolution.TYPE_RESOLVED; + } + return isString(field(), functionName(), Expressions.ParamOrdinal.DEFAULT); + } + + @Override + protected Processor makeProcessor() { + return StWkttosqlProcessor.INSTANCE; + } + + @Override + public DataType dataType() { + return DataType.GEO_SHAPE; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StWkttosql::new, field()); + } + + @Override + public String processScript(String script) { + return Scripts.formatTemplate(Scripts.SQL_SCRIPTS + ".stWktToSql(" + script + ")"); + } + + @Override + public Object fold() { + return StWkttosqlProcessor.INSTANCE.process(field().fold()); + } + +} diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java new file mode 100644 index 0000000000000..f17ee2315befe --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessor.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; + +import java.io.IOException; + +public class StWkttosqlProcessor implements Processor { + + static final StWkttosqlProcessor INSTANCE = new StWkttosqlProcessor(); + + public static final String NAME = "geo_wkttosql"; + + StWkttosqlProcessor() { + } + + public StWkttosqlProcessor(StreamInput in) throws IOException { + } + + @Override + public Object process(Object input) { + return StWkttosqlProcessor.apply(input); + } + + public static GeoShape apply(Object input) { + if (input == null) { + return null; + } + + if ((input instanceof String) == false) { + throw new SqlIllegalArgumentException("A string is required; received [{}]", input); + } + try { + return new GeoShape(input); + } catch (IOException | IllegalArgumentException | ElasticsearchParseException ex) { + throw new SqlIllegalArgumentException("Cannot parse [{}] as a geo_shape value", input); + } + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + return 0; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StX.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StX.java new file mode 100644 index 0000000000000..f3cdafbe70dab --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StX.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
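And a sketch of the WKT parsing path used by ST_WKTToSQL; the class below is illustrative, only StWkttosqlProcessor.apply and the GeoShape accessors come from this changeset:

    import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape;
    import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StWkttosqlProcessor;

    class WktToSqlSketch {
        public static void main(String[] args) {
            GeoShape shape = StWkttosqlProcessor.apply("POINT (-71.34 41.12)");
            System.out.println(shape.getGeometryType());            // POINT
            System.out.println(shape.getX() + " " + shape.getY());  // -71.34 41.12
        }
    }
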
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * ST_X function that takes a geometry and returns the X coordinate of its first point + */ +public class StX extends UnaryGeoFunction { + + public StX(Source source, Expression field) { + super(source, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StX::new, field()); + } + + @Override + protected StX replaceChild(Expression newChild) { + return new StX(source(), newChild); + } + + @Override + protected GeoOperation operation() { + return GeoOperation.X; + } + + @Override + public DataType dataType() { + return DataType.DOUBLE; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StY.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StY.java new file mode 100644 index 0000000000000..0a9bc3aa1a40b --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StY.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * ST_Y function that takes a geometry and returns the Y coordinate of its first point + */ +public class StY extends UnaryGeoFunction { + + public StY(Source source, Expression field) { + super(source, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StY::new, field()); + } + + @Override + protected StY replaceChild(Expression newChild) { + return new StY(source(), newChild); + } + + @Override + protected GeoOperation operation() { + return GeoOperation.Y; + } + + @Override + public DataType dataType() { + return DataType.DOUBLE; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StZ.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StZ.java new file mode 100644 index 0000000000000..b6c0c9466bbe1 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StZ.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * ST_Z function that takes a geometry and returns the Z coordinate of its first point + */ +public class StZ extends UnaryGeoFunction { + + public StZ(Source source, Expression field) { + super(source, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, StZ::new, field()); + } + + @Override + protected StZ replaceChild(Expression newChild) { + return new StZ(source(), newChild); + } + + @Override + protected GeoOperation operation() { + return GeoOperation.Z; + } + + @Override + public DataType dataType() { + return DataType.DOUBLE; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/UnaryGeoFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/UnaryGeoFunction.java new file mode 100644 index 0000000000000..50c05b7fbedb7 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/UnaryGeoFunction.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.util.StringUtils; + +import java.util.Locale; +import java.util.Objects; + +import static java.lang.String.format; +import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isGeo; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + +/** + * Base class for functions that get a single geo shape or geo point as an argument + */ +public abstract class UnaryGeoFunction extends UnaryScalarFunction { + + protected UnaryGeoFunction(Source source, Expression field) { + super(source, field); + } + + @Override + public Object fold() { + return operation().apply(field().fold()); + } + + @Override + protected TypeResolution resolveType() { + if (!childrenResolved()) { + return new TypeResolution("Unresolved children"); + } + return isGeo(field(), operation().toString(), Expressions.ParamOrdinal.DEFAULT); + } + + @Override + protected Processor makeProcessor() { + return new GeoProcessor(operation()); + } + + protected abstract GeoProcessor.GeoOperation operation(); + + @Override + public ScriptTemplate scriptWithField(FieldAttribute field) { + //TODO change this to use _source instead of the exact form (aka field.keyword for geo shape fields) + return new ScriptTemplate(processScript("{sql}.geoDocValue(doc,{})"), + 
paramsBuilder().variable(field.exactAttribute().name()).build(), + dataType()); + } + + @Override + public String processScript(String template) { + // basically, transform the script to InternalSqlScriptUtils.[function_name](other_function_or_field_name) + return super.processScript( + format(Locale.ROOT, "{sql}.%s(%s)", + StringUtils.underscoreToLowerCamelCase("ST_" + operation().name()), + template)); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + UnaryGeoFunction other = (UnaryGeoFunction) obj; + return Objects.equals(other.field(), field()); + } + + @Override + public int hashCode() { + return Objects.hash(field()); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index 6a4ec411fe1cf..d39aec4423684 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.expression.function.scalar.whitelist; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.script.JodaCompatibleZonedDateTime; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; @@ -12,6 +13,10 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor.NonIsoDateTimeExtractor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistanceProcessor; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StWkttosqlProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.TimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor.BinaryMathOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryOptionalMathProcessor.BinaryOptionalMathOperation; @@ -73,7 +78,7 @@ public static Object docValue(Map> doc, String fi } return null; } - + public static boolean nullSafeFilter(Boolean filter) { return filter == null ? 
false : filter.booleanValue(); } @@ -109,7 +114,7 @@ public static Boolean neq(Object left, Object right) { public static Boolean lt(Object left, Object right) { return BinaryComparisonOperation.LT.apply(left, right); } - + public static Boolean lte(Object left, Object right) { return BinaryComparisonOperation.LTE.apply(left, right); } @@ -125,7 +130,7 @@ public static Boolean gte(Object left, Object right) { public static Boolean and(Boolean left, Boolean right) { return BinaryLogicOperation.AND.apply(left, right); } - + public static Boolean or(Boolean left, Boolean right) { return BinaryLogicOperation.OR.apply(left, right); } @@ -328,14 +333,14 @@ public static Integer dateTimeChrono(Object dateTime, String tzId, String chrono } return DateTimeFunction.dateTimeChrono(asDateTime(dateTime), tzId, chronoName); } - + public static String dayName(Object dateTime, String tzId) { if (dateTime == null || tzId == null) { return null; } return NameExtractor.DAY_NAME.extract(asDateTime(dateTime), tzId); } - + public static Integer dayOfWeek(Object dateTime, String tzId) { if (dateTime == null || tzId == null) { return null; @@ -349,7 +354,7 @@ public static String monthName(Object dateTime, String tzId) { } return NameExtractor.MONTH_NAME.extract(asDateTime(dateTime), tzId); } - + public static Integer quarter(Object dateTime, String tzId) { if (dateTime == null || tzId == null) { return null; @@ -390,7 +395,7 @@ private static Object asDateTime(Object dateTime, boolean lenient) { } return dateTime; } - + public static IntervalDayTime intervalDayTime(String text, String typeName) { if (text == null || typeName == null) { return null; @@ -416,7 +421,7 @@ public static OffsetTime asTime(String time) { public static Integer ascii(String s) { return (Integer) StringOperation.ASCII.apply(s); } - + public static Integer bitLength(String s) { return (Integer) StringOperation.BIT_LENGTH.apply(s); } @@ -428,7 +433,7 @@ public static String character(Number n) { public static Integer charLength(String s) { return (Integer) StringOperation.CHAR_LENGTH.apply(s); } - + public static String concat(String s1, String s2) { return (String) ConcatFunctionProcessor.process(s1, s2); } @@ -452,7 +457,7 @@ public static Integer length(String s) { public static Integer locate(String s1, String s2) { return locate(s1, s2, null); } - + public static Integer locate(String s1, String s2, Number pos) { return LocateFunctionProcessor.doProcess(s1, s2, pos); } @@ -460,7 +465,7 @@ public static Integer locate(String s1, String s2, Number pos) { public static String ltrim(String s) { return (String) StringOperation.LTRIM.apply(s); } - + public static Integer octetLength(String s) { return (Integer) StringOperation.OCTET_LENGTH.apply(s); } @@ -468,15 +473,15 @@ public static Integer octetLength(String s) { public static Integer position(String s1, String s2) { return (Integer) BinaryStringStringOperation.POSITION.apply(s1, s2); } - + public static String repeat(String s, Number count) { return BinaryStringNumericOperation.REPEAT.apply(s, count); } - + public static String replace(String s1, String s2, String s3) { return (String) ReplaceFunctionProcessor.doProcess(s1, s2, s3); } - + public static String right(String s, Number count) { return BinaryStringNumericOperation.RIGHT.apply(s, count); } @@ -496,7 +501,47 @@ public static String substring(String s, Number start, Number length) { public static String ucase(String s) { return (String) StringOperation.UCASE.apply(s); } - + + public static String stAswkt(Object v) { + return 
GeoProcessor.GeoOperation.ASWKT.apply(v).toString(); + } + + public static GeoShape stWktToSql(String wktString) { + return StWkttosqlProcessor.apply(wktString); + } + + public static Double stDistance(Object v1, Object v2) { + return StDistanceProcessor.process(v1, v2); + } + + public static String stGeometryType(Object g) { + return (String) GeoProcessor.GeoOperation.GEOMETRY_TYPE.apply(g); + } + + public static Double stX(Object g) { + return (Double) GeoProcessor.GeoOperation.X.apply(g); + } + + public static Double stY(Object g) { + return (Double) GeoProcessor.GeoOperation.Y.apply(g); + } + + public static Double stZ(Object g) { + return (Double) GeoProcessor.GeoOperation.Z.apply(g); + } + + // processes doc value as a geometry + public static GeoShape geoDocValue(Map<String, ScriptDocValues<?>> doc, String fieldName) { + Object obj = docValue(doc, fieldName); + if (obj != null) { + if (obj instanceof GeoPoint) { + return new GeoShape(((GeoPoint) obj).getLon(), ((GeoPoint) obj).getLat()); + } + // TODO: Add support for geo_shapes when it is there + } + return null; + } + // // Casting // diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java index b24ec56727d64..223e22b2a33ba 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.grouping.GroupingFunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; import org.elasticsearch.xpack.sql.expression.literal.IntervalDayTime; import org.elasticsearch.xpack.sql.expression.literal.IntervalYearMonth; import org.elasticsearch.xpack.sql.type.DataType; @@ -95,6 +96,13 @@ default ScriptTemplate scriptWithFoldable(Expression foldable) { dataType()); } + if (fold instanceof GeoShape) { + GeoShape geoShape = (GeoShape) fold; + return new ScriptTemplate(processScript("{sql}.stWktToSql({})"), + paramsBuilder().variable(geoShape.toString()).build(), + dataType()); + } + return new ScriptTemplate(processScript("{}"), paramsBuilder().variable(fold).build(), dataType()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java index b06a1fb887433..ed7dc9da77543 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Intervals.java @@ -408,5 +408,4 @@ public static TemporalAmount negate(TemporalAmount interval) { public static TemporalAmount parseInterval(Source source, String value, DataType intervalType) { return PARSERS.get(intervalType).parse(source, value); } - } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Literals.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Literals.java index 333ba3f11c0b1..d6bdeeb0fe46b 100644 ---
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Literals.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/literal/Literals.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.literal; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; import java.util.ArrayList; import java.util.Collection; @@ -30,6 +31,7 @@ public static Collection getNamedWriteab entries.add(new NamedWriteableRegistry.Entry(IntervalDayTime.class, IntervalDayTime.NAME, IntervalDayTime::new)); entries.add(new NamedWriteableRegistry.Entry(IntervalYearMonth.class, IntervalYearMonth.NAME, IntervalYearMonth::new)); + entries.add(new NamedWriteableRegistry.Entry(GeoShape.class, GeoShape.NAME, GeoShape::new)); return entries; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java index b6bfaa4acb63d..5705bb4d85ab4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java @@ -164,7 +164,7 @@ protected Object doProcess(Object left, Object right) { return null; } - if (f == BinaryArithmeticOperation.MUL || f == BinaryArithmeticOperation.DIV || f == BinaryArithmeticOperation.MOD) { + if (f == BinaryArithmeticOperation.DIV || f == BinaryArithmeticOperation.MOD) { if (!(left instanceof Number)) { throw new SqlIllegalArgumentException("A number is required; received {}", left); } @@ -176,8 +176,8 @@ protected Object doProcess(Object left, Object right) { return f.apply(left, right); } - if (f == BinaryArithmeticOperation.ADD || f == BinaryArithmeticOperation.SUB) { - return f.apply(left, right); + if (f == BinaryArithmeticOperation.ADD || f == BinaryArithmeticOperation.SUB || f == BinaryArithmeticOperation.MUL) { + return f.apply(left, right); } // this should not occur diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java index 5be5e28718459..5b1076592d859 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java @@ -43,7 +43,7 @@ protected TypeResolution resolveType() { // 2. 3. 4. 
intervals if ((DataTypes.isInterval(l) || DataTypes.isInterval(r))) { if (DataTypeConversion.commonType(l, r) == null) { - return new TypeResolution(format("[{}] has arguments with incompatible types [{}] and [{}]", symbol(), l, r)); + return new TypeResolution(format(null, "[{}] has arguments with incompatible types [{}] and [{}]", symbol(), l, r)); } else { return resolveWithIntervals(); } @@ -54,6 +54,12 @@ protected TypeResolution resolveType() { } protected TypeResolution resolveWithIntervals() { + DataType l = left().dataType(); + DataType r = right().dataType(); + + if (!(r.isDateOrTimeBased() || DataTypes.isInterval(r))|| !(l.isDateOrTimeBased() || DataTypes.isInterval(l))) { + return new TypeResolution(format(null, "[{}] has arguments with incompatible types [{}] and [{}]", symbol(), l, r)); + } return TypeResolution.TYPE_RESOLVED; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java index 7a09bbedebfa3..e3fa7ac1031f7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Mul.java @@ -47,7 +47,7 @@ protected TypeResolution resolveType() { return TypeResolution.TYPE_RESOLVED; } - return new TypeResolution(format("[{}] has arguments with incompatible types [{}] and [{}]", symbol(), l, r)); + return new TypeResolution(format(null, "[{}] has arguments with incompatible types [{}] and [{}]", symbol(), l, r)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java index ee3ca6aa6773b..a47b9cc973122 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java @@ -34,6 +34,10 @@ protected Sub replaceChildren(Expression newLeft, Expression newRight) { @Override protected TypeResolution resolveWithIntervals() { + TypeResolution resolution = super.resolveWithIntervals(); + if (resolution.unresolved()) { + return resolution; + } if ((right().dataType().isDateOrTimeBased()) && DataTypes.isInterval(left().dataType())) { return new TypeResolution(format(null, "Cannot subtract a {}[{}] from an interval[{}]; do you mean the reverse?", right().dataType().typeName, right().source().text(), left().source().text())); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java index ba2a39069953a..6de27b7776338 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java @@ -125,7 +125,7 @@ public Object visitShowFunctions(ShowFunctionsContext ctx) { public Object visitShowTables(ShowTablesContext ctx) { TableIdentifier ti = visitTableIdentifier(ctx.tableIdent); String index = ti != null ? 
ti.qualifiedIndex() : null; - return new ShowTables(source(ctx), index, visitLikePattern(ctx.likePattern())); + return new ShowTables(source(ctx), index, visitLikePattern(ctx.likePattern()), ctx.FROZEN() != null); } @Override @@ -137,7 +137,7 @@ public Object visitShowSchemas(ShowSchemasContext ctx) { public Object visitShowColumns(ShowColumnsContext ctx) { TableIdentifier ti = visitTableIdentifier(ctx.tableIdent); String index = ti != null ? ti.qualifiedIndex() : null; - return new ShowColumns(source(ctx), index, visitLikePattern(ctx.likePattern())); + return new ShowColumns(source(ctx), index, visitLikePattern(ctx.likePattern()), ctx.FROZEN() != null); } @Override @@ -154,12 +154,21 @@ public SysTables visitSysTables(SysTablesContext ctx) { } // special case for legacy apps (like msquery) that always asks for 'TABLE' // which we manually map to all concrete tables supported - else if (value.toUpperCase(Locale.ROOT).equals("TABLE")) { - legacyTableType = true; - types.add(IndexType.INDEX); - } else { - IndexType type = IndexType.from(value); - types.add(type); + else { + switch (value.toUpperCase(Locale.ROOT)) { + case IndexType.SQL_TABLE: + legacyTableType = true; + types.add(IndexType.STANDARD_INDEX); + break; + case IndexType.SQL_BASE_TABLE: + types.add(IndexType.STANDARD_INDEX); + break; + case IndexType.SQL_VIEW: + types.add(IndexType.ALIAS); + break; + default: + types.add(IndexType.UNKNOWN); + } } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java index f27368912c1d2..429e572878f55 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java @@ -211,6 +211,6 @@ public Object visitSubquery(SubqueryContext ctx) { public LogicalPlan visitTableName(TableNameContext ctx) { String alias = visitQualifiedName(ctx.qualifiedName()); TableIdentifier tableIdentifier = visitTableIdentifier(ctx.tableIdentifier()); - return new UnresolvedRelation(source(ctx), tableIdentifier, alias); + return new UnresolvedRelation(source(ctx), tableIdentifier, alias, ctx.FROZEN() != null); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java index a537f9f369e92..de8afac152686 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java @@ -1,16 +1,13 @@ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.xpack.sql.parser; - -import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.Lexer; -import org.antlr.v4.runtime.RuntimeMetaData; -import org.antlr.v4.runtime.Vocabulary; -import org.antlr.v4.runtime.VocabularyImpl; -import org.antlr.v4.runtime.atn.ATN; -import org.antlr.v4.runtime.atn.ATNDeserializer; -import org.antlr.v4.runtime.atn.LexerATNSimulator; -import org.antlr.v4.runtime.atn.PredictionContextCache; +import org.antlr.v4.runtime.CharStream; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.TokenStream; +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; +import org.antlr.v4.runtime.misc.*; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class 
SqlBaseLexer extends Lexer { @@ -25,21 +22,21 @@ class SqlBaseLexer extends Lexer { COLUMNS=18, CONVERT=19, CURRENT_DATE=20, CURRENT_TIME=21, CURRENT_TIMESTAMP=22, DAY=23, DAYS=24, DEBUG=25, DESC=26, DESCRIBE=27, DISTINCT=28, ELSE=29, END=30, ESCAPE=31, EXECUTABLE=32, EXISTS=33, EXPLAIN=34, EXTRACT=35, FALSE=36, - FIRST=37, FORMAT=38, FROM=39, FULL=40, FUNCTIONS=41, GRAPHVIZ=42, GROUP=43, - HAVING=44, HOUR=45, HOURS=46, IN=47, INNER=48, INTERVAL=49, IS=50, JOIN=51, - LAST=52, LEFT=53, LIKE=54, LIMIT=55, MAPPED=56, MATCH=57, MINUTE=58, MINUTES=59, - MONTH=60, MONTHS=61, NATURAL=62, NOT=63, NULL=64, NULLS=65, ON=66, OPTIMIZED=67, - OR=68, ORDER=69, OUTER=70, PARSED=71, PHYSICAL=72, PLAN=73, RIGHT=74, - RLIKE=75, QUERY=76, SCHEMAS=77, SECOND=78, SECONDS=79, SELECT=80, SHOW=81, - SYS=82, TABLE=83, TABLES=84, TEXT=85, THEN=86, TRUE=87, TO=88, TYPE=89, - TYPES=90, USING=91, VERIFY=92, WHEN=93, WHERE=94, WITH=95, YEAR=96, YEARS=97, - ESCAPE_ESC=98, FUNCTION_ESC=99, LIMIT_ESC=100, DATE_ESC=101, TIME_ESC=102, - TIMESTAMP_ESC=103, GUID_ESC=104, ESC_END=105, EQ=106, NULLEQ=107, NEQ=108, - LT=109, LTE=110, GT=111, GTE=112, PLUS=113, MINUS=114, ASTERISK=115, SLASH=116, - PERCENT=117, CAST_OP=118, CONCAT=119, DOT=120, PARAM=121, STRING=122, - INTEGER_VALUE=123, DECIMAL_VALUE=124, IDENTIFIER=125, DIGIT_IDENTIFIER=126, - TABLE_IDENTIFIER=127, QUOTED_IDENTIFIER=128, BACKQUOTED_IDENTIFIER=129, - SIMPLE_COMMENT=130, BRACKETED_COMMENT=131, WS=132, UNRECOGNIZED=133; + FIRST=37, FORMAT=38, FROM=39, FROZEN=40, FULL=41, FUNCTIONS=42, GRAPHVIZ=43, + GROUP=44, HAVING=45, HOUR=46, HOURS=47, IN=48, INCLUDE=49, INNER=50, INTERVAL=51, + IS=52, JOIN=53, LAST=54, LEFT=55, LIKE=56, LIMIT=57, MAPPED=58, MATCH=59, + MINUTE=60, MINUTES=61, MONTH=62, MONTHS=63, NATURAL=64, NOT=65, NULL=66, + NULLS=67, ON=68, OPTIMIZED=69, OR=70, ORDER=71, OUTER=72, PARSED=73, PHYSICAL=74, + PLAN=75, RIGHT=76, RLIKE=77, QUERY=78, SCHEMAS=79, SECOND=80, SECONDS=81, + SELECT=82, SHOW=83, SYS=84, TABLE=85, TABLES=86, TEXT=87, THEN=88, TRUE=89, + TO=90, TYPE=91, TYPES=92, USING=93, VERIFY=94, WHEN=95, WHERE=96, WITH=97, + YEAR=98, YEARS=99, ESCAPE_ESC=100, FUNCTION_ESC=101, LIMIT_ESC=102, DATE_ESC=103, + TIME_ESC=104, TIMESTAMP_ESC=105, GUID_ESC=106, ESC_END=107, EQ=108, NULLEQ=109, + NEQ=110, LT=111, LTE=112, GT=113, GTE=114, PLUS=115, MINUS=116, ASTERISK=117, + SLASH=118, PERCENT=119, CAST_OP=120, CONCAT=121, DOT=122, PARAM=123, STRING=124, + INTEGER_VALUE=125, DECIMAL_VALUE=126, IDENTIFIER=127, DIGIT_IDENTIFIER=128, + TABLE_IDENTIFIER=129, QUOTED_IDENTIFIER=130, BACKQUOTED_IDENTIFIER=131, + SIMPLE_COMMENT=132, BRACKETED_COMMENT=133, WS=134, UNRECOGNIZED=135; public static String[] modeNames = { "DEFAULT_MODE" }; @@ -50,20 +47,20 @@ class SqlBaseLexer extends Lexer { "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", "ESCAPE", "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", "HOUR", "HOURS", - "IN", "INNER", "INTERVAL", "IS", "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", - "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", "MONTHS", "NATURAL", - "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", - "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", - "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", - "TYPE", "TYPES", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", - "ESCAPE_ESC", "FUNCTION_ESC", 
"LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", - "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", "CONCAT", - "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", - "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", - "EXPONENT", "DIGIT", "LETTER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", - "WS", "UNRECOGNIZED" + "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", + "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", "JOIN", "LAST", + "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", + "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", + "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", + "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", + "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHEN", + "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", + "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", + "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "CAST_OP", "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", + "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", + "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER", + "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" }; private static final String[] _LITERAL_NAMES = { @@ -73,18 +70,18 @@ class SqlBaseLexer extends Lexer { "'CURRENT_TIME'", "'CURRENT_TIMESTAMP'", "'DAY'", "'DAYS'", "'DEBUG'", "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ELSE'", "'END'", "'ESCAPE'", "'EXECUTABLE'", "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FORMAT'", - "'FROM'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", "'HAVING'", - "'HOUR'", "'HOURS'", "'IN'", "'INNER'", "'INTERVAL'", "'IS'", "'JOIN'", - "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", "'MINUTE'", - "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", - "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", - "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", - "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'THEN'", - "'TRUE'", "'TO'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", "'WHEN'", - "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", - "'{D'", "'{T'", "'{TS'", "'{GUID'", "'}'", "'='", "'<=>'", null, "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'::'", "'||'", - "'.'", "'?'" + "'FROM'", "'FROZEN'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", + "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INCLUDE'", "'INNER'", "'INTERVAL'", + "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", + "'MATCH'", "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", + "'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", + "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", + "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", + "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", "'TYPE'", "'TYPES'", + "'USING'", "'VERIFY'", "'WHEN'", "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", + "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", "'{TS'", "'{GUID'", + "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", + 
"'*'", "'/'", "'%'", "'::'", "'||'", "'.'", "'?'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", @@ -92,19 +89,20 @@ class SqlBaseLexer extends Lexer { "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", "ESCAPE", "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", "HOUR", "HOURS", - "IN", "INNER", "INTERVAL", "IS", "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", - "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", "MONTHS", "NATURAL", - "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", - "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", - "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", - "TYPE", "TYPES", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", - "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", - "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", "CONCAT", - "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", - "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", - "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" + "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", + "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", "JOIN", "LAST", + "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", + "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", + "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", + "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", + "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHEN", + "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", + "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", + "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "CAST_OP", "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", + "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", + "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", + "WS", "UNRECOGNIZED" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -161,7 +159,7 @@ public SqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\u0087\u045e\b\1\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\u0089\u0471\b\1\4"+ "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+ "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ @@ -177,375 +175,384 @@ public SqlBaseLexer(CharStream input) { "\4w\tw\4x\tx\4y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080\t"+ "\u0080\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083\4\u0084\t\u0084"+ "\4\u0085\t\u0085\4\u0086\t\u0086\4\u0087\t\u0087\4\u0088\t\u0088\4\u0089"+ - "\t\u0089\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3"+ - "\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t"+ - 
"\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3"+ - "\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20"+ - "\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22"+ - "\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24"+ - "\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25"+ - "\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26"+ - "\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27"+ - "\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\31\3\31"+ - "\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33"+ - "\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35"+ - "\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3 \3"+ - " \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\""+ - "\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3"+ - "%\3%\3&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3)\3"+ - ")\3)\3)\3)\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3+\3+\3+\3+\3"+ - ",\3,\3,\3,\3,\3,\3-\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3"+ - "/\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\62\3\62"+ - "\3\62\3\62\3\62\3\62\3\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\65\3\65"+ - "\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67\38\3"+ - "8\38\38\38\38\39\39\39\39\39\39\39\3:\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3"+ - ";\3;\3<\3<\3<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3=\3>\3>\3>\3>\3>\3>\3>\3"+ - "?\3?\3?\3?\3?\3?\3?\3?\3@\3@\3@\3@\3A\3A\3A\3A\3A\3B\3B\3B\3B\3B\3B\3"+ - "C\3C\3C\3D\3D\3D\3D\3D\3D\3D\3D\3D\3D\3E\3E\3E\3F\3F\3F\3F\3F\3F\3G\3"+ - "G\3G\3G\3G\3G\3H\3H\3H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3I\3I\3I\3J\3J\3"+ - "J\3J\3J\3K\3K\3K\3K\3K\3K\3L\3L\3L\3L\3L\3L\3M\3M\3M\3M\3M\3M\3N\3N\3"+ - "N\3N\3N\3N\3N\3N\3O\3O\3O\3O\3O\3O\3O\3P\3P\3P\3P\3P\3P\3P\3P\3Q\3Q\3"+ - "Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3R\3S\3S\3S\3S\3T\3T\3T\3T\3T\3T\3U\3U\3U\3"+ - "U\3U\3U\3U\3V\3V\3V\3V\3V\3W\3W\3W\3W\3W\3X\3X\3X\3X\3X\3Y\3Y\3Y\3Z\3"+ - "Z\3Z\3Z\3Z\3[\3[\3[\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3\\\3]\3]\3]\3]\3]\3"+ - "]\3]\3^\3^\3^\3^\3^\3_\3_\3_\3_\3_\3_\3`\3`\3`\3`\3`\3a\3a\3a\3a\3a\3"+ - "b\3b\3b\3b\3b\3b\3c\3c\3c\3c\3c\3c\3c\3c\3d\3d\3d\3d\3e\3e\3e\3e\3e\3"+ - "e\3e\3f\3f\3f\3g\3g\3g\3h\3h\3h\3h\3i\3i\3i\3i\3i\3i\3j\3j\3k\3k\3l\3"+ - "l\3l\3l\3m\3m\3m\3m\5m\u039c\nm\3n\3n\3o\3o\3o\3p\3p\3q\3q\3q\3r\3r\3"+ - "s\3s\3t\3t\3u\3u\3v\3v\3w\3w\3w\3x\3x\3x\3y\3y\3z\3z\3{\3{\3{\3{\7{\u03c0"+ - "\n{\f{\16{\u03c3\13{\3{\3{\3|\6|\u03c8\n|\r|\16|\u03c9\3}\6}\u03cd\n}"+ - "\r}\16}\u03ce\3}\3}\7}\u03d3\n}\f}\16}\u03d6\13}\3}\3}\6}\u03da\n}\r}"+ - "\16}\u03db\3}\6}\u03df\n}\r}\16}\u03e0\3}\3}\7}\u03e5\n}\f}\16}\u03e8"+ - "\13}\5}\u03ea\n}\3}\3}\3}\3}\6}\u03f0\n}\r}\16}\u03f1\3}\3}\5}\u03f6\n"+ - "}\3~\3~\5~\u03fa\n~\3~\3~\3~\7~\u03ff\n~\f~\16~\u0402\13~\3\177\3\177"+ - "\3\177\3\177\6\177\u0408\n\177\r\177\16\177\u0409\3\u0080\3\u0080\3\u0080"+ - "\6\u0080\u040f\n\u0080\r\u0080\16\u0080\u0410\3\u0081\3\u0081\3\u0081"+ - "\3\u0081\7\u0081\u0417\n\u0081\f\u0081\16\u0081\u041a\13\u0081\3\u0081"+ - "\3\u0081\3\u0082\3\u0082\3\u0082\3\u0082\7\u0082\u0422\n\u0082\f\u0082"+ - "\16\u0082\u0425\13\u0082\3\u0082\3\u0082\3\u0083\3\u0083\5\u0083\u042b"+ - "\n\u0083\3\u0083\6\u0083\u042e\n\u0083\r\u0083\16\u0083\u042f\3\u0084"+ - "\3\u0084\3\u0085\3\u0085\3\u0086\3\u0086\3\u0086\3\u0086\7\u0086\u043a"+ - "\n\u0086\f\u0086\16\u0086\u043d\13\u0086\3\u0086\5\u0086\u0440\n\u0086"+ - 
"\3\u0086\5\u0086\u0443\n\u0086\3\u0086\3\u0086\3\u0087\3\u0087\3\u0087"+ - "\3\u0087\3\u0087\7\u0087\u044c\n\u0087\f\u0087\16\u0087\u044f\13\u0087"+ - "\3\u0087\3\u0087\3\u0087\3\u0087\3\u0087\3\u0088\6\u0088\u0457\n\u0088"+ - "\r\u0088\16\u0088\u0458\3\u0088\3\u0088\3\u0089\3\u0089\3\u044d\2\u008a"+ - "\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20"+ - "\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37"+ - "= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o"+ - "9q:s;u{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH"+ - "\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1"+ - "R\u00a3S\u00a5T\u00a7U\u00a9V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5"+ - "\\\u00b7]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9"+ - "f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3k\u00d5l\u00d7m\u00d9n\u00dbo\u00dd"+ - "p\u00dfq\u00e1r\u00e3s\u00e5t\u00e7u\u00e9v\u00ebw\u00edx\u00efy\u00f1"+ - "z\u00f3{\u00f5|\u00f7}\u00f9~\u00fb\177\u00fd\u0080\u00ff\u0081\u0101"+ - "\u0082\u0103\u0083\u0105\2\u0107\2\u0109\2\u010b\u0084\u010d\u0085\u010f"+ - "\u0086\u0111\u0087\3\2\13\3\2))\4\2BBaa\3\2$$\3\2bb\4\2--//\3\2\62;\3"+ - "\2C\\\4\2\f\f\17\17\5\2\13\f\17\17\"\"\u047e\2\3\3\2\2\2\2\5\3\2\2\2\2"+ - "\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2"+ - "\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2"+ - "\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2"+ - "\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2"+ - "\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2"+ - "\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2"+ - "M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3"+ - "\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2"+ - "\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2"+ - "s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2y\3\2\2\2\2{\3\2\2\2\2}\3\2\2\2\2\177"+ - "\3\2\2\2\2\u0081\3\2\2\2\2\u0083\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2"+ - "\2\2\u0089\3\2\2\2\2\u008b\3\2\2\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091"+ - "\3\2\2\2\2\u0093\3\2\2\2\2\u0095\3\2\2\2\2\u0097\3\2\2\2\2\u0099\3\2\2"+ - "\2\2\u009b\3\2\2\2\2\u009d\3\2\2\2\2\u009f\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3"+ - "\3\2\2\2\2\u00a5\3\2\2\2\2\u00a7\3\2\2\2\2\u00a9\3\2\2\2\2\u00ab\3\2\2"+ - "\2\2\u00ad\3\2\2\2\2\u00af\3\2\2\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2\2\2\u00b5"+ - "\3\2\2\2\2\u00b7\3\2\2\2\2\u00b9\3\2\2\2\2\u00bb\3\2\2\2\2\u00bd\3\2\2"+ - "\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2\2\2\u00c5\3\2\2\2\2\u00c7"+ - "\3\2\2\2\2\u00c9\3\2\2\2\2\u00cb\3\2\2\2\2\u00cd\3\2\2\2\2\u00cf\3\2\2"+ - "\2\2\u00d1\3\2\2\2\2\u00d3\3\2\2\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9"+ - "\3\2\2\2\2\u00db\3\2\2\2\2\u00dd\3\2\2\2\2\u00df\3\2\2\2\2\u00e1\3\2\2"+ - "\2\2\u00e3\3\2\2\2\2\u00e5\3\2\2\2\2\u00e7\3\2\2\2\2\u00e9\3\2\2\2\2\u00eb"+ - "\3\2\2\2\2\u00ed\3\2\2\2\2\u00ef\3\2\2\2\2\u00f1\3\2\2\2\2\u00f3\3\2\2"+ - "\2\2\u00f5\3\2\2\2\2\u00f7\3\2\2\2\2\u00f9\3\2\2\2\2\u00fb\3\2\2\2\2\u00fd"+ - "\3\2\2\2\2\u00ff\3\2\2\2\2\u0101\3\2\2\2\2\u0103\3\2\2\2\2\u010b\3\2\2"+ - "\2\2\u010d\3\2\2\2\2\u010f\3\2\2\2\2\u0111\3\2\2\2\3\u0113\3\2\2\2\5\u0115"+ - "\3\2\2\2\7\u0117\3\2\2\2\t\u0119\3\2\2\2\13\u011b\3\2\2\2\r\u011f\3\2"+ - "\2\2\17\u0127\3\2\2\2\21\u0130\3\2\2\2\23\u0134\3\2\2\2\25\u0138\3\2\2"+ - "\2\27\u013b\3\2\2\2\31\u013f\3\2\2\2\33\u0147\3\2\2\2\35\u014a\3\2\2\2"+ - 
"\37\u014f\3\2\2\2!\u0154\3\2\2\2#\u015c\3\2\2\2%\u0165\3\2\2\2\'\u016d"+ - "\3\2\2\2)\u0175\3\2\2\2+\u0182\3\2\2\2-\u018f\3\2\2\2/\u01a1\3\2\2\2\61"+ - "\u01a5\3\2\2\2\63\u01aa\3\2\2\2\65\u01b0\3\2\2\2\67\u01b5\3\2\2\29\u01be"+ - "\3\2\2\2;\u01c7\3\2\2\2=\u01cc\3\2\2\2?\u01d0\3\2\2\2A\u01d7\3\2\2\2C"+ - "\u01e2\3\2\2\2E\u01e9\3\2\2\2G\u01f1\3\2\2\2I\u01f9\3\2\2\2K\u01ff\3\2"+ - "\2\2M\u0205\3\2\2\2O\u020c\3\2\2\2Q\u0211\3\2\2\2S\u0216\3\2\2\2U\u0220"+ - "\3\2\2\2W\u0229\3\2\2\2Y\u022f\3\2\2\2[\u0236\3\2\2\2]\u023b\3\2\2\2_"+ - "\u0241\3\2\2\2a\u0244\3\2\2\2c\u024a\3\2\2\2e\u0253\3\2\2\2g\u0256\3\2"+ - "\2\2i\u025b\3\2\2\2k\u0260\3\2\2\2m\u0265\3\2\2\2o\u026a\3\2\2\2q\u0270"+ - "\3\2\2\2s\u0277\3\2\2\2u\u027d\3\2\2\2w\u0284\3\2\2\2y\u028c\3\2\2\2{"+ - "\u0292\3\2\2\2}\u0299\3\2\2\2\177\u02a1\3\2\2\2\u0081\u02a5\3\2\2\2\u0083"+ - "\u02aa\3\2\2\2\u0085\u02b0\3\2\2\2\u0087\u02b3\3\2\2\2\u0089\u02bd\3\2"+ - "\2\2\u008b\u02c0\3\2\2\2\u008d\u02c6\3\2\2\2\u008f\u02cc\3\2\2\2\u0091"+ - "\u02d3\3\2\2\2\u0093\u02dc\3\2\2\2\u0095\u02e1\3\2\2\2\u0097\u02e7\3\2"+ - "\2\2\u0099\u02ed\3\2\2\2\u009b\u02f3\3\2\2\2\u009d\u02fb\3\2\2\2\u009f"+ - "\u0302\3\2\2\2\u00a1\u030a\3\2\2\2\u00a3\u0311\3\2\2\2\u00a5\u0316\3\2"+ - "\2\2\u00a7\u031a\3\2\2\2\u00a9\u0320\3\2\2\2\u00ab\u0327\3\2\2\2\u00ad"+ - "\u032c\3\2\2\2\u00af\u0331\3\2\2\2\u00b1\u0336\3\2\2\2\u00b3\u0339\3\2"+ - "\2\2\u00b5\u033e\3\2\2\2\u00b7\u0344\3\2\2\2\u00b9\u034a\3\2\2\2\u00bb"+ - "\u0351\3\2\2\2\u00bd\u0356\3\2\2\2\u00bf\u035c\3\2\2\2\u00c1\u0361\3\2"+ - "\2\2\u00c3\u0366\3\2\2\2\u00c5\u036c\3\2\2\2\u00c7\u0374\3\2\2\2\u00c9"+ - "\u0378\3\2\2\2\u00cb\u037f\3\2\2\2\u00cd\u0382\3\2\2\2\u00cf\u0385\3\2"+ - "\2\2\u00d1\u0389\3\2\2\2\u00d3\u038f\3\2\2\2\u00d5\u0391\3\2\2\2\u00d7"+ - "\u0393\3\2\2\2\u00d9\u039b\3\2\2\2\u00db\u039d\3\2\2\2\u00dd\u039f\3\2"+ - "\2\2\u00df\u03a2\3\2\2\2\u00e1\u03a4\3\2\2\2\u00e3\u03a7\3\2\2\2\u00e5"+ - "\u03a9\3\2\2\2\u00e7\u03ab\3\2\2\2\u00e9\u03ad\3\2\2\2\u00eb\u03af\3\2"+ - "\2\2\u00ed\u03b1\3\2\2\2\u00ef\u03b4\3\2\2\2\u00f1\u03b7\3\2\2\2\u00f3"+ - "\u03b9\3\2\2\2\u00f5\u03bb\3\2\2\2\u00f7\u03c7\3\2\2\2\u00f9\u03f5\3\2"+ - "\2\2\u00fb\u03f9\3\2\2\2\u00fd\u0403\3\2\2\2\u00ff\u040e\3\2\2\2\u0101"+ - "\u0412\3\2\2\2\u0103\u041d\3\2\2\2\u0105\u0428\3\2\2\2\u0107\u0431\3\2"+ - "\2\2\u0109\u0433\3\2\2\2\u010b\u0435\3\2\2\2\u010d\u0446\3\2\2\2\u010f"+ - "\u0456\3\2\2\2\u0111\u045c\3\2\2\2\u0113\u0114\7*\2\2\u0114\4\3\2\2\2"+ - "\u0115\u0116\7+\2\2\u0116\6\3\2\2\2\u0117\u0118\7.\2\2\u0118\b\3\2\2\2"+ - "\u0119\u011a\7<\2\2\u011a\n\3\2\2\2\u011b\u011c\7C\2\2\u011c\u011d\7N"+ - "\2\2\u011d\u011e\7N\2\2\u011e\f\3\2\2\2\u011f\u0120\7C\2\2\u0120\u0121"+ - "\7P\2\2\u0121\u0122\7C\2\2\u0122\u0123\7N\2\2\u0123\u0124\7[\2\2\u0124"+ - "\u0125\7\\\2\2\u0125\u0126\7G\2\2\u0126\16\3\2\2\2\u0127\u0128\7C\2\2"+ - "\u0128\u0129\7P\2\2\u0129\u012a\7C\2\2\u012a\u012b\7N\2\2\u012b\u012c"+ - "\7[\2\2\u012c\u012d\7\\\2\2\u012d\u012e\7G\2\2\u012e\u012f\7F\2\2\u012f"+ - "\20\3\2\2\2\u0130\u0131\7C\2\2\u0131\u0132\7P\2\2\u0132\u0133\7F\2\2\u0133"+ - "\22\3\2\2\2\u0134\u0135\7C\2\2\u0135\u0136\7P\2\2\u0136\u0137\7[\2\2\u0137"+ - "\24\3\2\2\2\u0138\u0139\7C\2\2\u0139\u013a\7U\2\2\u013a\26\3\2\2\2\u013b"+ - "\u013c\7C\2\2\u013c\u013d\7U\2\2\u013d\u013e\7E\2\2\u013e\30\3\2\2\2\u013f"+ - "\u0140\7D\2\2\u0140\u0141\7G\2\2\u0141\u0142\7V\2\2\u0142\u0143\7Y\2\2"+ - "\u0143\u0144\7G\2\2\u0144\u0145\7G\2\2\u0145\u0146\7P\2\2\u0146\32\3\2"+ - "\2\2\u0147\u0148\7D\2\2\u0148\u0149\7[\2\2\u0149\34\3\2\2\2\u014a\u014b"+ - 
"\7E\2\2\u014b\u014c\7C\2\2\u014c\u014d\7U\2\2\u014d\u014e\7G\2\2\u014e"+ - "\36\3\2\2\2\u014f\u0150\7E\2\2\u0150\u0151\7C\2\2\u0151\u0152\7U\2\2\u0152"+ - "\u0153\7V\2\2\u0153 \3\2\2\2\u0154\u0155\7E\2\2\u0155\u0156\7C\2\2\u0156"+ - "\u0157\7V\2\2\u0157\u0158\7C\2\2\u0158\u0159\7N\2\2\u0159\u015a\7Q\2\2"+ - "\u015a\u015b\7I\2\2\u015b\"\3\2\2\2\u015c\u015d\7E\2\2\u015d\u015e\7C"+ - "\2\2\u015e\u015f\7V\2\2\u015f\u0160\7C\2\2\u0160\u0161\7N\2\2\u0161\u0162"+ - "\7Q\2\2\u0162\u0163\7I\2\2\u0163\u0164\7U\2\2\u0164$\3\2\2\2\u0165\u0166"+ - "\7E\2\2\u0166\u0167\7Q\2\2\u0167\u0168\7N\2\2\u0168\u0169\7W\2\2\u0169"+ - "\u016a\7O\2\2\u016a\u016b\7P\2\2\u016b\u016c\7U\2\2\u016c&\3\2\2\2\u016d"+ - "\u016e\7E\2\2\u016e\u016f\7Q\2\2\u016f\u0170\7P\2\2\u0170\u0171\7X\2\2"+ - "\u0171\u0172\7G\2\2\u0172\u0173\7T\2\2\u0173\u0174\7V\2\2\u0174(\3\2\2"+ - "\2\u0175\u0176\7E\2\2\u0176\u0177\7W\2\2\u0177\u0178\7T\2\2\u0178\u0179"+ - "\7T\2\2\u0179\u017a\7G\2\2\u017a\u017b\7P\2\2\u017b\u017c\7V\2\2\u017c"+ - "\u017d\7a\2\2\u017d\u017e\7F\2\2\u017e\u017f\7C\2\2\u017f\u0180\7V\2\2"+ - "\u0180\u0181\7G\2\2\u0181*\3\2\2\2\u0182\u0183\7E\2\2\u0183\u0184\7W\2"+ - "\2\u0184\u0185\7T\2\2\u0185\u0186\7T\2\2\u0186\u0187\7G\2\2\u0187\u0188"+ - "\7P\2\2\u0188\u0189\7V\2\2\u0189\u018a\7a\2\2\u018a\u018b\7V\2\2\u018b"+ - "\u018c\7K\2\2\u018c\u018d\7O\2\2\u018d\u018e\7G\2\2\u018e,\3\2\2\2\u018f"+ - "\u0190\7E\2\2\u0190\u0191\7W\2\2\u0191\u0192\7T\2\2\u0192\u0193\7T\2\2"+ - "\u0193\u0194\7G\2\2\u0194\u0195\7P\2\2\u0195\u0196\7V\2\2\u0196\u0197"+ - "\7a\2\2\u0197\u0198\7V\2\2\u0198\u0199\7K\2\2\u0199\u019a\7O\2\2\u019a"+ - "\u019b\7G\2\2\u019b\u019c\7U\2\2\u019c\u019d\7V\2\2\u019d\u019e\7C\2\2"+ - "\u019e\u019f\7O\2\2\u019f\u01a0\7R\2\2\u01a0.\3\2\2\2\u01a1\u01a2\7F\2"+ - "\2\u01a2\u01a3\7C\2\2\u01a3\u01a4\7[\2\2\u01a4\60\3\2\2\2\u01a5\u01a6"+ - "\7F\2\2\u01a6\u01a7\7C\2\2\u01a7\u01a8\7[\2\2\u01a8\u01a9\7U\2\2\u01a9"+ - "\62\3\2\2\2\u01aa\u01ab\7F\2\2\u01ab\u01ac\7G\2\2\u01ac\u01ad\7D\2\2\u01ad"+ - "\u01ae\7W\2\2\u01ae\u01af\7I\2\2\u01af\64\3\2\2\2\u01b0\u01b1\7F\2\2\u01b1"+ - "\u01b2\7G\2\2\u01b2\u01b3\7U\2\2\u01b3\u01b4\7E\2\2\u01b4\66\3\2\2\2\u01b5"+ - "\u01b6\7F\2\2\u01b6\u01b7\7G\2\2\u01b7\u01b8\7U\2\2\u01b8\u01b9\7E\2\2"+ - "\u01b9\u01ba\7T\2\2\u01ba\u01bb\7K\2\2\u01bb\u01bc\7D\2\2\u01bc\u01bd"+ - "\7G\2\2\u01bd8\3\2\2\2\u01be\u01bf\7F\2\2\u01bf\u01c0\7K\2\2\u01c0\u01c1"+ - "\7U\2\2\u01c1\u01c2\7V\2\2\u01c2\u01c3\7K\2\2\u01c3\u01c4\7P\2\2\u01c4"+ - "\u01c5\7E\2\2\u01c5\u01c6\7V\2\2\u01c6:\3\2\2\2\u01c7\u01c8\7G\2\2\u01c8"+ - "\u01c9\7N\2\2\u01c9\u01ca\7U\2\2\u01ca\u01cb\7G\2\2\u01cb<\3\2\2\2\u01cc"+ - "\u01cd\7G\2\2\u01cd\u01ce\7P\2\2\u01ce\u01cf\7F\2\2\u01cf>\3\2\2\2\u01d0"+ - "\u01d1\7G\2\2\u01d1\u01d2\7U\2\2\u01d2\u01d3\7E\2\2\u01d3\u01d4\7C\2\2"+ - "\u01d4\u01d5\7R\2\2\u01d5\u01d6\7G\2\2\u01d6@\3\2\2\2\u01d7\u01d8\7G\2"+ - "\2\u01d8\u01d9\7Z\2\2\u01d9\u01da\7G\2\2\u01da\u01db\7E\2\2\u01db\u01dc"+ - "\7W\2\2\u01dc\u01dd\7V\2\2\u01dd\u01de\7C\2\2\u01de\u01df\7D\2\2\u01df"+ - "\u01e0\7N\2\2\u01e0\u01e1\7G\2\2\u01e1B\3\2\2\2\u01e2\u01e3\7G\2\2\u01e3"+ - "\u01e4\7Z\2\2\u01e4\u01e5\7K\2\2\u01e5\u01e6\7U\2\2\u01e6\u01e7\7V\2\2"+ - "\u01e7\u01e8\7U\2\2\u01e8D\3\2\2\2\u01e9\u01ea\7G\2\2\u01ea\u01eb\7Z\2"+ - "\2\u01eb\u01ec\7R\2\2\u01ec\u01ed\7N\2\2\u01ed\u01ee\7C\2\2\u01ee\u01ef"+ - "\7K\2\2\u01ef\u01f0\7P\2\2\u01f0F\3\2\2\2\u01f1\u01f2\7G\2\2\u01f2\u01f3"+ - "\7Z\2\2\u01f3\u01f4\7V\2\2\u01f4\u01f5\7T\2\2\u01f5\u01f6\7C\2\2\u01f6"+ - "\u01f7\7E\2\2\u01f7\u01f8\7V\2\2\u01f8H\3\2\2\2\u01f9\u01fa\7H\2\2\u01fa"+ - 
"\u01fb\7C\2\2\u01fb\u01fc\7N\2\2\u01fc\u01fd\7U\2\2\u01fd\u01fe\7G\2\2"+ - "\u01feJ\3\2\2\2\u01ff\u0200\7H\2\2\u0200\u0201\7K\2\2\u0201\u0202\7T\2"+ - "\2\u0202\u0203\7U\2\2\u0203\u0204\7V\2\2\u0204L\3\2\2\2\u0205\u0206\7"+ - "H\2\2\u0206\u0207\7Q\2\2\u0207\u0208\7T\2\2\u0208\u0209\7O\2\2\u0209\u020a"+ - "\7C\2\2\u020a\u020b\7V\2\2\u020bN\3\2\2\2\u020c\u020d\7H\2\2\u020d\u020e"+ - "\7T\2\2\u020e\u020f\7Q\2\2\u020f\u0210\7O\2\2\u0210P\3\2\2\2\u0211\u0212"+ - "\7H\2\2\u0212\u0213\7W\2\2\u0213\u0214\7N\2\2\u0214\u0215\7N\2\2\u0215"+ - "R\3\2\2\2\u0216\u0217\7H\2\2\u0217\u0218\7W\2\2\u0218\u0219\7P\2\2\u0219"+ - "\u021a\7E\2\2\u021a\u021b\7V\2\2\u021b\u021c\7K\2\2\u021c\u021d\7Q\2\2"+ - "\u021d\u021e\7P\2\2\u021e\u021f\7U\2\2\u021fT\3\2\2\2\u0220\u0221\7I\2"+ - "\2\u0221\u0222\7T\2\2\u0222\u0223\7C\2\2\u0223\u0224\7R\2\2\u0224\u0225"+ - "\7J\2\2\u0225\u0226\7X\2\2\u0226\u0227\7K\2\2\u0227\u0228\7\\\2\2\u0228"+ - "V\3\2\2\2\u0229\u022a\7I\2\2\u022a\u022b\7T\2\2\u022b\u022c\7Q\2\2\u022c"+ - "\u022d\7W\2\2\u022d\u022e\7R\2\2\u022eX\3\2\2\2\u022f\u0230\7J\2\2\u0230"+ - "\u0231\7C\2\2\u0231\u0232\7X\2\2\u0232\u0233\7K\2\2\u0233\u0234\7P\2\2"+ - "\u0234\u0235\7I\2\2\u0235Z\3\2\2\2\u0236\u0237\7J\2\2\u0237\u0238\7Q\2"+ - "\2\u0238\u0239\7W\2\2\u0239\u023a\7T\2\2\u023a\\\3\2\2\2\u023b\u023c\7"+ - "J\2\2\u023c\u023d\7Q\2\2\u023d\u023e\7W\2\2\u023e\u023f\7T\2\2\u023f\u0240"+ - "\7U\2\2\u0240^\3\2\2\2\u0241\u0242\7K\2\2\u0242\u0243\7P\2\2\u0243`\3"+ - "\2\2\2\u0244\u0245\7K\2\2\u0245\u0246\7P\2\2\u0246\u0247\7P\2\2\u0247"+ - "\u0248\7G\2\2\u0248\u0249\7T\2\2\u0249b\3\2\2\2\u024a\u024b\7K\2\2\u024b"+ - "\u024c\7P\2\2\u024c\u024d\7V\2\2\u024d\u024e\7G\2\2\u024e\u024f\7T\2\2"+ - "\u024f\u0250\7X\2\2\u0250\u0251\7C\2\2\u0251\u0252\7N\2\2\u0252d\3\2\2"+ - "\2\u0253\u0254\7K\2\2\u0254\u0255\7U\2\2\u0255f\3\2\2\2\u0256\u0257\7"+ - "L\2\2\u0257\u0258\7Q\2\2\u0258\u0259\7K\2\2\u0259\u025a\7P\2\2\u025ah"+ - "\3\2\2\2\u025b\u025c\7N\2\2\u025c\u025d\7C\2\2\u025d\u025e\7U\2\2\u025e"+ - "\u025f\7V\2\2\u025fj\3\2\2\2\u0260\u0261\7N\2\2\u0261\u0262\7G\2\2\u0262"+ - "\u0263\7H\2\2\u0263\u0264\7V\2\2\u0264l\3\2\2\2\u0265\u0266\7N\2\2\u0266"+ - "\u0267\7K\2\2\u0267\u0268\7M\2\2\u0268\u0269\7G\2\2\u0269n\3\2\2\2\u026a"+ - "\u026b\7N\2\2\u026b\u026c\7K\2\2\u026c\u026d\7O\2\2\u026d\u026e\7K\2\2"+ - "\u026e\u026f\7V\2\2\u026fp\3\2\2\2\u0270\u0271\7O\2\2\u0271\u0272\7C\2"+ - "\2\u0272\u0273\7R\2\2\u0273\u0274\7R\2\2\u0274\u0275\7G\2\2\u0275\u0276"+ - "\7F\2\2\u0276r\3\2\2\2\u0277\u0278\7O\2\2\u0278\u0279\7C\2\2\u0279\u027a"+ - "\7V\2\2\u027a\u027b\7E\2\2\u027b\u027c\7J\2\2\u027ct\3\2\2\2\u027d\u027e"+ - "\7O\2\2\u027e\u027f\7K\2\2\u027f\u0280\7P\2\2\u0280\u0281\7W\2\2\u0281"+ - "\u0282\7V\2\2\u0282\u0283\7G\2\2\u0283v\3\2\2\2\u0284\u0285\7O\2\2\u0285"+ - "\u0286\7K\2\2\u0286\u0287\7P\2\2\u0287\u0288\7W\2\2\u0288\u0289\7V\2\2"+ - "\u0289\u028a\7G\2\2\u028a\u028b\7U\2\2\u028bx\3\2\2\2\u028c\u028d\7O\2"+ - "\2\u028d\u028e\7Q\2\2\u028e\u028f\7P\2\2\u028f\u0290\7V\2\2\u0290\u0291"+ - "\7J\2\2\u0291z\3\2\2\2\u0292\u0293\7O\2\2\u0293\u0294\7Q\2\2\u0294\u0295"+ - "\7P\2\2\u0295\u0296\7V\2\2\u0296\u0297\7J\2\2\u0297\u0298\7U\2\2\u0298"+ - "|\3\2\2\2\u0299\u029a\7P\2\2\u029a\u029b\7C\2\2\u029b\u029c\7V\2\2\u029c"+ - "\u029d\7W\2\2\u029d\u029e\7T\2\2\u029e\u029f\7C\2\2\u029f\u02a0\7N\2\2"+ - "\u02a0~\3\2\2\2\u02a1\u02a2\7P\2\2\u02a2\u02a3\7Q\2\2\u02a3\u02a4\7V\2"+ - "\2\u02a4\u0080\3\2\2\2\u02a5\u02a6\7P\2\2\u02a6\u02a7\7W\2\2\u02a7\u02a8"+ - "\7N\2\2\u02a8\u02a9\7N\2\2\u02a9\u0082\3\2\2\2\u02aa\u02ab\7P\2\2\u02ab"+ - 
"\u02ac\7W\2\2\u02ac\u02ad\7N\2\2\u02ad\u02ae\7N\2\2\u02ae\u02af\7U\2\2"+ - "\u02af\u0084\3\2\2\2\u02b0\u02b1\7Q\2\2\u02b1\u02b2\7P\2\2\u02b2\u0086"+ - "\3\2\2\2\u02b3\u02b4\7Q\2\2\u02b4\u02b5\7R\2\2\u02b5\u02b6\7V\2\2\u02b6"+ - "\u02b7\7K\2\2\u02b7\u02b8\7O\2\2\u02b8\u02b9\7K\2\2\u02b9\u02ba\7\\\2"+ - "\2\u02ba\u02bb\7G\2\2\u02bb\u02bc\7F\2\2\u02bc\u0088\3\2\2\2\u02bd\u02be"+ - "\7Q\2\2\u02be\u02bf\7T\2\2\u02bf\u008a\3\2\2\2\u02c0\u02c1\7Q\2\2\u02c1"+ - "\u02c2\7T\2\2\u02c2\u02c3\7F\2\2\u02c3\u02c4\7G\2\2\u02c4\u02c5\7T\2\2"+ - "\u02c5\u008c\3\2\2\2\u02c6\u02c7\7Q\2\2\u02c7\u02c8\7W\2\2\u02c8\u02c9"+ - "\7V\2\2\u02c9\u02ca\7G\2\2\u02ca\u02cb\7T\2\2\u02cb\u008e\3\2\2\2\u02cc"+ - "\u02cd\7R\2\2\u02cd\u02ce\7C\2\2\u02ce\u02cf\7T\2\2\u02cf\u02d0\7U\2\2"+ - "\u02d0\u02d1\7G\2\2\u02d1\u02d2\7F\2\2\u02d2\u0090\3\2\2\2\u02d3\u02d4"+ - "\7R\2\2\u02d4\u02d5\7J\2\2\u02d5\u02d6\7[\2\2\u02d6\u02d7\7U\2\2\u02d7"+ - "\u02d8\7K\2\2\u02d8\u02d9\7E\2\2\u02d9\u02da\7C\2\2\u02da\u02db\7N\2\2"+ - "\u02db\u0092\3\2\2\2\u02dc\u02dd\7R\2\2\u02dd\u02de\7N\2\2\u02de\u02df"+ - "\7C\2\2\u02df\u02e0\7P\2\2\u02e0\u0094\3\2\2\2\u02e1\u02e2\7T\2\2\u02e2"+ - "\u02e3\7K\2\2\u02e3\u02e4\7I\2\2\u02e4\u02e5\7J\2\2\u02e5\u02e6\7V\2\2"+ - "\u02e6\u0096\3\2\2\2\u02e7\u02e8\7T\2\2\u02e8\u02e9\7N\2\2\u02e9\u02ea"+ - "\7K\2\2\u02ea\u02eb\7M\2\2\u02eb\u02ec\7G\2\2\u02ec\u0098\3\2\2\2\u02ed"+ - "\u02ee\7S\2\2\u02ee\u02ef\7W\2\2\u02ef\u02f0\7G\2\2\u02f0\u02f1\7T\2\2"+ - "\u02f1\u02f2\7[\2\2\u02f2\u009a\3\2\2\2\u02f3\u02f4\7U\2\2\u02f4\u02f5"+ - "\7E\2\2\u02f5\u02f6\7J\2\2\u02f6\u02f7\7G\2\2\u02f7\u02f8\7O\2\2\u02f8"+ - "\u02f9\7C\2\2\u02f9\u02fa\7U\2\2\u02fa\u009c\3\2\2\2\u02fb\u02fc\7U\2"+ - "\2\u02fc\u02fd\7G\2\2\u02fd\u02fe\7E\2\2\u02fe\u02ff\7Q\2\2\u02ff\u0300"+ - "\7P\2\2\u0300\u0301\7F\2\2\u0301\u009e\3\2\2\2\u0302\u0303\7U\2\2\u0303"+ - "\u0304\7G\2\2\u0304\u0305\7E\2\2\u0305\u0306\7Q\2\2\u0306\u0307\7P\2\2"+ - "\u0307\u0308\7F\2\2\u0308\u0309\7U\2\2\u0309\u00a0\3\2\2\2\u030a\u030b"+ - "\7U\2\2\u030b\u030c\7G\2\2\u030c\u030d\7N\2\2\u030d\u030e\7G\2\2\u030e"+ - "\u030f\7E\2\2\u030f\u0310\7V\2\2\u0310\u00a2\3\2\2\2\u0311\u0312\7U\2"+ - "\2\u0312\u0313\7J\2\2\u0313\u0314\7Q\2\2\u0314\u0315\7Y\2\2\u0315\u00a4"+ - "\3\2\2\2\u0316\u0317\7U\2\2\u0317\u0318\7[\2\2\u0318\u0319\7U\2\2\u0319"+ - "\u00a6\3\2\2\2\u031a\u031b\7V\2\2\u031b\u031c\7C\2\2\u031c\u031d\7D\2"+ - "\2\u031d\u031e\7N\2\2\u031e\u031f\7G\2\2\u031f\u00a8\3\2\2\2\u0320\u0321"+ - "\7V\2\2\u0321\u0322\7C\2\2\u0322\u0323\7D\2\2\u0323\u0324\7N\2\2\u0324"+ - "\u0325\7G\2\2\u0325\u0326\7U\2\2\u0326\u00aa\3\2\2\2\u0327\u0328\7V\2"+ - "\2\u0328\u0329\7G\2\2\u0329\u032a\7Z\2\2\u032a\u032b\7V\2\2\u032b\u00ac"+ - "\3\2\2\2\u032c\u032d\7V\2\2\u032d\u032e\7J\2\2\u032e\u032f\7G\2\2\u032f"+ - "\u0330\7P\2\2\u0330\u00ae\3\2\2\2\u0331\u0332\7V\2\2\u0332\u0333\7T\2"+ - "\2\u0333\u0334\7W\2\2\u0334\u0335\7G\2\2\u0335\u00b0\3\2\2\2\u0336\u0337"+ - "\7V\2\2\u0337\u0338\7Q\2\2\u0338\u00b2\3\2\2\2\u0339\u033a\7V\2\2\u033a"+ - "\u033b\7[\2\2\u033b\u033c\7R\2\2\u033c\u033d\7G\2\2\u033d\u00b4\3\2\2"+ - "\2\u033e\u033f\7V\2\2\u033f\u0340\7[\2\2\u0340\u0341\7R\2\2\u0341\u0342"+ - "\7G\2\2\u0342\u0343\7U\2\2\u0343\u00b6\3\2\2\2\u0344\u0345\7W\2\2\u0345"+ - "\u0346\7U\2\2\u0346\u0347\7K\2\2\u0347\u0348\7P\2\2\u0348\u0349\7I\2\2"+ - "\u0349\u00b8\3\2\2\2\u034a\u034b\7X\2\2\u034b\u034c\7G\2\2\u034c\u034d"+ - "\7T\2\2\u034d\u034e\7K\2\2\u034e\u034f\7H\2\2\u034f\u0350\7[\2\2\u0350"+ - "\u00ba\3\2\2\2\u0351\u0352\7Y\2\2\u0352\u0353\7J\2\2\u0353\u0354\7G\2"+ - 
"\2\u0354\u0355\7P\2\2\u0355\u00bc\3\2\2\2\u0356\u0357\7Y\2\2\u0357\u0358"+ - "\7J\2\2\u0358\u0359\7G\2\2\u0359\u035a\7T\2\2\u035a\u035b\7G\2\2\u035b"+ - "\u00be\3\2\2\2\u035c\u035d\7Y\2\2\u035d\u035e\7K\2\2\u035e\u035f\7V\2"+ - "\2\u035f\u0360\7J\2\2\u0360\u00c0\3\2\2\2\u0361\u0362\7[\2\2\u0362\u0363"+ - "\7G\2\2\u0363\u0364\7C\2\2\u0364\u0365\7T\2\2\u0365\u00c2\3\2\2\2\u0366"+ - "\u0367\7[\2\2\u0367\u0368\7G\2\2\u0368\u0369\7C\2\2\u0369\u036a\7T\2\2"+ - "\u036a\u036b\7U\2\2\u036b\u00c4\3\2\2\2\u036c\u036d\7}\2\2\u036d\u036e"+ - "\7G\2\2\u036e\u036f\7U\2\2\u036f\u0370\7E\2\2\u0370\u0371\7C\2\2\u0371"+ - "\u0372\7R\2\2\u0372\u0373\7G\2\2\u0373\u00c6\3\2\2\2\u0374\u0375\7}\2"+ - "\2\u0375\u0376\7H\2\2\u0376\u0377\7P\2\2\u0377\u00c8\3\2\2\2\u0378\u0379"+ - "\7}\2\2\u0379\u037a\7N\2\2\u037a\u037b\7K\2\2\u037b\u037c\7O\2\2\u037c"+ - "\u037d\7K\2\2\u037d\u037e\7V\2\2\u037e\u00ca\3\2\2\2\u037f\u0380\7}\2"+ - "\2\u0380\u0381\7F\2\2\u0381\u00cc\3\2\2\2\u0382\u0383\7}\2\2\u0383\u0384"+ - "\7V\2\2\u0384\u00ce\3\2\2\2\u0385\u0386\7}\2\2\u0386\u0387\7V\2\2\u0387"+ - "\u0388\7U\2\2\u0388\u00d0\3\2\2\2\u0389\u038a\7}\2\2\u038a\u038b\7I\2"+ - "\2\u038b\u038c\7W\2\2\u038c\u038d\7K\2\2\u038d\u038e\7F\2\2\u038e\u00d2"+ - "\3\2\2\2\u038f\u0390\7\177\2\2\u0390\u00d4\3\2\2\2\u0391\u0392\7?\2\2"+ - "\u0392\u00d6\3\2\2\2\u0393\u0394\7>\2\2\u0394\u0395\7?\2\2\u0395\u0396"+ - "\7@\2\2\u0396\u00d8\3\2\2\2\u0397\u0398\7>\2\2\u0398\u039c\7@\2\2\u0399"+ - "\u039a\7#\2\2\u039a\u039c\7?\2\2\u039b\u0397\3\2\2\2\u039b\u0399\3\2\2"+ - "\2\u039c\u00da\3\2\2\2\u039d\u039e\7>\2\2\u039e\u00dc\3\2\2\2\u039f\u03a0"+ - "\7>\2\2\u03a0\u03a1\7?\2\2\u03a1\u00de\3\2\2\2\u03a2\u03a3\7@\2\2\u03a3"+ - "\u00e0\3\2\2\2\u03a4\u03a5\7@\2\2\u03a5\u03a6\7?\2\2\u03a6\u00e2\3\2\2"+ - "\2\u03a7\u03a8\7-\2\2\u03a8\u00e4\3\2\2\2\u03a9\u03aa\7/\2\2\u03aa\u00e6"+ - "\3\2\2\2\u03ab\u03ac\7,\2\2\u03ac\u00e8\3\2\2\2\u03ad\u03ae\7\61\2\2\u03ae"+ - "\u00ea\3\2\2\2\u03af\u03b0\7\'\2\2\u03b0\u00ec\3\2\2\2\u03b1\u03b2\7<"+ - "\2\2\u03b2\u03b3\7<\2\2\u03b3\u00ee\3\2\2\2\u03b4\u03b5\7~\2\2\u03b5\u03b6"+ - "\7~\2\2\u03b6\u00f0\3\2\2\2\u03b7\u03b8\7\60\2\2\u03b8\u00f2\3\2\2\2\u03b9"+ - "\u03ba\7A\2\2\u03ba\u00f4\3\2\2\2\u03bb\u03c1\7)\2\2\u03bc\u03c0\n\2\2"+ - "\2\u03bd\u03be\7)\2\2\u03be\u03c0\7)\2\2\u03bf\u03bc\3\2\2\2\u03bf\u03bd"+ - "\3\2\2\2\u03c0\u03c3\3\2\2\2\u03c1\u03bf\3\2\2\2\u03c1\u03c2\3\2\2\2\u03c2"+ - "\u03c4\3\2\2\2\u03c3\u03c1\3\2\2\2\u03c4\u03c5\7)\2\2\u03c5\u00f6\3\2"+ - "\2\2\u03c6\u03c8\5\u0107\u0084\2\u03c7\u03c6\3\2\2\2\u03c8\u03c9\3\2\2"+ - "\2\u03c9\u03c7\3\2\2\2\u03c9\u03ca\3\2\2\2\u03ca\u00f8\3\2\2\2\u03cb\u03cd"+ - "\5\u0107\u0084\2\u03cc\u03cb\3\2\2\2\u03cd\u03ce\3\2\2\2\u03ce\u03cc\3"+ - "\2\2\2\u03ce\u03cf\3\2\2\2\u03cf\u03d0\3\2\2\2\u03d0\u03d4\5\u00f1y\2"+ - "\u03d1\u03d3\5\u0107\u0084\2\u03d2\u03d1\3\2\2\2\u03d3\u03d6\3\2\2\2\u03d4"+ - "\u03d2\3\2\2\2\u03d4\u03d5\3\2\2\2\u03d5\u03f6\3\2\2\2\u03d6\u03d4\3\2"+ - "\2\2\u03d7\u03d9\5\u00f1y\2\u03d8\u03da\5\u0107\u0084\2\u03d9\u03d8\3"+ - "\2\2\2\u03da\u03db\3\2\2\2\u03db\u03d9\3\2\2\2\u03db\u03dc\3\2\2\2\u03dc"+ - "\u03f6\3\2\2\2\u03dd\u03df\5\u0107\u0084\2\u03de\u03dd\3\2\2\2\u03df\u03e0"+ - "\3\2\2\2\u03e0\u03de\3\2\2\2\u03e0\u03e1\3\2\2\2\u03e1\u03e9\3\2\2\2\u03e2"+ - "\u03e6\5\u00f1y\2\u03e3\u03e5\5\u0107\u0084\2\u03e4\u03e3\3\2\2\2\u03e5"+ - "\u03e8\3\2\2\2\u03e6\u03e4\3\2\2\2\u03e6\u03e7\3\2\2\2\u03e7\u03ea\3\2"+ - "\2\2\u03e8\u03e6\3\2\2\2\u03e9\u03e2\3\2\2\2\u03e9\u03ea\3\2\2\2\u03ea"+ - "\u03eb\3\2\2\2\u03eb\u03ec\5\u0105\u0083\2\u03ec\u03f6\3\2\2\2\u03ed\u03ef"+ - 
"\5\u00f1y\2\u03ee\u03f0\5\u0107\u0084\2\u03ef\u03ee\3\2\2\2\u03f0\u03f1"+ - "\3\2\2\2\u03f1\u03ef\3\2\2\2\u03f1\u03f2\3\2\2\2\u03f2\u03f3\3\2\2\2\u03f3"+ - "\u03f4\5\u0105\u0083\2\u03f4\u03f6\3\2\2\2\u03f5\u03cc\3\2\2\2\u03f5\u03d7"+ - "\3\2\2\2\u03f5\u03de\3\2\2\2\u03f5\u03ed\3\2\2\2\u03f6\u00fa\3\2\2\2\u03f7"+ - "\u03fa\5\u0109\u0085\2\u03f8\u03fa\7a\2\2\u03f9\u03f7\3\2\2\2\u03f9\u03f8"+ - "\3\2\2\2\u03fa\u0400\3\2\2\2\u03fb\u03ff\5\u0109\u0085\2\u03fc\u03ff\5"+ - "\u0107\u0084\2\u03fd\u03ff\t\3\2\2\u03fe\u03fb\3\2\2\2\u03fe\u03fc\3\2"+ - "\2\2\u03fe\u03fd\3\2\2\2\u03ff\u0402\3\2\2\2\u0400\u03fe\3\2\2\2\u0400"+ - "\u0401\3\2\2\2\u0401\u00fc\3\2\2\2\u0402\u0400\3\2\2\2\u0403\u0407\5\u0107"+ - "\u0084\2\u0404\u0408\5\u0109\u0085\2\u0405\u0408\5\u0107\u0084\2\u0406"+ - "\u0408\t\3\2\2\u0407\u0404\3\2\2\2\u0407\u0405\3\2\2\2\u0407\u0406\3\2"+ - "\2\2\u0408\u0409\3\2\2\2\u0409\u0407\3\2\2\2\u0409\u040a\3\2\2\2\u040a"+ - "\u00fe\3\2\2\2\u040b\u040f\5\u0109\u0085\2\u040c\u040f\5\u0107\u0084\2"+ - "\u040d\u040f\7a\2\2\u040e\u040b\3\2\2\2\u040e\u040c\3\2\2\2\u040e\u040d"+ - "\3\2\2\2\u040f\u0410\3\2\2\2\u0410\u040e\3\2\2\2\u0410\u0411\3\2\2\2\u0411"+ - "\u0100\3\2\2\2\u0412\u0418\7$\2\2\u0413\u0417\n\4\2\2\u0414\u0415\7$\2"+ - "\2\u0415\u0417\7$\2\2\u0416\u0413\3\2\2\2\u0416\u0414\3\2\2\2\u0417\u041a"+ - "\3\2\2\2\u0418\u0416\3\2\2\2\u0418\u0419\3\2\2\2\u0419\u041b\3\2\2\2\u041a"+ - "\u0418\3\2\2\2\u041b\u041c\7$\2\2\u041c\u0102\3\2\2\2\u041d\u0423\7b\2"+ - "\2\u041e\u0422\n\5\2\2\u041f\u0420\7b\2\2\u0420\u0422\7b\2\2\u0421\u041e"+ - "\3\2\2\2\u0421\u041f\3\2\2\2\u0422\u0425\3\2\2\2\u0423\u0421\3\2\2\2\u0423"+ - "\u0424\3\2\2\2\u0424\u0426\3\2\2\2\u0425\u0423\3\2\2\2\u0426\u0427\7b"+ - "\2\2\u0427\u0104\3\2\2\2\u0428\u042a\7G\2\2\u0429\u042b\t\6\2\2\u042a"+ - "\u0429\3\2\2\2\u042a\u042b\3\2\2\2\u042b\u042d\3\2\2\2\u042c\u042e\5\u0107"+ - "\u0084\2\u042d\u042c\3\2\2\2\u042e\u042f\3\2\2\2\u042f\u042d\3\2\2\2\u042f"+ - "\u0430\3\2\2\2\u0430\u0106\3\2\2\2\u0431\u0432\t\7\2\2\u0432\u0108\3\2"+ - "\2\2\u0433\u0434\t\b\2\2\u0434\u010a\3\2\2\2\u0435\u0436\7/\2\2\u0436"+ - "\u0437\7/\2\2\u0437\u043b\3\2\2\2\u0438\u043a\n\t\2\2\u0439\u0438\3\2"+ - "\2\2\u043a\u043d\3\2\2\2\u043b\u0439\3\2\2\2\u043b\u043c\3\2\2\2\u043c"+ - "\u043f\3\2\2\2\u043d\u043b\3\2\2\2\u043e\u0440\7\17\2\2\u043f\u043e\3"+ - "\2\2\2\u043f\u0440\3\2\2\2\u0440\u0442\3\2\2\2\u0441\u0443\7\f\2\2\u0442"+ - "\u0441\3\2\2\2\u0442\u0443\3\2\2\2\u0443\u0444\3\2\2\2\u0444\u0445\b\u0086"+ - "\2\2\u0445\u010c\3\2\2\2\u0446\u0447\7\61\2\2\u0447\u0448\7,\2\2\u0448"+ - "\u044d\3\2\2\2\u0449\u044c\5\u010d\u0087\2\u044a\u044c\13\2\2\2\u044b"+ - "\u0449\3\2\2\2\u044b\u044a\3\2\2\2\u044c\u044f\3\2\2\2\u044d\u044e\3\2"+ - "\2\2\u044d\u044b\3\2\2\2\u044e\u0450\3\2\2\2\u044f\u044d\3\2\2\2\u0450"+ - "\u0451\7,\2\2\u0451\u0452\7\61\2\2\u0452\u0453\3\2\2\2\u0453\u0454\b\u0087"+ - "\2\2\u0454\u010e\3\2\2\2\u0455\u0457\t\n\2\2\u0456\u0455\3\2\2\2\u0457"+ - "\u0458\3\2\2\2\u0458\u0456\3\2\2\2\u0458\u0459\3\2\2\2\u0459\u045a\3\2"+ - "\2\2\u045a\u045b\b\u0088\2\2\u045b\u0110\3\2\2\2\u045c\u045d\13\2\2\2"+ - "\u045d\u0112\3\2\2\2\"\2\u039b\u03bf\u03c1\u03c9\u03ce\u03d4\u03db\u03e0"+ - "\u03e6\u03e9\u03f1\u03f5\u03f9\u03fe\u0400\u0407\u0409\u040e\u0410\u0416"+ - "\u0418\u0421\u0423\u042a\u042f\u043b\u043f\u0442\u044b\u044d\u0458\3\2"+ - "\3\2"; + "\t\u0089\4\u008a\t\u008a\4\u008b\t\u008b\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3"+ + "\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b"+ + 
"\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3"+ + "\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3"+ + "\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3"+ + "\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3"+ + "\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3"+ + "\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3"+ + "\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3"+ + "\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3"+ + "\27\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3"+ + "\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3"+ + "\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3"+ + "\36\3\36\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3"+ + "!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3#\3$\3"+ + "$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3"+ + "\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3)\3)\3)\3)\3)\3)\3)\3*\3*\3*\3*\3*\3+\3"+ + "+\3+\3+\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3,\3,\3,\3,\3-\3-\3-\3-\3-\3"+ + "-\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3\60\3\60\3\60\3\60\3\60\3\60\3"+ + "\61\3\61\3\61\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\63\3\63\3\63\3"+ + "\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65\3"+ + "\65\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67\38\38\38\38\38\3"+ + "9\39\39\39\39\3:\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3;\3;\3<\3<\3<\3<\3<\3"+ + "<\3=\3=\3=\3=\3=\3=\3=\3>\3>\3>\3>\3>\3>\3>\3>\3?\3?\3?\3?\3?\3?\3@\3"+ + "@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3A\3A\3A\3B\3B\3B\3B\3C\3C\3C\3C\3C\3"+ + "D\3D\3D\3D\3D\3D\3E\3E\3E\3F\3F\3F\3F\3F\3F\3F\3F\3F\3F\3G\3G\3G\3H\3"+ + "H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3J\3J\3J\3K\3K\3K\3K\3K\3"+ + "K\3K\3K\3K\3L\3L\3L\3L\3L\3M\3M\3M\3M\3M\3M\3N\3N\3N\3N\3N\3N\3O\3O\3"+ + "O\3O\3O\3O\3P\3P\3P\3P\3P\3P\3P\3P\3Q\3Q\3Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3"+ + "R\3R\3R\3R\3S\3S\3S\3S\3S\3S\3S\3T\3T\3T\3T\3T\3U\3U\3U\3U\3V\3V\3V\3"+ + "V\3V\3V\3W\3W\3W\3W\3W\3W\3W\3X\3X\3X\3X\3X\3Y\3Y\3Y\3Y\3Y\3Z\3Z\3Z\3"+ + "Z\3Z\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3]\3]\3]\3]\3]\3]\3^\3^\3^\3^\3^\3^"+ + "\3_\3_\3_\3_\3_\3_\3_\3`\3`\3`\3`\3`\3a\3a\3a\3a\3a\3a\3b\3b\3b\3b\3b"+ + "\3c\3c\3c\3c\3c\3d\3d\3d\3d\3d\3d\3e\3e\3e\3e\3e\3e\3e\3e\3f\3f\3f\3f"+ + "\3g\3g\3g\3g\3g\3g\3g\3h\3h\3h\3i\3i\3i\3j\3j\3j\3j\3k\3k\3k\3k\3k\3k"+ + "\3l\3l\3m\3m\3n\3n\3n\3n\3o\3o\3o\3o\5o\u03af\no\3p\3p\3q\3q\3q\3r\3r"+ + "\3s\3s\3s\3t\3t\3u\3u\3v\3v\3w\3w\3x\3x\3y\3y\3y\3z\3z\3z\3{\3{\3|\3|"+ + "\3}\3}\3}\3}\7}\u03d3\n}\f}\16}\u03d6\13}\3}\3}\3~\6~\u03db\n~\r~\16~"+ + "\u03dc\3\177\6\177\u03e0\n\177\r\177\16\177\u03e1\3\177\3\177\7\177\u03e6"+ + "\n\177\f\177\16\177\u03e9\13\177\3\177\3\177\6\177\u03ed\n\177\r\177\16"+ + "\177\u03ee\3\177\6\177\u03f2\n\177\r\177\16\177\u03f3\3\177\3\177\7\177"+ + "\u03f8\n\177\f\177\16\177\u03fb\13\177\5\177\u03fd\n\177\3\177\3\177\3"+ + "\177\3\177\6\177\u0403\n\177\r\177\16\177\u0404\3\177\3\177\5\177\u0409"+ + "\n\177\3\u0080\3\u0080\5\u0080\u040d\n\u0080\3\u0080\3\u0080\3\u0080\7"+ + "\u0080\u0412\n\u0080\f\u0080\16\u0080\u0415\13\u0080\3\u0081\3\u0081\3"+ + "\u0081\3\u0081\6\u0081\u041b\n\u0081\r\u0081\16\u0081\u041c\3\u0082\3"+ + "\u0082\3\u0082\6\u0082\u0422\n\u0082\r\u0082\16\u0082\u0423\3\u0083\3"+ + "\u0083\3\u0083\3\u0083\7\u0083\u042a\n\u0083\f\u0083\16\u0083\u042d\13"+ + "\u0083\3\u0083\3\u0083\3\u0084\3\u0084\3\u0084\3\u0084\7\u0084\u0435\n"+ + 
"\u0084\f\u0084\16\u0084\u0438\13\u0084\3\u0084\3\u0084\3\u0085\3\u0085"+ + "\5\u0085\u043e\n\u0085\3\u0085\6\u0085\u0441\n\u0085\r\u0085\16\u0085"+ + "\u0442\3\u0086\3\u0086\3\u0087\3\u0087\3\u0088\3\u0088\3\u0088\3\u0088"+ + "\7\u0088\u044d\n\u0088\f\u0088\16\u0088\u0450\13\u0088\3\u0088\5\u0088"+ + "\u0453\n\u0088\3\u0088\5\u0088\u0456\n\u0088\3\u0088\3\u0088\3\u0089\3"+ + "\u0089\3\u0089\3\u0089\3\u0089\7\u0089\u045f\n\u0089\f\u0089\16\u0089"+ + "\u0462\13\u0089\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089\3\u008a\6\u008a"+ + "\u046a\n\u008a\r\u008a\16\u008a\u046b\3\u008a\3\u008a\3\u008b\3\u008b"+ + "\3\u0460\2\u008c\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31"+ + "\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65"+ + "\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64"+ + "g\65i\66k\67m8o9q:s;u{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089"+ + "F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009bO\u009d"+ + "P\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00abW\u00adX\u00afY\u00b1"+ + "Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3c\u00c5"+ + "d\u00c7e\u00c9f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3k\u00d5l\u00d7m\u00d9"+ + "n\u00dbo\u00ddp\u00dfq\u00e1r\u00e3s\u00e5t\u00e7u\u00e9v\u00ebw\u00ed"+ + "x\u00efy\u00f1z\u00f3{\u00f5|\u00f7}\u00f9~\u00fb\177\u00fd\u0080\u00ff"+ + "\u0081\u0101\u0082\u0103\u0083\u0105\u0084\u0107\u0085\u0109\2\u010b\2"+ + "\u010d\2\u010f\u0086\u0111\u0087\u0113\u0088\u0115\u0089\3\2\13\3\2))"+ + "\4\2BBaa\3\2$$\3\2bb\4\2--//\3\2\62;\3\2C\\\4\2\f\f\17\17\5\2\13\f\17"+ + "\17\"\"\u0491\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2"+ + "\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2"+ + "\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2"+ + "\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2"+ + "\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3"+ + "\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2"+ + "\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2"+ + "S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3"+ + "\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2"+ + "\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2"+ + "y\3\2\2\2\2{\3\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2\2\2\u0083"+ + "\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2\2\2\u008b\3\2\2"+ + "\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091\3\2\2\2\2\u0093\3\2\2\2\2\u0095"+ + "\3\2\2\2\2\u0097\3\2\2\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d\3\2\2"+ + "\2\2\u009f\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2\2\2\u00a7"+ + "\3\2\2\2\2\u00a9\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad\3\2\2\2\2\u00af\3\2\2"+ + "\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2\2\2\u00b9"+ + "\3\2\2\2\2\u00bb\3\2\2\2\2\u00bd\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2"+ + "\2\2\u00c3\3\2\2\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00c9\3\2\2\2\2\u00cb"+ + "\3\2\2\2\2\u00cd\3\2\2\2\2\u00cf\3\2\2\2\2\u00d1\3\2\2\2\2\u00d3\3\2\2"+ + "\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9\3\2\2\2\2\u00db\3\2\2\2\2\u00dd"+ + "\3\2\2\2\2\u00df\3\2\2\2\2\u00e1\3\2\2\2\2\u00e3\3\2\2\2\2\u00e5\3\2\2"+ + "\2\2\u00e7\3\2\2\2\2\u00e9\3\2\2\2\2\u00eb\3\2\2\2\2\u00ed\3\2\2\2\2\u00ef"+ + "\3\2\2\2\2\u00f1\3\2\2\2\2\u00f3\3\2\2\2\2\u00f5\3\2\2\2\2\u00f7\3\2\2"+ + "\2\2\u00f9\3\2\2\2\2\u00fb\3\2\2\2\2\u00fd\3\2\2\2\2\u00ff\3\2\2\2\2\u0101"+ + 
"\3\2\2\2\2\u0103\3\2\2\2\2\u0105\3\2\2\2\2\u0107\3\2\2\2\2\u010f\3\2\2"+ + "\2\2\u0111\3\2\2\2\2\u0113\3\2\2\2\2\u0115\3\2\2\2\3\u0117\3\2\2\2\5\u0119"+ + "\3\2\2\2\7\u011b\3\2\2\2\t\u011d\3\2\2\2\13\u011f\3\2\2\2\r\u0123\3\2"+ + "\2\2\17\u012b\3\2\2\2\21\u0134\3\2\2\2\23\u0138\3\2\2\2\25\u013c\3\2\2"+ + "\2\27\u013f\3\2\2\2\31\u0143\3\2\2\2\33\u014b\3\2\2\2\35\u014e\3\2\2\2"+ + "\37\u0153\3\2\2\2!\u0158\3\2\2\2#\u0160\3\2\2\2%\u0169\3\2\2\2\'\u0171"+ + "\3\2\2\2)\u0179\3\2\2\2+\u0186\3\2\2\2-\u0193\3\2\2\2/\u01a5\3\2\2\2\61"+ + "\u01a9\3\2\2\2\63\u01ae\3\2\2\2\65\u01b4\3\2\2\2\67\u01b9\3\2\2\29\u01c2"+ + "\3\2\2\2;\u01cb\3\2\2\2=\u01d0\3\2\2\2?\u01d4\3\2\2\2A\u01db\3\2\2\2C"+ + "\u01e6\3\2\2\2E\u01ed\3\2\2\2G\u01f5\3\2\2\2I\u01fd\3\2\2\2K\u0203\3\2"+ + "\2\2M\u0209\3\2\2\2O\u0210\3\2\2\2Q\u0215\3\2\2\2S\u021c\3\2\2\2U\u0221"+ + "\3\2\2\2W\u022b\3\2\2\2Y\u0234\3\2\2\2[\u023a\3\2\2\2]\u0241\3\2\2\2_"+ + "\u0246\3\2\2\2a\u024c\3\2\2\2c\u024f\3\2\2\2e\u0257\3\2\2\2g\u025d\3\2"+ + "\2\2i\u0266\3\2\2\2k\u0269\3\2\2\2m\u026e\3\2\2\2o\u0273\3\2\2\2q\u0278"+ + "\3\2\2\2s\u027d\3\2\2\2u\u0283\3\2\2\2w\u028a\3\2\2\2y\u0290\3\2\2\2{"+ + "\u0297\3\2\2\2}\u029f\3\2\2\2\177\u02a5\3\2\2\2\u0081\u02ac\3\2\2\2\u0083"+ + "\u02b4\3\2\2\2\u0085\u02b8\3\2\2\2\u0087\u02bd\3\2\2\2\u0089\u02c3\3\2"+ + "\2\2\u008b\u02c6\3\2\2\2\u008d\u02d0\3\2\2\2\u008f\u02d3\3\2\2\2\u0091"+ + "\u02d9\3\2\2\2\u0093\u02df\3\2\2\2\u0095\u02e6\3\2\2\2\u0097\u02ef\3\2"+ + "\2\2\u0099\u02f4\3\2\2\2\u009b\u02fa\3\2\2\2\u009d\u0300\3\2\2\2\u009f"+ + "\u0306\3\2\2\2\u00a1\u030e\3\2\2\2\u00a3\u0315\3\2\2\2\u00a5\u031d\3\2"+ + "\2\2\u00a7\u0324\3\2\2\2\u00a9\u0329\3\2\2\2\u00ab\u032d\3\2\2\2\u00ad"+ + "\u0333\3\2\2\2\u00af\u033a\3\2\2\2\u00b1\u033f\3\2\2\2\u00b3\u0344\3\2"+ + "\2\2\u00b5\u0349\3\2\2\2\u00b7\u034c\3\2\2\2\u00b9\u0351\3\2\2\2\u00bb"+ + "\u0357\3\2\2\2\u00bd\u035d\3\2\2\2\u00bf\u0364\3\2\2\2\u00c1\u0369\3\2"+ + "\2\2\u00c3\u036f\3\2\2\2\u00c5\u0374\3\2\2\2\u00c7\u0379\3\2\2\2\u00c9"+ + "\u037f\3\2\2\2\u00cb\u0387\3\2\2\2\u00cd\u038b\3\2\2\2\u00cf\u0392\3\2"+ + "\2\2\u00d1\u0395\3\2\2\2\u00d3\u0398\3\2\2\2\u00d5\u039c\3\2\2\2\u00d7"+ + "\u03a2\3\2\2\2\u00d9\u03a4\3\2\2\2\u00db\u03a6\3\2\2\2\u00dd\u03ae\3\2"+ + "\2\2\u00df\u03b0\3\2\2\2\u00e1\u03b2\3\2\2\2\u00e3\u03b5\3\2\2\2\u00e5"+ + "\u03b7\3\2\2\2\u00e7\u03ba\3\2\2\2\u00e9\u03bc\3\2\2\2\u00eb\u03be\3\2"+ + "\2\2\u00ed\u03c0\3\2\2\2\u00ef\u03c2\3\2\2\2\u00f1\u03c4\3\2\2\2\u00f3"+ + "\u03c7\3\2\2\2\u00f5\u03ca\3\2\2\2\u00f7\u03cc\3\2\2\2\u00f9\u03ce\3\2"+ + "\2\2\u00fb\u03da\3\2\2\2\u00fd\u0408\3\2\2\2\u00ff\u040c\3\2\2\2\u0101"+ + "\u0416\3\2\2\2\u0103\u0421\3\2\2\2\u0105\u0425\3\2\2\2\u0107\u0430\3\2"+ + "\2\2\u0109\u043b\3\2\2\2\u010b\u0444\3\2\2\2\u010d\u0446\3\2\2\2\u010f"+ + "\u0448\3\2\2\2\u0111\u0459\3\2\2\2\u0113\u0469\3\2\2\2\u0115\u046f\3\2"+ + "\2\2\u0117\u0118\7*\2\2\u0118\4\3\2\2\2\u0119\u011a\7+\2\2\u011a\6\3\2"+ + "\2\2\u011b\u011c\7.\2\2\u011c\b\3\2\2\2\u011d\u011e\7<\2\2\u011e\n\3\2"+ + "\2\2\u011f\u0120\7C\2\2\u0120\u0121\7N\2\2\u0121\u0122\7N\2\2\u0122\f"+ + "\3\2\2\2\u0123\u0124\7C\2\2\u0124\u0125\7P\2\2\u0125\u0126\7C\2\2\u0126"+ + "\u0127\7N\2\2\u0127\u0128\7[\2\2\u0128\u0129\7\\\2\2\u0129\u012a\7G\2"+ + "\2\u012a\16\3\2\2\2\u012b\u012c\7C\2\2\u012c\u012d\7P\2\2\u012d\u012e"+ + "\7C\2\2\u012e\u012f\7N\2\2\u012f\u0130\7[\2\2\u0130\u0131\7\\\2\2\u0131"+ + "\u0132\7G\2\2\u0132\u0133\7F\2\2\u0133\20\3\2\2\2\u0134\u0135\7C\2\2\u0135"+ + "\u0136\7P\2\2\u0136\u0137\7F\2\2\u0137\22\3\2\2\2\u0138\u0139\7C\2\2\u0139"+ + 
"\u013a\7P\2\2\u013a\u013b\7[\2\2\u013b\24\3\2\2\2\u013c\u013d\7C\2\2\u013d"+ + "\u013e\7U\2\2\u013e\26\3\2\2\2\u013f\u0140\7C\2\2\u0140\u0141\7U\2\2\u0141"+ + "\u0142\7E\2\2\u0142\30\3\2\2\2\u0143\u0144\7D\2\2\u0144\u0145\7G\2\2\u0145"+ + "\u0146\7V\2\2\u0146\u0147\7Y\2\2\u0147\u0148\7G\2\2\u0148\u0149\7G\2\2"+ + "\u0149\u014a\7P\2\2\u014a\32\3\2\2\2\u014b\u014c\7D\2\2\u014c\u014d\7"+ + "[\2\2\u014d\34\3\2\2\2\u014e\u014f\7E\2\2\u014f\u0150\7C\2\2\u0150\u0151"+ + "\7U\2\2\u0151\u0152\7G\2\2\u0152\36\3\2\2\2\u0153\u0154\7E\2\2\u0154\u0155"+ + "\7C\2\2\u0155\u0156\7U\2\2\u0156\u0157\7V\2\2\u0157 \3\2\2\2\u0158\u0159"+ + "\7E\2\2\u0159\u015a\7C\2\2\u015a\u015b\7V\2\2\u015b\u015c\7C\2\2\u015c"+ + "\u015d\7N\2\2\u015d\u015e\7Q\2\2\u015e\u015f\7I\2\2\u015f\"\3\2\2\2\u0160"+ + "\u0161\7E\2\2\u0161\u0162\7C\2\2\u0162\u0163\7V\2\2\u0163\u0164\7C\2\2"+ + "\u0164\u0165\7N\2\2\u0165\u0166\7Q\2\2\u0166\u0167\7I\2\2\u0167\u0168"+ + "\7U\2\2\u0168$\3\2\2\2\u0169\u016a\7E\2\2\u016a\u016b\7Q\2\2\u016b\u016c"+ + "\7N\2\2\u016c\u016d\7W\2\2\u016d\u016e\7O\2\2\u016e\u016f\7P\2\2\u016f"+ + "\u0170\7U\2\2\u0170&\3\2\2\2\u0171\u0172\7E\2\2\u0172\u0173\7Q\2\2\u0173"+ + "\u0174\7P\2\2\u0174\u0175\7X\2\2\u0175\u0176\7G\2\2\u0176\u0177\7T\2\2"+ + "\u0177\u0178\7V\2\2\u0178(\3\2\2\2\u0179\u017a\7E\2\2\u017a\u017b\7W\2"+ + "\2\u017b\u017c\7T\2\2\u017c\u017d\7T\2\2\u017d\u017e\7G\2\2\u017e\u017f"+ + "\7P\2\2\u017f\u0180\7V\2\2\u0180\u0181\7a\2\2\u0181\u0182\7F\2\2\u0182"+ + "\u0183\7C\2\2\u0183\u0184\7V\2\2\u0184\u0185\7G\2\2\u0185*\3\2\2\2\u0186"+ + "\u0187\7E\2\2\u0187\u0188\7W\2\2\u0188\u0189\7T\2\2\u0189\u018a\7T\2\2"+ + "\u018a\u018b\7G\2\2\u018b\u018c\7P\2\2\u018c\u018d\7V\2\2\u018d\u018e"+ + "\7a\2\2\u018e\u018f\7V\2\2\u018f\u0190\7K\2\2\u0190\u0191\7O\2\2\u0191"+ + "\u0192\7G\2\2\u0192,\3\2\2\2\u0193\u0194\7E\2\2\u0194\u0195\7W\2\2\u0195"+ + "\u0196\7T\2\2\u0196\u0197\7T\2\2\u0197\u0198\7G\2\2\u0198\u0199\7P\2\2"+ + "\u0199\u019a\7V\2\2\u019a\u019b\7a\2\2\u019b\u019c\7V\2\2\u019c\u019d"+ + "\7K\2\2\u019d\u019e\7O\2\2\u019e\u019f\7G\2\2\u019f\u01a0\7U\2\2\u01a0"+ + "\u01a1\7V\2\2\u01a1\u01a2\7C\2\2\u01a2\u01a3\7O\2\2\u01a3\u01a4\7R\2\2"+ + "\u01a4.\3\2\2\2\u01a5\u01a6\7F\2\2\u01a6\u01a7\7C\2\2\u01a7\u01a8\7[\2"+ + "\2\u01a8\60\3\2\2\2\u01a9\u01aa\7F\2\2\u01aa\u01ab\7C\2\2\u01ab\u01ac"+ + "\7[\2\2\u01ac\u01ad\7U\2\2\u01ad\62\3\2\2\2\u01ae\u01af\7F\2\2\u01af\u01b0"+ + "\7G\2\2\u01b0\u01b1\7D\2\2\u01b1\u01b2\7W\2\2\u01b2\u01b3\7I\2\2\u01b3"+ + "\64\3\2\2\2\u01b4\u01b5\7F\2\2\u01b5\u01b6\7G\2\2\u01b6\u01b7\7U\2\2\u01b7"+ + "\u01b8\7E\2\2\u01b8\66\3\2\2\2\u01b9\u01ba\7F\2\2\u01ba\u01bb\7G\2\2\u01bb"+ + "\u01bc\7U\2\2\u01bc\u01bd\7E\2\2\u01bd\u01be\7T\2\2\u01be\u01bf\7K\2\2"+ + "\u01bf\u01c0\7D\2\2\u01c0\u01c1\7G\2\2\u01c18\3\2\2\2\u01c2\u01c3\7F\2"+ + "\2\u01c3\u01c4\7K\2\2\u01c4\u01c5\7U\2\2\u01c5\u01c6\7V\2\2\u01c6\u01c7"+ + "\7K\2\2\u01c7\u01c8\7P\2\2\u01c8\u01c9\7E\2\2\u01c9\u01ca\7V\2\2\u01ca"+ + ":\3\2\2\2\u01cb\u01cc\7G\2\2\u01cc\u01cd\7N\2\2\u01cd\u01ce\7U\2\2\u01ce"+ + "\u01cf\7G\2\2\u01cf<\3\2\2\2\u01d0\u01d1\7G\2\2\u01d1\u01d2\7P\2\2\u01d2"+ + "\u01d3\7F\2\2\u01d3>\3\2\2\2\u01d4\u01d5\7G\2\2\u01d5\u01d6\7U\2\2\u01d6"+ + "\u01d7\7E\2\2\u01d7\u01d8\7C\2\2\u01d8\u01d9\7R\2\2\u01d9\u01da\7G\2\2"+ + "\u01da@\3\2\2\2\u01db\u01dc\7G\2\2\u01dc\u01dd\7Z\2\2\u01dd\u01de\7G\2"+ + "\2\u01de\u01df\7E\2\2\u01df\u01e0\7W\2\2\u01e0\u01e1\7V\2\2\u01e1\u01e2"+ + "\7C\2\2\u01e2\u01e3\7D\2\2\u01e3\u01e4\7N\2\2\u01e4\u01e5\7G\2\2\u01e5"+ + "B\3\2\2\2\u01e6\u01e7\7G\2\2\u01e7\u01e8\7Z\2\2\u01e8\u01e9\7K\2\2\u01e9"+ + 
"\u01ea\7U\2\2\u01ea\u01eb\7V\2\2\u01eb\u01ec\7U\2\2\u01ecD\3\2\2\2\u01ed"+ + "\u01ee\7G\2\2\u01ee\u01ef\7Z\2\2\u01ef\u01f0\7R\2\2\u01f0\u01f1\7N\2\2"+ + "\u01f1\u01f2\7C\2\2\u01f2\u01f3\7K\2\2\u01f3\u01f4\7P\2\2\u01f4F\3\2\2"+ + "\2\u01f5\u01f6\7G\2\2\u01f6\u01f7\7Z\2\2\u01f7\u01f8\7V\2\2\u01f8\u01f9"+ + "\7T\2\2\u01f9\u01fa\7C\2\2\u01fa\u01fb\7E\2\2\u01fb\u01fc\7V\2\2\u01fc"+ + "H\3\2\2\2\u01fd\u01fe\7H\2\2\u01fe\u01ff\7C\2\2\u01ff\u0200\7N\2\2\u0200"+ + "\u0201\7U\2\2\u0201\u0202\7G\2\2\u0202J\3\2\2\2\u0203\u0204\7H\2\2\u0204"+ + "\u0205\7K\2\2\u0205\u0206\7T\2\2\u0206\u0207\7U\2\2\u0207\u0208\7V\2\2"+ + "\u0208L\3\2\2\2\u0209\u020a\7H\2\2\u020a\u020b\7Q\2\2\u020b\u020c\7T\2"+ + "\2\u020c\u020d\7O\2\2\u020d\u020e\7C\2\2\u020e\u020f\7V\2\2\u020fN\3\2"+ + "\2\2\u0210\u0211\7H\2\2\u0211\u0212\7T\2\2\u0212\u0213\7Q\2\2\u0213\u0214"+ + "\7O\2\2\u0214P\3\2\2\2\u0215\u0216\7H\2\2\u0216\u0217\7T\2\2\u0217\u0218"+ + "\7Q\2\2\u0218\u0219\7\\\2\2\u0219\u021a\7G\2\2\u021a\u021b\7P\2\2\u021b"+ + "R\3\2\2\2\u021c\u021d\7H\2\2\u021d\u021e\7W\2\2\u021e\u021f\7N\2\2\u021f"+ + "\u0220\7N\2\2\u0220T\3\2\2\2\u0221\u0222\7H\2\2\u0222\u0223\7W\2\2\u0223"+ + "\u0224\7P\2\2\u0224\u0225\7E\2\2\u0225\u0226\7V\2\2\u0226\u0227\7K\2\2"+ + "\u0227\u0228\7Q\2\2\u0228\u0229\7P\2\2\u0229\u022a\7U\2\2\u022aV\3\2\2"+ + "\2\u022b\u022c\7I\2\2\u022c\u022d\7T\2\2\u022d\u022e\7C\2\2\u022e\u022f"+ + "\7R\2\2\u022f\u0230\7J\2\2\u0230\u0231\7X\2\2\u0231\u0232\7K\2\2\u0232"+ + "\u0233\7\\\2\2\u0233X\3\2\2\2\u0234\u0235\7I\2\2\u0235\u0236\7T\2\2\u0236"+ + "\u0237\7Q\2\2\u0237\u0238\7W\2\2\u0238\u0239\7R\2\2\u0239Z\3\2\2\2\u023a"+ + "\u023b\7J\2\2\u023b\u023c\7C\2\2\u023c\u023d\7X\2\2\u023d\u023e\7K\2\2"+ + "\u023e\u023f\7P\2\2\u023f\u0240\7I\2\2\u0240\\\3\2\2\2\u0241\u0242\7J"+ + "\2\2\u0242\u0243\7Q\2\2\u0243\u0244\7W\2\2\u0244\u0245\7T\2\2\u0245^\3"+ + "\2\2\2\u0246\u0247\7J\2\2\u0247\u0248\7Q\2\2\u0248\u0249\7W\2\2\u0249"+ + "\u024a\7T\2\2\u024a\u024b\7U\2\2\u024b`\3\2\2\2\u024c\u024d\7K\2\2\u024d"+ + "\u024e\7P\2\2\u024eb\3\2\2\2\u024f\u0250\7K\2\2\u0250\u0251\7P\2\2\u0251"+ + "\u0252\7E\2\2\u0252\u0253\7N\2\2\u0253\u0254\7W\2\2\u0254\u0255\7F\2\2"+ + "\u0255\u0256\7G\2\2\u0256d\3\2\2\2\u0257\u0258\7K\2\2\u0258\u0259\7P\2"+ + "\2\u0259\u025a\7P\2\2\u025a\u025b\7G\2\2\u025b\u025c\7T\2\2\u025cf\3\2"+ + "\2\2\u025d\u025e\7K\2\2\u025e\u025f\7P\2\2\u025f\u0260\7V\2\2\u0260\u0261"+ + "\7G\2\2\u0261\u0262\7T\2\2\u0262\u0263\7X\2\2\u0263\u0264\7C\2\2\u0264"+ + "\u0265\7N\2\2\u0265h\3\2\2\2\u0266\u0267\7K\2\2\u0267\u0268\7U\2\2\u0268"+ + "j\3\2\2\2\u0269\u026a\7L\2\2\u026a\u026b\7Q\2\2\u026b\u026c\7K\2\2\u026c"+ + "\u026d\7P\2\2\u026dl\3\2\2\2\u026e\u026f\7N\2\2\u026f\u0270\7C\2\2\u0270"+ + "\u0271\7U\2\2\u0271\u0272\7V\2\2\u0272n\3\2\2\2\u0273\u0274\7N\2\2\u0274"+ + "\u0275\7G\2\2\u0275\u0276\7H\2\2\u0276\u0277\7V\2\2\u0277p\3\2\2\2\u0278"+ + "\u0279\7N\2\2\u0279\u027a\7K\2\2\u027a\u027b\7M\2\2\u027b\u027c\7G\2\2"+ + "\u027cr\3\2\2\2\u027d\u027e\7N\2\2\u027e\u027f\7K\2\2\u027f\u0280\7O\2"+ + "\2\u0280\u0281\7K\2\2\u0281\u0282\7V\2\2\u0282t\3\2\2\2\u0283\u0284\7"+ + "O\2\2\u0284\u0285\7C\2\2\u0285\u0286\7R\2\2\u0286\u0287\7R\2\2\u0287\u0288"+ + "\7G\2\2\u0288\u0289\7F\2\2\u0289v\3\2\2\2\u028a\u028b\7O\2\2\u028b\u028c"+ + "\7C\2\2\u028c\u028d\7V\2\2\u028d\u028e\7E\2\2\u028e\u028f\7J\2\2\u028f"+ + "x\3\2\2\2\u0290\u0291\7O\2\2\u0291\u0292\7K\2\2\u0292\u0293\7P\2\2\u0293"+ + "\u0294\7W\2\2\u0294\u0295\7V\2\2\u0295\u0296\7G\2\2\u0296z\3\2\2\2\u0297"+ + "\u0298\7O\2\2\u0298\u0299\7K\2\2\u0299\u029a\7P\2\2\u029a\u029b\7W\2\2"+ + 
"\u029b\u029c\7V\2\2\u029c\u029d\7G\2\2\u029d\u029e\7U\2\2\u029e|\3\2\2"+ + "\2\u029f\u02a0\7O\2\2\u02a0\u02a1\7Q\2\2\u02a1\u02a2\7P\2\2\u02a2\u02a3"+ + "\7V\2\2\u02a3\u02a4\7J\2\2\u02a4~\3\2\2\2\u02a5\u02a6\7O\2\2\u02a6\u02a7"+ + "\7Q\2\2\u02a7\u02a8\7P\2\2\u02a8\u02a9\7V\2\2\u02a9\u02aa\7J\2\2\u02aa"+ + "\u02ab\7U\2\2\u02ab\u0080\3\2\2\2\u02ac\u02ad\7P\2\2\u02ad\u02ae\7C\2"+ + "\2\u02ae\u02af\7V\2\2\u02af\u02b0\7W\2\2\u02b0\u02b1\7T\2\2\u02b1\u02b2"+ + "\7C\2\2\u02b2\u02b3\7N\2\2\u02b3\u0082\3\2\2\2\u02b4\u02b5\7P\2\2\u02b5"+ + "\u02b6\7Q\2\2\u02b6\u02b7\7V\2\2\u02b7\u0084\3\2\2\2\u02b8\u02b9\7P\2"+ + "\2\u02b9\u02ba\7W\2\2\u02ba\u02bb\7N\2\2\u02bb\u02bc\7N\2\2\u02bc\u0086"+ + "\3\2\2\2\u02bd\u02be\7P\2\2\u02be\u02bf\7W\2\2\u02bf\u02c0\7N\2\2\u02c0"+ + "\u02c1\7N\2\2\u02c1\u02c2\7U\2\2\u02c2\u0088\3\2\2\2\u02c3\u02c4\7Q\2"+ + "\2\u02c4\u02c5\7P\2\2\u02c5\u008a\3\2\2\2\u02c6\u02c7\7Q\2\2\u02c7\u02c8"+ + "\7R\2\2\u02c8\u02c9\7V\2\2\u02c9\u02ca\7K\2\2\u02ca\u02cb\7O\2\2\u02cb"+ + "\u02cc\7K\2\2\u02cc\u02cd\7\\\2\2\u02cd\u02ce\7G\2\2\u02ce\u02cf\7F\2"+ + "\2\u02cf\u008c\3\2\2\2\u02d0\u02d1\7Q\2\2\u02d1\u02d2\7T\2\2\u02d2\u008e"+ + "\3\2\2\2\u02d3\u02d4\7Q\2\2\u02d4\u02d5\7T\2\2\u02d5\u02d6\7F\2\2\u02d6"+ + "\u02d7\7G\2\2\u02d7\u02d8\7T\2\2\u02d8\u0090\3\2\2\2\u02d9\u02da\7Q\2"+ + "\2\u02da\u02db\7W\2\2\u02db\u02dc\7V\2\2\u02dc\u02dd\7G\2\2\u02dd\u02de"+ + "\7T\2\2\u02de\u0092\3\2\2\2\u02df\u02e0\7R\2\2\u02e0\u02e1\7C\2\2\u02e1"+ + "\u02e2\7T\2\2\u02e2\u02e3\7U\2\2\u02e3\u02e4\7G\2\2\u02e4\u02e5\7F\2\2"+ + "\u02e5\u0094\3\2\2\2\u02e6\u02e7\7R\2\2\u02e7\u02e8\7J\2\2\u02e8\u02e9"+ + "\7[\2\2\u02e9\u02ea\7U\2\2\u02ea\u02eb\7K\2\2\u02eb\u02ec\7E\2\2\u02ec"+ + "\u02ed\7C\2\2\u02ed\u02ee\7N\2\2\u02ee\u0096\3\2\2\2\u02ef\u02f0\7R\2"+ + "\2\u02f0\u02f1\7N\2\2\u02f1\u02f2\7C\2\2\u02f2\u02f3\7P\2\2\u02f3\u0098"+ + "\3\2\2\2\u02f4\u02f5\7T\2\2\u02f5\u02f6\7K\2\2\u02f6\u02f7\7I\2\2\u02f7"+ + "\u02f8\7J\2\2\u02f8\u02f9\7V\2\2\u02f9\u009a\3\2\2\2\u02fa\u02fb\7T\2"+ + "\2\u02fb\u02fc\7N\2\2\u02fc\u02fd\7K\2\2\u02fd\u02fe\7M\2\2\u02fe\u02ff"+ + "\7G\2\2\u02ff\u009c\3\2\2\2\u0300\u0301\7S\2\2\u0301\u0302\7W\2\2\u0302"+ + "\u0303\7G\2\2\u0303\u0304\7T\2\2\u0304\u0305\7[\2\2\u0305\u009e\3\2\2"+ + "\2\u0306\u0307\7U\2\2\u0307\u0308\7E\2\2\u0308\u0309\7J\2\2\u0309\u030a"+ + "\7G\2\2\u030a\u030b\7O\2\2\u030b\u030c\7C\2\2\u030c\u030d\7U\2\2\u030d"+ + "\u00a0\3\2\2\2\u030e\u030f\7U\2\2\u030f\u0310\7G\2\2\u0310\u0311\7E\2"+ + "\2\u0311\u0312\7Q\2\2\u0312\u0313\7P\2\2\u0313\u0314\7F\2\2\u0314\u00a2"+ + "\3\2\2\2\u0315\u0316\7U\2\2\u0316\u0317\7G\2\2\u0317\u0318\7E\2\2\u0318"+ + "\u0319\7Q\2\2\u0319\u031a\7P\2\2\u031a\u031b\7F\2\2\u031b\u031c\7U\2\2"+ + "\u031c\u00a4\3\2\2\2\u031d\u031e\7U\2\2\u031e\u031f\7G\2\2\u031f\u0320"+ + "\7N\2\2\u0320\u0321\7G\2\2\u0321\u0322\7E\2\2\u0322\u0323\7V\2\2\u0323"+ + "\u00a6\3\2\2\2\u0324\u0325\7U\2\2\u0325\u0326\7J\2\2\u0326\u0327\7Q\2"+ + "\2\u0327\u0328\7Y\2\2\u0328\u00a8\3\2\2\2\u0329\u032a\7U\2\2\u032a\u032b"+ + "\7[\2\2\u032b\u032c\7U\2\2\u032c\u00aa\3\2\2\2\u032d\u032e\7V\2\2\u032e"+ + "\u032f\7C\2\2\u032f\u0330\7D\2\2\u0330\u0331\7N\2\2\u0331\u0332\7G\2\2"+ + "\u0332\u00ac\3\2\2\2\u0333\u0334\7V\2\2\u0334\u0335\7C\2\2\u0335\u0336"+ + "\7D\2\2\u0336\u0337\7N\2\2\u0337\u0338\7G\2\2\u0338\u0339\7U\2\2\u0339"+ + "\u00ae\3\2\2\2\u033a\u033b\7V\2\2\u033b\u033c\7G\2\2\u033c\u033d\7Z\2"+ + "\2\u033d\u033e\7V\2\2\u033e\u00b0\3\2\2\2\u033f\u0340\7V\2\2\u0340\u0341"+ + "\7J\2\2\u0341\u0342\7G\2\2\u0342\u0343\7P\2\2\u0343\u00b2\3\2\2\2\u0344"+ + 
"\u0345\7V\2\2\u0345\u0346\7T\2\2\u0346\u0347\7W\2\2\u0347\u0348\7G\2\2"+ + "\u0348\u00b4\3\2\2\2\u0349\u034a\7V\2\2\u034a\u034b\7Q\2\2\u034b\u00b6"+ + "\3\2\2\2\u034c\u034d\7V\2\2\u034d\u034e\7[\2\2\u034e\u034f\7R\2\2\u034f"+ + "\u0350\7G\2\2\u0350\u00b8\3\2\2\2\u0351\u0352\7V\2\2\u0352\u0353\7[\2"+ + "\2\u0353\u0354\7R\2\2\u0354\u0355\7G\2\2\u0355\u0356\7U\2\2\u0356\u00ba"+ + "\3\2\2\2\u0357\u0358\7W\2\2\u0358\u0359\7U\2\2\u0359\u035a\7K\2\2\u035a"+ + "\u035b\7P\2\2\u035b\u035c\7I\2\2\u035c\u00bc\3\2\2\2\u035d\u035e\7X\2"+ + "\2\u035e\u035f\7G\2\2\u035f\u0360\7T\2\2\u0360\u0361\7K\2\2\u0361\u0362"+ + "\7H\2\2\u0362\u0363\7[\2\2\u0363\u00be\3\2\2\2\u0364\u0365\7Y\2\2\u0365"+ + "\u0366\7J\2\2\u0366\u0367\7G\2\2\u0367\u0368\7P\2\2\u0368\u00c0\3\2\2"+ + "\2\u0369\u036a\7Y\2\2\u036a\u036b\7J\2\2\u036b\u036c\7G\2\2\u036c\u036d"+ + "\7T\2\2\u036d\u036e\7G\2\2\u036e\u00c2\3\2\2\2\u036f\u0370\7Y\2\2\u0370"+ + "\u0371\7K\2\2\u0371\u0372\7V\2\2\u0372\u0373\7J\2\2\u0373\u00c4\3\2\2"+ + "\2\u0374\u0375\7[\2\2\u0375\u0376\7G\2\2\u0376\u0377\7C\2\2\u0377\u0378"+ + "\7T\2\2\u0378\u00c6\3\2\2\2\u0379\u037a\7[\2\2\u037a\u037b\7G\2\2\u037b"+ + "\u037c\7C\2\2\u037c\u037d\7T\2\2\u037d\u037e\7U\2\2\u037e\u00c8\3\2\2"+ + "\2\u037f\u0380\7}\2\2\u0380\u0381\7G\2\2\u0381\u0382\7U\2\2\u0382\u0383"+ + "\7E\2\2\u0383\u0384\7C\2\2\u0384\u0385\7R\2\2\u0385\u0386\7G\2\2\u0386"+ + "\u00ca\3\2\2\2\u0387\u0388\7}\2\2\u0388\u0389\7H\2\2\u0389\u038a\7P\2"+ + "\2\u038a\u00cc\3\2\2\2\u038b\u038c\7}\2\2\u038c\u038d\7N\2\2\u038d\u038e"+ + "\7K\2\2\u038e\u038f\7O\2\2\u038f\u0390\7K\2\2\u0390\u0391\7V\2\2\u0391"+ + "\u00ce\3\2\2\2\u0392\u0393\7}\2\2\u0393\u0394\7F\2\2\u0394\u00d0\3\2\2"+ + "\2\u0395\u0396\7}\2\2\u0396\u0397\7V\2\2\u0397\u00d2\3\2\2\2\u0398\u0399"+ + "\7}\2\2\u0399\u039a\7V\2\2\u039a\u039b\7U\2\2\u039b\u00d4\3\2\2\2\u039c"+ + "\u039d\7}\2\2\u039d\u039e\7I\2\2\u039e\u039f\7W\2\2\u039f\u03a0\7K\2\2"+ + "\u03a0\u03a1\7F\2\2\u03a1\u00d6\3\2\2\2\u03a2\u03a3\7\177\2\2\u03a3\u00d8"+ + "\3\2\2\2\u03a4\u03a5\7?\2\2\u03a5\u00da\3\2\2\2\u03a6\u03a7\7>\2\2\u03a7"+ + "\u03a8\7?\2\2\u03a8\u03a9\7@\2\2\u03a9\u00dc\3\2\2\2\u03aa\u03ab\7>\2"+ + "\2\u03ab\u03af\7@\2\2\u03ac\u03ad\7#\2\2\u03ad\u03af\7?\2\2\u03ae\u03aa"+ + "\3\2\2\2\u03ae\u03ac\3\2\2\2\u03af\u00de\3\2\2\2\u03b0\u03b1\7>\2\2\u03b1"+ + "\u00e0\3\2\2\2\u03b2\u03b3\7>\2\2\u03b3\u03b4\7?\2\2\u03b4\u00e2\3\2\2"+ + "\2\u03b5\u03b6\7@\2\2\u03b6\u00e4\3\2\2\2\u03b7\u03b8\7@\2\2\u03b8\u03b9"+ + "\7?\2\2\u03b9\u00e6\3\2\2\2\u03ba\u03bb\7-\2\2\u03bb\u00e8\3\2\2\2\u03bc"+ + "\u03bd\7/\2\2\u03bd\u00ea\3\2\2\2\u03be\u03bf\7,\2\2\u03bf\u00ec\3\2\2"+ + "\2\u03c0\u03c1\7\61\2\2\u03c1\u00ee\3\2\2\2\u03c2\u03c3\7\'\2\2\u03c3"+ + "\u00f0\3\2\2\2\u03c4\u03c5\7<\2\2\u03c5\u03c6\7<\2\2\u03c6\u00f2\3\2\2"+ + "\2\u03c7\u03c8\7~\2\2\u03c8\u03c9\7~\2\2\u03c9\u00f4\3\2\2\2\u03ca\u03cb"+ + "\7\60\2\2\u03cb\u00f6\3\2\2\2\u03cc\u03cd\7A\2\2\u03cd\u00f8\3\2\2\2\u03ce"+ + "\u03d4\7)\2\2\u03cf\u03d3\n\2\2\2\u03d0\u03d1\7)\2\2\u03d1\u03d3\7)\2"+ + "\2\u03d2\u03cf\3\2\2\2\u03d2\u03d0\3\2\2\2\u03d3\u03d6\3\2\2\2\u03d4\u03d2"+ + "\3\2\2\2\u03d4\u03d5\3\2\2\2\u03d5\u03d7\3\2\2\2\u03d6\u03d4\3\2\2\2\u03d7"+ + "\u03d8\7)\2\2\u03d8\u00fa\3\2\2\2\u03d9\u03db\5\u010b\u0086\2\u03da\u03d9"+ + "\3\2\2\2\u03db\u03dc\3\2\2\2\u03dc\u03da\3\2\2\2\u03dc\u03dd\3\2\2\2\u03dd"+ + "\u00fc\3\2\2\2\u03de\u03e0\5\u010b\u0086\2\u03df\u03de\3\2\2\2\u03e0\u03e1"+ + "\3\2\2\2\u03e1\u03df\3\2\2\2\u03e1\u03e2\3\2\2\2\u03e2\u03e3\3\2\2\2\u03e3"+ + "\u03e7\5\u00f5{\2\u03e4\u03e6\5\u010b\u0086\2\u03e5\u03e4\3\2\2\2\u03e6"+ + 
"\u03e9\3\2\2\2\u03e7\u03e5\3\2\2\2\u03e7\u03e8\3\2\2\2\u03e8\u0409\3\2"+ + "\2\2\u03e9\u03e7\3\2\2\2\u03ea\u03ec\5\u00f5{\2\u03eb\u03ed\5\u010b\u0086"+ + "\2\u03ec\u03eb\3\2\2\2\u03ed\u03ee\3\2\2\2\u03ee\u03ec\3\2\2\2\u03ee\u03ef"+ + "\3\2\2\2\u03ef\u0409\3\2\2\2\u03f0\u03f2\5\u010b\u0086\2\u03f1\u03f0\3"+ + "\2\2\2\u03f2\u03f3\3\2\2\2\u03f3\u03f1\3\2\2\2\u03f3\u03f4\3\2\2\2\u03f4"+ + "\u03fc\3\2\2\2\u03f5\u03f9\5\u00f5{\2\u03f6\u03f8\5\u010b\u0086\2\u03f7"+ + "\u03f6\3\2\2\2\u03f8\u03fb\3\2\2\2\u03f9\u03f7\3\2\2\2\u03f9\u03fa\3\2"+ + "\2\2\u03fa\u03fd\3\2\2\2\u03fb\u03f9\3\2\2\2\u03fc\u03f5\3\2\2\2\u03fc"+ + "\u03fd\3\2\2\2\u03fd\u03fe\3\2\2\2\u03fe\u03ff\5\u0109\u0085\2\u03ff\u0409"+ + "\3\2\2\2\u0400\u0402\5\u00f5{\2\u0401\u0403\5\u010b\u0086\2\u0402\u0401"+ + "\3\2\2\2\u0403\u0404\3\2\2\2\u0404\u0402\3\2\2\2\u0404\u0405\3\2\2\2\u0405"+ + "\u0406\3\2\2\2\u0406\u0407\5\u0109\u0085\2\u0407\u0409\3\2\2\2\u0408\u03df"+ + "\3\2\2\2\u0408\u03ea\3\2\2\2\u0408\u03f1\3\2\2\2\u0408\u0400\3\2\2\2\u0409"+ + "\u00fe\3\2\2\2\u040a\u040d\5\u010d\u0087\2\u040b\u040d\7a\2\2\u040c\u040a"+ + "\3\2\2\2\u040c\u040b\3\2\2\2\u040d\u0413\3\2\2\2\u040e\u0412\5\u010d\u0087"+ + "\2\u040f\u0412\5\u010b\u0086\2\u0410\u0412\t\3\2\2\u0411\u040e\3\2\2\2"+ + "\u0411\u040f\3\2\2\2\u0411\u0410\3\2\2\2\u0412\u0415\3\2\2\2\u0413\u0411"+ + "\3\2\2\2\u0413\u0414\3\2\2\2\u0414\u0100\3\2\2\2\u0415\u0413\3\2\2\2\u0416"+ + "\u041a\5\u010b\u0086\2\u0417\u041b\5\u010d\u0087\2\u0418\u041b\5\u010b"+ + "\u0086\2\u0419\u041b\t\3\2\2\u041a\u0417\3\2\2\2\u041a\u0418\3\2\2\2\u041a"+ + "\u0419\3\2\2\2\u041b\u041c\3\2\2\2\u041c\u041a\3\2\2\2\u041c\u041d\3\2"+ + "\2\2\u041d\u0102\3\2\2\2\u041e\u0422\5\u010d\u0087\2\u041f\u0422\5\u010b"+ + "\u0086\2\u0420\u0422\7a\2\2\u0421\u041e\3\2\2\2\u0421\u041f\3\2\2\2\u0421"+ + "\u0420\3\2\2\2\u0422\u0423\3\2\2\2\u0423\u0421\3\2\2\2\u0423\u0424\3\2"+ + "\2\2\u0424\u0104\3\2\2\2\u0425\u042b\7$\2\2\u0426\u042a\n\4\2\2\u0427"+ + "\u0428\7$\2\2\u0428\u042a\7$\2\2\u0429\u0426\3\2\2\2\u0429\u0427\3\2\2"+ + "\2\u042a\u042d\3\2\2\2\u042b\u0429\3\2\2\2\u042b\u042c\3\2\2\2\u042c\u042e"+ + "\3\2\2\2\u042d\u042b\3\2\2\2\u042e\u042f\7$\2\2\u042f\u0106\3\2\2\2\u0430"+ + "\u0436\7b\2\2\u0431\u0435\n\5\2\2\u0432\u0433\7b\2\2\u0433\u0435\7b\2"+ + "\2\u0434\u0431\3\2\2\2\u0434\u0432\3\2\2\2\u0435\u0438\3\2\2\2\u0436\u0434"+ + "\3\2\2\2\u0436\u0437\3\2\2\2\u0437\u0439\3\2\2\2\u0438\u0436\3\2\2\2\u0439"+ + "\u043a\7b\2\2\u043a\u0108\3\2\2\2\u043b\u043d\7G\2\2\u043c\u043e\t\6\2"+ + "\2\u043d\u043c\3\2\2\2\u043d\u043e\3\2\2\2\u043e\u0440\3\2\2\2\u043f\u0441"+ + "\5\u010b\u0086\2\u0440\u043f\3\2\2\2\u0441\u0442\3\2\2\2\u0442\u0440\3"+ + "\2\2\2\u0442\u0443\3\2\2\2\u0443\u010a\3\2\2\2\u0444\u0445\t\7\2\2\u0445"+ + "\u010c\3\2\2\2\u0446\u0447\t\b\2\2\u0447\u010e\3\2\2\2\u0448\u0449\7/"+ + "\2\2\u0449\u044a\7/\2\2\u044a\u044e\3\2\2\2\u044b\u044d\n\t\2\2\u044c"+ + "\u044b\3\2\2\2\u044d\u0450\3\2\2\2\u044e\u044c\3\2\2\2\u044e\u044f\3\2"+ + "\2\2\u044f\u0452\3\2\2\2\u0450\u044e\3\2\2\2\u0451\u0453\7\17\2\2\u0452"+ + "\u0451\3\2\2\2\u0452\u0453\3\2\2\2\u0453\u0455\3\2\2\2\u0454\u0456\7\f"+ + "\2\2\u0455\u0454\3\2\2\2\u0455\u0456\3\2\2\2\u0456\u0457\3\2\2\2\u0457"+ + "\u0458\b\u0088\2\2\u0458\u0110\3\2\2\2\u0459\u045a\7\61\2\2\u045a\u045b"+ + "\7,\2\2\u045b\u0460\3\2\2\2\u045c\u045f\5\u0111\u0089\2\u045d\u045f\13"+ + "\2\2\2\u045e\u045c\3\2\2\2\u045e\u045d\3\2\2\2\u045f\u0462\3\2\2\2\u0460"+ + "\u0461\3\2\2\2\u0460\u045e\3\2\2\2\u0461\u0463\3\2\2\2\u0462\u0460\3\2"+ + 
"\2\2\u0463\u0464\7,\2\2\u0464\u0465\7\61\2\2\u0465\u0466\3\2\2\2\u0466"+ + "\u0467\b\u0089\2\2\u0467\u0112\3\2\2\2\u0468\u046a\t\n\2\2\u0469\u0468"+ + "\3\2\2\2\u046a\u046b\3\2\2\2\u046b\u0469\3\2\2\2\u046b\u046c\3\2\2\2\u046c"+ + "\u046d\3\2\2\2\u046d\u046e\b\u008a\2\2\u046e\u0114\3\2\2\2\u046f\u0470"+ + "\13\2\2\2\u0470\u0116\3\2\2\2\"\2\u03ae\u03d2\u03d4\u03dc\u03e1\u03e7"+ + "\u03ee\u03f3\u03f9\u03fc\u0404\u0408\u040c\u0411\u0413\u041a\u041c\u0421"+ + "\u0423\u0429\u042b\u0434\u0436\u043d\u0442\u044e\u0452\u0455\u045e\u0460"+ + "\u046b\3\2\3\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java index 1ae317552268e..76e0f4654df6d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java @@ -1,27 +1,13 @@ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.xpack.sql.parser; - -import org.antlr.v4.runtime.FailedPredicateException; -import org.antlr.v4.runtime.NoViableAltException; -import org.antlr.v4.runtime.Parser; -import org.antlr.v4.runtime.ParserRuleContext; -import org.antlr.v4.runtime.RecognitionException; -import org.antlr.v4.runtime.RuleContext; -import org.antlr.v4.runtime.RuntimeMetaData; -import org.antlr.v4.runtime.Token; -import org.antlr.v4.runtime.TokenStream; -import org.antlr.v4.runtime.Vocabulary; -import org.antlr.v4.runtime.VocabularyImpl; -import org.antlr.v4.runtime.atn.ATN; -import org.antlr.v4.runtime.atn.ATNDeserializer; -import org.antlr.v4.runtime.atn.ParserATNSimulator; -import org.antlr.v4.runtime.atn.PredictionContextCache; +import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.tree.ParseTreeListener; -import org.antlr.v4.runtime.tree.ParseTreeVisitor; -import org.antlr.v4.runtime.tree.TerminalNode; - +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.misc.*; +import org.antlr.v4.runtime.tree.*; import java.util.List; +import java.util.Iterator; +import java.util.ArrayList; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class SqlBaseParser extends Parser { @@ -36,21 +22,21 @@ class SqlBaseParser extends Parser { COLUMNS=18, CONVERT=19, CURRENT_DATE=20, CURRENT_TIME=21, CURRENT_TIMESTAMP=22, DAY=23, DAYS=24, DEBUG=25, DESC=26, DESCRIBE=27, DISTINCT=28, ELSE=29, END=30, ESCAPE=31, EXECUTABLE=32, EXISTS=33, EXPLAIN=34, EXTRACT=35, FALSE=36, - FIRST=37, FORMAT=38, FROM=39, FULL=40, FUNCTIONS=41, GRAPHVIZ=42, GROUP=43, - HAVING=44, HOUR=45, HOURS=46, IN=47, INNER=48, INTERVAL=49, IS=50, JOIN=51, - LAST=52, LEFT=53, LIKE=54, LIMIT=55, MAPPED=56, MATCH=57, MINUTE=58, MINUTES=59, - MONTH=60, MONTHS=61, NATURAL=62, NOT=63, NULL=64, NULLS=65, ON=66, OPTIMIZED=67, - OR=68, ORDER=69, OUTER=70, PARSED=71, PHYSICAL=72, PLAN=73, RIGHT=74, - RLIKE=75, QUERY=76, SCHEMAS=77, SECOND=78, SECONDS=79, SELECT=80, SHOW=81, - SYS=82, TABLE=83, TABLES=84, TEXT=85, THEN=86, TRUE=87, TO=88, TYPE=89, - TYPES=90, USING=91, VERIFY=92, WHEN=93, WHERE=94, WITH=95, YEAR=96, YEARS=97, - ESCAPE_ESC=98, FUNCTION_ESC=99, LIMIT_ESC=100, DATE_ESC=101, TIME_ESC=102, - TIMESTAMP_ESC=103, GUID_ESC=104, ESC_END=105, EQ=106, NULLEQ=107, NEQ=108, - LT=109, LTE=110, GT=111, GTE=112, PLUS=113, MINUS=114, ASTERISK=115, SLASH=116, - PERCENT=117, 
CAST_OP=118, CONCAT=119, DOT=120, PARAM=121, STRING=122, - INTEGER_VALUE=123, DECIMAL_VALUE=124, IDENTIFIER=125, DIGIT_IDENTIFIER=126, - TABLE_IDENTIFIER=127, QUOTED_IDENTIFIER=128, BACKQUOTED_IDENTIFIER=129, - SIMPLE_COMMENT=130, BRACKETED_COMMENT=131, WS=132, UNRECOGNIZED=133, DELIMITER=134; + FIRST=37, FORMAT=38, FROM=39, FROZEN=40, FULL=41, FUNCTIONS=42, GRAPHVIZ=43, + GROUP=44, HAVING=45, HOUR=46, HOURS=47, IN=48, INCLUDE=49, INNER=50, INTERVAL=51, + IS=52, JOIN=53, LAST=54, LEFT=55, LIKE=56, LIMIT=57, MAPPED=58, MATCH=59, + MINUTE=60, MINUTES=61, MONTH=62, MONTHS=63, NATURAL=64, NOT=65, NULL=66, + NULLS=67, ON=68, OPTIMIZED=69, OR=70, ORDER=71, OUTER=72, PARSED=73, PHYSICAL=74, + PLAN=75, RIGHT=76, RLIKE=77, QUERY=78, SCHEMAS=79, SECOND=80, SECONDS=81, + SELECT=82, SHOW=83, SYS=84, TABLE=85, TABLES=86, TEXT=87, THEN=88, TRUE=89, + TO=90, TYPE=91, TYPES=92, USING=93, VERIFY=94, WHEN=95, WHERE=96, WITH=97, + YEAR=98, YEARS=99, ESCAPE_ESC=100, FUNCTION_ESC=101, LIMIT_ESC=102, DATE_ESC=103, + TIME_ESC=104, TIMESTAMP_ESC=105, GUID_ESC=106, ESC_END=107, EQ=108, NULLEQ=109, + NEQ=110, LT=111, LTE=112, GT=113, GTE=114, PLUS=115, MINUS=116, ASTERISK=117, + SLASH=118, PERCENT=119, CAST_OP=120, CONCAT=121, DOT=122, PARAM=123, STRING=124, + INTEGER_VALUE=125, DECIMAL_VALUE=126, IDENTIFIER=127, DIGIT_IDENTIFIER=128, + TABLE_IDENTIFIER=129, QUOTED_IDENTIFIER=130, BACKQUOTED_IDENTIFIER=131, + SIMPLE_COMMENT=132, BRACKETED_COMMENT=133, WS=134, UNRECOGNIZED=135, DELIMITER=136; public static final int RULE_singleStatement = 0, RULE_singleExpression = 1, RULE_statement = 2, RULE_query = 3, RULE_queryNoWith = 4, RULE_limitClause = 5, RULE_queryTerm = 6, @@ -91,18 +77,18 @@ class SqlBaseParser extends Parser { "'CURRENT_TIME'", "'CURRENT_TIMESTAMP'", "'DAY'", "'DAYS'", "'DEBUG'", "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ELSE'", "'END'", "'ESCAPE'", "'EXECUTABLE'", "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FORMAT'", - "'FROM'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", "'HAVING'", - "'HOUR'", "'HOURS'", "'IN'", "'INNER'", "'INTERVAL'", "'IS'", "'JOIN'", - "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", "'MINUTE'", - "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", - "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", - "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", - "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'THEN'", - "'TRUE'", "'TO'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", "'WHEN'", - "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", - "'{D'", "'{T'", "'{TS'", "'{GUID'", "'}'", "'='", "'<=>'", null, "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'::'", "'||'", - "'.'", "'?'" + "'FROM'", "'FROZEN'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", + "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INCLUDE'", "'INNER'", "'INTERVAL'", + "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", + "'MATCH'", "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", + "'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", + "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", + "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", + "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", "'TYPE'", "'TYPES'", + "'USING'", "'VERIFY'", "'WHEN'", "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", + "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", 
"'{T'", "'{TS'", "'{GUID'", + "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", + "'*'", "'/'", "'%'", "'::'", "'||'", "'.'", "'?'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", @@ -110,19 +96,20 @@ class SqlBaseParser extends Parser { "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", "ESCAPE", "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", "HOUR", "HOURS", - "IN", "INNER", "INTERVAL", "IS", "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", - "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", "MONTHS", "NATURAL", - "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", - "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", - "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", - "TYPE", "TYPES", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", - "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", - "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", "CONCAT", - "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", - "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", - "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED", "DELIMITER" + "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", + "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", "JOIN", "LAST", + "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", + "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", + "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", + "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", + "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHEN", + "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", + "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", + "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "CAST_OP", "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", + "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", + "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", + "WS", "UNRECOGNIZED", "DELIMITER" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -540,6 +527,8 @@ public static class ShowTablesContext extends StatementContext { public TableIdentifierContext tableIdent; public TerminalNode SHOW() { return getToken(SqlBaseParser.SHOW, 0); } public TerminalNode TABLES() { return getToken(SqlBaseParser.TABLES, 0); } + public TerminalNode INCLUDE() { return getToken(SqlBaseParser.INCLUDE, 0); } + public TerminalNode FROZEN() { return getToken(SqlBaseParser.FROZEN, 0); } public LikePatternContext likePattern() { return getRuleContext(LikePatternContext.class,0); } @@ -586,6 +575,8 @@ public static class ShowColumnsContext extends StatementContext { public TerminalNode COLUMNS() { return getToken(SqlBaseParser.COLUMNS, 0); } public TerminalNode FROM() { return getToken(SqlBaseParser.FROM, 0); } public TerminalNode IN() { return getToken(SqlBaseParser.IN, 0); } + public 
TerminalNode INCLUDE() { return getToken(SqlBaseParser.INCLUDE, 0); } + public TerminalNode FROZEN() { return getToken(SqlBaseParser.FROZEN, 0); } public LikePatternContext likePattern() { return getRuleContext(LikePatternContext.class,0); } @@ -615,9 +606,9 @@ public final StatementContext statement() throws RecognitionException { enterRule(_localctx, 4, RULE_statement); int _la; try { - setState(217); + setState(229); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: _localctx = new StatementDefaultContext(_localctx); enterOuterAlt(_localctx, 1); @@ -773,10 +764,21 @@ public final StatementContext statement() throws RecognitionException { setState(150); match(TABLES); setState(153); + _la = _input.LA(1); + if (_la==INCLUDE) { + { + setState(151); + match(INCLUDE); + setState(152); + match(FROZEN); + } + } + + setState(157); switch (_input.LA(1)) { case LIKE: { - setState(151); + setState(155); ((ShowTablesContext)_localctx).tableLike = likePattern(); } break; @@ -825,7 +827,7 @@ public final StatementContext statement() throws RecognitionException { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(152); + setState(156); ((ShowTablesContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -840,22 +842,33 @@ public final StatementContext statement() throws RecognitionException { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(155); + setState(159); match(SHOW); - setState(156); + setState(160); match(COLUMNS); - setState(157); + setState(163); + _la = _input.LA(1); + if (_la==INCLUDE) { + { + setState(161); + match(INCLUDE); + setState(162); + match(FROZEN); + } + } + + setState(165); _la = _input.LA(1); if ( !(_la==FROM || _la==IN) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(160); + setState(168); switch (_input.LA(1)) { case LIKE: { - setState(158); + setState(166); ((ShowColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -904,7 +917,7 @@ public final StatementContext statement() throws RecognitionException { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(159); + setState(167); ((ShowColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -917,18 +930,29 @@ public final StatementContext statement() throws RecognitionException { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(162); + setState(170); _la = _input.LA(1); if ( !(_la==DESC || _la==DESCRIBE) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(165); + setState(173); + _la = _input.LA(1); + if (_la==INCLUDE) { + { + setState(171); + match(INCLUDE); + setState(172); + match(FROZEN); + } + } + + setState(177); switch (_input.LA(1)) { case LIKE: { - setState(163); + setState(175); ((ShowColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -977,7 +1001,7 @@ public final StatementContext statement() throws RecognitionException { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(164); + setState(176); ((ShowColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -990,15 +1014,15 @@ public final StatementContext statement() throws RecognitionException { _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(167); + setState(179); match(SHOW); - setState(168); + setState(180); match(FUNCTIONS); - setState(170); + setState(182); _la = _input.LA(1); if (_la==LIKE) { 
{ - setState(169); + setState(181); likePattern(); } } @@ -1009,9 +1033,9 @@ public final StatementContext statement() throws RecognitionException { _localctx = new ShowSchemasContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(172); + setState(184); match(SHOW); - setState(173); + setState(185); match(SCHEMAS); } break; @@ -1019,58 +1043,58 @@ public final StatementContext statement() throws RecognitionException { _localctx = new SysTablesContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(174); + setState(186); match(SYS); - setState(175); + setState(187); match(TABLES); - setState(178); + setState(190); _la = _input.LA(1); if (_la==CATALOG) { { - setState(176); + setState(188); match(CATALOG); - setState(177); + setState(189); ((SysTablesContext)_localctx).clusterLike = likePattern(); } } - setState(182); + setState(194); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: { - setState(180); + setState(192); ((SysTablesContext)_localctx).tableLike = likePattern(); } break; case 2: { - setState(181); + setState(193); ((SysTablesContext)_localctx).tableIdent = tableIdentifier(); } break; } - setState(193); + setState(205); _la = _input.LA(1); if (_la==TYPE) { { - setState(184); + setState(196); match(TYPE); - setState(185); + setState(197); string(); - setState(190); + setState(202); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(186); + setState(198); match(T__2); - setState(187); + setState(199); string(); } } - setState(192); + setState(204); _errHandler.sync(this); _la = _input.LA(1); } @@ -1083,28 +1107,28 @@ public final StatementContext statement() throws RecognitionException { _localctx = new SysColumnsContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(195); + setState(207); match(SYS); - setState(196); + setState(208); match(COLUMNS); - setState(199); + setState(211); _la = _input.LA(1); if (_la==CATALOG) { { - setState(197); + setState(209); match(CATALOG); - setState(198); + setState(210); ((SysColumnsContext)_localctx).cluster = string(); } } - setState(204); + setState(216); switch (_input.LA(1)) { case TABLE: { - setState(201); + setState(213); match(TABLE); - setState(202); + setState(214); ((SysColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -1153,7 +1177,7 @@ public final StatementContext statement() throws RecognitionException { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(203); + setState(215); ((SysColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -1163,11 +1187,11 @@ public final StatementContext statement() throws RecognitionException { default: throw new NoViableAltException(this); } - setState(207); + setState(219); _la = _input.LA(1); if (_la==LIKE) { { - setState(206); + setState(218); ((SysColumnsContext)_localctx).columnPattern = likePattern(); } } @@ -1178,19 +1202,19 @@ public final StatementContext statement() throws RecognitionException { _localctx = new SysTypesContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(209); + setState(221); match(SYS); - setState(210); + setState(222); match(TYPES); - setState(215); + setState(227); _la = _input.LA(1); - if (((((_la - 113)) & ~0x3f) == 0 && ((1L << (_la - 113)) & ((1L << (PLUS - 113)) | (1L << (MINUS - 113)) | (1L << (INTEGER_VALUE - 113)) | (1L << (DECIMAL_VALUE - 113)))) != 0)) { + if (((((_la - 115)) & ~0x3f) == 0 && ((1L << (_la - 115)) & ((1L << (PLUS - 115)) | (1L << 
(MINUS - 115)) | (1L << (INTEGER_VALUE - 115)) | (1L << (DECIMAL_VALUE - 115)))) != 0)) { { - setState(212); + setState(224); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(211); + setState(223); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -1200,7 +1224,7 @@ public final StatementContext statement() throws RecognitionException { } } - setState(214); + setState(226); ((SysTypesContext)_localctx).type = number(); } } @@ -1257,34 +1281,34 @@ public final QueryContext query() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(228); + setState(240); _la = _input.LA(1); if (_la==WITH) { { - setState(219); + setState(231); match(WITH); - setState(220); + setState(232); namedQuery(); - setState(225); + setState(237); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(221); + setState(233); match(T__2); - setState(222); + setState(234); namedQuery(); } } - setState(227); + setState(239); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(230); + setState(242); queryNoWith(); } } @@ -1340,42 +1364,42 @@ public final QueryNoWithContext queryNoWith() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(232); + setState(244); queryTerm(); - setState(243); + setState(255); _la = _input.LA(1); if (_la==ORDER) { { - setState(233); + setState(245); match(ORDER); - setState(234); + setState(246); match(BY); - setState(235); + setState(247); orderBy(); - setState(240); + setState(252); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(236); + setState(248); match(T__2); - setState(237); + setState(249); orderBy(); } } - setState(242); + setState(254); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(246); + setState(258); _la = _input.LA(1); if (_la==LIMIT || _la==LIMIT_ESC) { { - setState(245); + setState(257); limitClause(); } } @@ -1424,14 +1448,14 @@ public final LimitClauseContext limitClause() throws RecognitionException { enterRule(_localctx, 10, RULE_limitClause); int _la; try { - setState(253); + setState(265); switch (_input.LA(1)) { case LIMIT: enterOuterAlt(_localctx, 1); { - setState(248); + setState(260); match(LIMIT); - setState(249); + setState(261); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1444,9 +1468,9 @@ public final LimitClauseContext limitClause() throws RecognitionException { case LIMIT_ESC: enterOuterAlt(_localctx, 2); { - setState(250); + setState(262); match(LIMIT_ESC); - setState(251); + setState(263); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1454,7 +1478,7 @@ public final LimitClauseContext limitClause() throws RecognitionException { } else { consume(); } - setState(252); + setState(264); match(ESC_END); } break; @@ -1527,13 +1551,13 @@ public final QueryTermContext queryTerm() throws RecognitionException { QueryTermContext _localctx = new QueryTermContext(_ctx, getState()); enterRule(_localctx, 12, RULE_queryTerm); try { - setState(260); + setState(272); switch (_input.LA(1)) { case SELECT: _localctx = new QueryPrimaryDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(255); + setState(267); querySpecification(); } break; @@ -1541,11 +1565,11 @@ public final QueryTermContext queryTerm() throws RecognitionException { _localctx = new SubqueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(256); + 
setState(268); match(T__0); - setState(257); + setState(269); queryNoWith(); - setState(258); + setState(270); match(T__1); } break; @@ -1601,13 +1625,13 @@ public final OrderByContext orderBy() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(262); + setState(274); expression(); - setState(264); + setState(276); _la = _input.LA(1); if (_la==ASC || _la==DESC) { { - setState(263); + setState(275); ((OrderByContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -1618,13 +1642,13 @@ public final OrderByContext orderBy() throws RecognitionException { } } - setState(268); + setState(280); _la = _input.LA(1); if (_la==NULLS) { { - setState(266); + setState(278); match(NULLS); - setState(267); + setState(279); ((OrderByContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -1703,75 +1727,75 @@ public final QuerySpecificationContext querySpecification() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(270); + setState(282); match(SELECT); - setState(272); + setState(284); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(271); + setState(283); setQuantifier(); } } - setState(274); + setState(286); selectItem(); - setState(279); + setState(291); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(275); + setState(287); match(T__2); - setState(276); + setState(288); selectItem(); } } - setState(281); + setState(293); _errHandler.sync(this); _la = _input.LA(1); } - setState(283); + setState(295); _la = _input.LA(1); if (_la==FROM) { { - setState(282); + setState(294); fromClause(); } } - setState(287); + setState(299); _la = _input.LA(1); if (_la==WHERE) { { - setState(285); + setState(297); match(WHERE); - setState(286); + setState(298); ((QuerySpecificationContext)_localctx).where = booleanExpression(0); } } - setState(292); + setState(304); _la = _input.LA(1); if (_la==GROUP) { { - setState(289); + setState(301); match(GROUP); - setState(290); + setState(302); match(BY); - setState(291); + setState(303); groupBy(); } } - setState(296); + setState(308); _la = _input.LA(1); if (_la==HAVING) { { - setState(294); + setState(306); match(HAVING); - setState(295); + setState(307); ((QuerySpecificationContext)_localctx).having = booleanExpression(0); } } @@ -1823,23 +1847,23 @@ public final FromClauseContext fromClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(298); + setState(310); match(FROM); - setState(299); + setState(311); relation(); - setState(304); + setState(316); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(300); + setState(312); match(T__2); - setState(301); + setState(313); relation(); } } - setState(306); + setState(318); _errHandler.sync(this); _la = _input.LA(1); } @@ -1892,30 +1916,30 @@ public final GroupByContext groupBy() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(308); + setState(320); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(307); + setState(319); setQuantifier(); } } - setState(310); + setState(322); groupingElement(); - setState(315); + setState(327); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(311); + setState(323); match(T__2); - setState(312); + setState(324); groupingElement(); } } - setState(317); + setState(329); _errHandler.sync(this); _la = _input.LA(1); } @@ -1970,7 +1994,7 @@ public final GroupingElementContext 
groupingElement() throws RecognitionExceptio _localctx = new SingleGroupingSetContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(318); + setState(330); groupingExpressions(); } } @@ -2016,47 +2040,47 @@ public final GroupingExpressionsContext groupingExpressions() throws Recognition enterRule(_localctx, 24, RULE_groupingExpressions); int _la; try { - setState(333); + setState(345); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(320); + setState(332); match(T__0); - setState(329); + setState(341); _la = _input.LA(1); - if (((((_la - 1)) & ~0x3f) == 0 && ((1L << (_la - 1)) & ((1L << (T__0 - 1)) | (1L << (ANALYZE - 1)) | (1L << (ANALYZED - 1)) | (1L << (CASE - 1)) | (1L << (CAST - 1)) | (1L << (CATALOGS - 1)) | (1L << (COLUMNS - 1)) | (1L << (CONVERT - 1)) | (1L << (CURRENT_DATE - 1)) | (1L << (CURRENT_TIME - 1)) | (1L << (CURRENT_TIMESTAMP - 1)) | (1L << (DAY - 1)) | (1L << (DEBUG - 1)) | (1L << (EXECUTABLE - 1)) | (1L << (EXISTS - 1)) | (1L << (EXPLAIN - 1)) | (1L << (EXTRACT - 1)) | (1L << (FALSE - 1)) | (1L << (FIRST - 1)) | (1L << (FORMAT - 1)) | (1L << (FULL - 1)) | (1L << (FUNCTIONS - 1)) | (1L << (GRAPHVIZ - 1)) | (1L << (HOUR - 1)) | (1L << (INTERVAL - 1)) | (1L << (LAST - 1)) | (1L << (LEFT - 1)) | (1L << (LIMIT - 1)) | (1L << (MAPPED - 1)) | (1L << (MATCH - 1)) | (1L << (MINUTE - 1)) | (1L << (MONTH - 1)) | (1L << (NOT - 1)) | (1L << (NULL - 1)))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (OPTIMIZED - 67)) | (1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RIGHT - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TRUE - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (FUNCTION_ESC - 67)) | (1L << (DATE_ESC - 67)) | (1L << (TIME_ESC - 67)) | (1L << (TIMESTAMP_ESC - 67)) | (1L << (GUID_ESC - 67)) | (1L << (PLUS - 67)) | (1L << (MINUS - 67)) | (1L << (ASTERISK - 67)) | (1L << (PARAM - 67)) | (1L << (STRING - 67)) | (1L << (INTEGER_VALUE - 67)) | (1L << (DECIMAL_VALUE - 67)) | (1L << (IDENTIFIER - 67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (NOT - 65)) | (1L << (NULL - 65)) | (1L << (OPTIMIZED - 65)) | (1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RIGHT - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TRUE - 65)) | (1L << (TYPE - 
65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (FUNCTION_ESC - 65)) | (1L << (DATE_ESC - 65)) | (1L << (TIME_ESC - 65)) | (1L << (TIMESTAMP_ESC - 65)) | (1L << (GUID_ESC - 65)) | (1L << (PLUS - 65)) | (1L << (MINUS - 65)) | (1L << (ASTERISK - 65)) | (1L << (PARAM - 65)) | (1L << (STRING - 65)) | (1L << (INTEGER_VALUE - 65)) | (1L << (DECIMAL_VALUE - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)))) != 0) || _la==QUOTED_IDENTIFIER || _la==BACKQUOTED_IDENTIFIER) { { - setState(321); + setState(333); expression(); - setState(326); + setState(338); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(322); + setState(334); match(T__2); - setState(323); + setState(335); expression(); } } - setState(328); + setState(340); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(331); + setState(343); match(T__1); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(332); + setState(344); expression(); } break; @@ -2107,15 +2131,15 @@ public final NamedQueryContext namedQuery() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(335); + setState(347); ((NamedQueryContext)_localctx).name = identifier(); - setState(336); + setState(348); match(AS); - setState(337); + setState(349); match(T__0); - setState(338); + setState(350); queryNoWith(); - setState(339); + setState(351); match(T__1); } } @@ -2159,7 +2183,7 @@ public final SetQuantifierContext setQuantifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(341); + setState(353); _la = _input.LA(1); if ( !(_la==ALL || _la==DISTINCT) ) { _errHandler.recoverInline(this); @@ -2222,23 +2246,23 @@ public final SelectItemContext selectItem() throws RecognitionException { _localctx = new SelectExpressionContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(343); + setState(355); expression(); - setState(348); + setState(360); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(345); + setState(357); _la = _input.LA(1); if (_la==AS) { { - setState(344); + setState(356); match(AS); } } - setState(347); + setState(359); identifier(); } break; @@ -2292,19 +2316,19 @@ public final RelationContext relation() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(350); + setState(362); relationPrimary(); - setState(354); + setState(366); _errHandler.sync(this); _la = _input.LA(1); - while (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (FULL - 40)) | (1L << (INNER - 40)) | (1L << (JOIN - 40)) | (1L << (LEFT - 40)) | (1L << (NATURAL - 40)) | (1L << (RIGHT - 40)))) != 0)) { + while (((((_la - 41)) & ~0x3f) == 0 && ((1L << (_la - 41)) & ((1L << (FULL - 41)) | (1L << (INNER - 41)) | (1L << (JOIN - 41)) | (1L << (LEFT - 41)) | (1L << (NATURAL - 41)) | (1L << (RIGHT - 41)))) != 0)) { { { - setState(351); + setState(363); joinRelation(); } } - setState(356); + setState(368); _errHandler.sync(this); _la = _input.LA(1); } @@ -2358,7 +2382,7 @@ public final JoinRelationContext joinRelation() throws RecognitionException { enterRule(_localctx, 34, RULE_joinRelation); int _la; try { - setState(368); + setState(380); switch (_input.LA(1)) { case FULL: case INNER: @@ -2368,18 +2392,18 @@ public final JoinRelationContext joinRelation() throws RecognitionException { enterOuterAlt(_localctx, 1); { { - setState(357); + setState(369); joinType(); } - setState(358); + 
setState(370); match(JOIN); - setState(359); + setState(371); ((JoinRelationContext)_localctx).right = relationPrimary(); - setState(361); + setState(373); _la = _input.LA(1); if (_la==ON || _la==USING) { { - setState(360); + setState(372); joinCriteria(); } } @@ -2389,13 +2413,13 @@ public final JoinRelationContext joinRelation() throws RecognitionException { case NATURAL: enterOuterAlt(_localctx, 2); { - setState(363); + setState(375); match(NATURAL); - setState(364); + setState(376); joinType(); - setState(365); + setState(377); match(JOIN); - setState(366); + setState(378); ((JoinRelationContext)_localctx).right = relationPrimary(); } break; @@ -2444,17 +2468,17 @@ public final JoinTypeContext joinType() throws RecognitionException { enterRule(_localctx, 36, RULE_joinType); int _la; try { - setState(385); + setState(397); switch (_input.LA(1)) { case INNER: case JOIN: enterOuterAlt(_localctx, 1); { - setState(371); + setState(383); _la = _input.LA(1); if (_la==INNER) { { - setState(370); + setState(382); match(INNER); } } @@ -2464,13 +2488,13 @@ public final JoinTypeContext joinType() throws RecognitionException { case LEFT: enterOuterAlt(_localctx, 2); { - setState(373); + setState(385); match(LEFT); - setState(375); + setState(387); _la = _input.LA(1); if (_la==OUTER) { { - setState(374); + setState(386); match(OUTER); } } @@ -2480,13 +2504,13 @@ public final JoinTypeContext joinType() throws RecognitionException { case RIGHT: enterOuterAlt(_localctx, 3); { - setState(377); + setState(389); match(RIGHT); - setState(379); + setState(391); _la = _input.LA(1); if (_la==OUTER) { { - setState(378); + setState(390); match(OUTER); } } @@ -2496,13 +2520,13 @@ public final JoinTypeContext joinType() throws RecognitionException { case FULL: enterOuterAlt(_localctx, 4); { - setState(381); + setState(393); match(FULL); - setState(383); + setState(395); _la = _input.LA(1); if (_la==OUTER) { { - setState(382); + setState(394); match(OUTER); } } @@ -2560,43 +2584,43 @@ public final JoinCriteriaContext joinCriteria() throws RecognitionException { enterRule(_localctx, 38, RULE_joinCriteria); int _la; try { - setState(401); + setState(413); switch (_input.LA(1)) { case ON: enterOuterAlt(_localctx, 1); { - setState(387); + setState(399); match(ON); - setState(388); + setState(400); booleanExpression(0); } break; case USING: enterOuterAlt(_localctx, 2); { - setState(389); + setState(401); match(USING); - setState(390); + setState(402); match(T__0); - setState(391); + setState(403); identifier(); - setState(396); + setState(408); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(392); + setState(404); match(T__2); - setState(393); + setState(405); identifier(); } } - setState(398); + setState(410); _errHandler.sync(this); _la = _input.LA(1); } - setState(399); + setState(411); match(T__1); } break; @@ -2676,6 +2700,7 @@ public static class TableNameContext extends RelationPrimaryContext { public TableIdentifierContext tableIdentifier() { return getRuleContext(TableIdentifierContext.class,0); } + public TerminalNode FROZEN() { return getToken(SqlBaseParser.FROZEN, 0); } public QualifiedNameContext qualifiedName() { return getRuleContext(QualifiedNameContext.class,0); } @@ -2701,30 +2726,39 @@ public final RelationPrimaryContext relationPrimary() throws RecognitionExceptio enterRule(_localctx, 40, RULE_relationPrimary); int _la; try { - setState(428); + setState(443); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) { + switch ( 
getInterpreter().adaptivePredict(_input,63,_ctx) ) { case 1: _localctx = new TableNameContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(403); + setState(416); + _la = _input.LA(1); + if (_la==FROZEN) { + { + setState(415); + match(FROZEN); + } + } + + setState(418); tableIdentifier(); - setState(408); + setState(423); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: { - setState(405); + setState(420); _la = _input.LA(1); if (_la==AS) { { - setState(404); + setState(419); match(AS); } } - setState(407); + setState(422); qualifiedName(); } break; @@ -2735,27 +2769,27 @@ public final RelationPrimaryContext relationPrimary() throws RecognitionExceptio _localctx = new AliasedQueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(410); + setState(425); match(T__0); - setState(411); + setState(426); queryNoWith(); - setState(412); + setState(427); match(T__1); - setState(417); + setState(432); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,60,_ctx) ) { case 1: { - setState(414); + setState(429); _la = _input.LA(1); if (_la==AS) { { - setState(413); + setState(428); match(AS); } } - setState(416); + setState(431); qualifiedName(); } break; @@ -2766,27 +2800,27 @@ public final RelationPrimaryContext relationPrimary() throws RecognitionExceptio _localctx = new AliasedRelationContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(419); + setState(434); match(T__0); - setState(420); + setState(435); relation(); - setState(421); + setState(436); match(T__1); - setState(426); + setState(441); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,62,_ctx) ) { case 1: { - setState(423); + setState(438); _la = _input.LA(1); if (_la==AS) { { - setState(422); + setState(437); match(AS); } } - setState(425); + setState(440); qualifiedName(); } break; @@ -2835,7 +2869,7 @@ public final ExpressionContext expression() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(430); + setState(445); booleanExpression(0); } } @@ -3043,18 +3077,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(463); + setState(478); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,60,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) { case 1: { _localctx = new LogicalNotContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(433); + setState(448); match(NOT); - setState(434); + setState(449); booleanExpression(8); } break; @@ -3063,13 +3097,13 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new ExistsContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(435); + setState(450); match(EXISTS); - setState(436); + setState(451); match(T__0); - setState(437); + setState(452); query(); - setState(438); + setState(453); match(T__1); } break; @@ -3078,15 +3112,15 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new StringQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(440); + setState(455); match(QUERY); - setState(441); + setState(456); match(T__0); - setState(442); + setState(457); 
((StringQueryContext)_localctx).queryString = string(); - setState(443); + setState(458); matchQueryOptions(); - setState(444); + setState(459); match(T__1); } break; @@ -3095,19 +3129,19 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(446); + setState(461); match(MATCH); - setState(447); + setState(462); match(T__0); - setState(448); + setState(463); ((MatchQueryContext)_localctx).singleField = qualifiedName(); - setState(449); + setState(464); match(T__2); - setState(450); + setState(465); ((MatchQueryContext)_localctx).queryString = string(); - setState(451); + setState(466); matchQueryOptions(); - setState(452); + setState(467); match(T__1); } break; @@ -3116,19 +3150,19 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MultiMatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(454); + setState(469); match(MATCH); - setState(455); + setState(470); match(T__0); - setState(456); + setState(471); ((MultiMatchQueryContext)_localctx).multiFields = string(); - setState(457); + setState(472); match(T__2); - setState(458); + setState(473); ((MultiMatchQueryContext)_localctx).queryString = string(); - setState(459); + setState(474); matchQueryOptions(); - setState(460); + setState(475); match(T__1); } break; @@ -3137,33 +3171,33 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(462); + setState(477); predicated(); } break; } _ctx.stop = _input.LT(-1); - setState(473); + setState(488); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,62,_ctx); + _alt = getInterpreter().adaptivePredict(_input,66,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(471); + setState(486); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,61,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,65,_ctx) ) { case 1: { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(465); + setState(480); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(466); + setState(481); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(467); + setState(482); ((LogicalBinaryContext)_localctx).right = booleanExpression(3); } break; @@ -3172,20 +3206,20 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(468); + setState(483); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(469); + setState(484); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(470); + setState(485); ((LogicalBinaryContext)_localctx).right = booleanExpression(2); } break; } } } - setState(475); + setState(490); _errHandler.sync(this); - _alt = 
getInterpreter().adaptivePredict(_input,62,_ctx); + _alt = getInterpreter().adaptivePredict(_input,66,_ctx); } } } @@ -3233,19 +3267,19 @@ public final MatchQueryOptionsContext matchQueryOptions() throws RecognitionExce try { enterOuterAlt(_localctx, 1); { - setState(480); + setState(495); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(476); + setState(491); match(T__2); - setState(477); + setState(492); string(); } } - setState(482); + setState(497); _errHandler.sync(this); _la = _input.LA(1); } @@ -3294,14 +3328,14 @@ public final PredicatedContext predicated() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(483); + setState(498); valueExpression(0); - setState(485); + setState(500); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,68,_ctx) ) { case 1: { - setState(484); + setState(499); predicate(); } break; @@ -3371,142 +3405,142 @@ public final PredicateContext predicate() throws RecognitionException { enterRule(_localctx, 50, RULE_predicate); int _la; try { - setState(533); + setState(548); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,72,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,76,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(488); + setState(503); _la = _input.LA(1); if (_la==NOT) { { - setState(487); + setState(502); match(NOT); } } - setState(490); + setState(505); ((PredicateContext)_localctx).kind = match(BETWEEN); - setState(491); + setState(506); ((PredicateContext)_localctx).lower = valueExpression(0); - setState(492); + setState(507); match(AND); - setState(493); + setState(508); ((PredicateContext)_localctx).upper = valueExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(496); + setState(511); _la = _input.LA(1); if (_la==NOT) { { - setState(495); + setState(510); match(NOT); } } - setState(498); + setState(513); ((PredicateContext)_localctx).kind = match(IN); - setState(499); + setState(514); match(T__0); - setState(500); + setState(515); valueExpression(0); - setState(505); + setState(520); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(501); + setState(516); match(T__2); - setState(502); + setState(517); valueExpression(0); } } - setState(507); + setState(522); _errHandler.sync(this); _la = _input.LA(1); } - setState(508); + setState(523); match(T__1); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(511); + setState(526); _la = _input.LA(1); if (_la==NOT) { { - setState(510); + setState(525); match(NOT); } } - setState(513); + setState(528); ((PredicateContext)_localctx).kind = match(IN); - setState(514); + setState(529); match(T__0); - setState(515); + setState(530); query(); - setState(516); + setState(531); match(T__1); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(519); + setState(534); _la = _input.LA(1); if (_la==NOT) { { - setState(518); + setState(533); match(NOT); } } - setState(521); + setState(536); ((PredicateContext)_localctx).kind = match(LIKE); - setState(522); + setState(537); pattern(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(524); + setState(539); _la = _input.LA(1); if (_la==NOT) { { - setState(523); + setState(538); match(NOT); } } - setState(526); + setState(541); ((PredicateContext)_localctx).kind = match(RLIKE); - setState(527); + setState(542); ((PredicateContext)_localctx).regex = string(); } break; case 6: enterOuterAlt(_localctx, 6); { 
- setState(528); + setState(543); match(IS); - setState(530); + setState(545); _la = _input.LA(1); if (_la==NOT) { { - setState(529); + setState(544); match(NOT); } } - setState(532); + setState(547); ((PredicateContext)_localctx).kind = match(NULL); } break; @@ -3553,9 +3587,9 @@ public final LikePatternContext likePattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(535); + setState(550); match(LIKE); - setState(536); + setState(551); pattern(); } } @@ -3603,14 +3637,14 @@ public final PatternContext pattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(538); + setState(553); ((PatternContext)_localctx).value = string(); - setState(540); + setState(555); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,73,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,77,_ctx) ) { case 1: { - setState(539); + setState(554); patternEscape(); } break; @@ -3658,25 +3692,25 @@ public final PatternEscapeContext patternEscape() throws RecognitionException { PatternEscapeContext _localctx = new PatternEscapeContext(_ctx, getState()); enterRule(_localctx, 56, RULE_patternEscape); try { - setState(548); + setState(563); switch (_input.LA(1)) { case ESCAPE: enterOuterAlt(_localctx, 1); { - setState(542); + setState(557); match(ESCAPE); - setState(543); + setState(558); ((PatternEscapeContext)_localctx).escape = string(); } break; case ESCAPE_ESC: enterOuterAlt(_localctx, 2); { - setState(544); + setState(559); match(ESCAPE_ESC); - setState(545); + setState(560); ((PatternEscapeContext)_localctx).escape = string(); - setState(546); + setState(561); match(ESC_END); } break; @@ -3821,7 +3855,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti int _alt; enterOuterAlt(_localctx, 1); { - setState(554); + setState(569); switch (_input.LA(1)) { case T__0: case ANALYZE: @@ -3891,7 +3925,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _ctx = _localctx; _prevctx = _localctx; - setState(551); + setState(566); primaryExpression(0); } break; @@ -3901,7 +3935,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(552); + setState(567); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3909,7 +3943,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti } else { consume(); } - setState(553); + setState(568); valueExpression(4); } break; @@ -3917,33 +3951,33 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti throw new NoViableAltException(this); } _ctx.stop = _input.LT(-1); - setState(568); + setState(583); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,77,_ctx); + _alt = getInterpreter().adaptivePredict(_input,81,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(566); + setState(581); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,76,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,80,_ctx) ) { case 1: { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, 
_startState, RULE_valueExpression); - setState(556); + setState(571); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(557); + setState(572); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 115)) & ~0x3f) == 0 && ((1L << (_la - 115)) & ((1L << (ASTERISK - 115)) | (1L << (SLASH - 115)) | (1L << (PERCENT - 115)))) != 0)) ) { + if ( !(((((_la - 117)) & ~0x3f) == 0 && ((1L << (_la - 117)) & ((1L << (ASTERISK - 117)) | (1L << (SLASH - 117)) | (1L << (PERCENT - 117)))) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { consume(); } - setState(558); + setState(573); ((ArithmeticBinaryContext)_localctx).right = valueExpression(4); } break; @@ -3952,9 +3986,9 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(559); + setState(574); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(560); + setState(575); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3962,7 +3996,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti } else { consume(); } - setState(561); + setState(576); ((ArithmeticBinaryContext)_localctx).right = valueExpression(3); } break; @@ -3971,20 +4005,20 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _localctx = new ComparisonContext(new ValueExpressionContext(_parentctx, _parentState)); ((ComparisonContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(562); + setState(577); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(563); + setState(578); comparisonOperator(); - setState(564); + setState(579); ((ComparisonContext)_localctx).right = valueExpression(2); } break; } } } - setState(570); + setState(585); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,77,_ctx); + _alt = getInterpreter().adaptivePredict(_input,81,_ctx); } } } @@ -4256,16 +4290,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(607); + setState(622); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,82,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,86,_ctx) ) { case 1: { _localctx = new CastContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(572); + setState(587); castExpression(); } break; @@ -4274,7 +4308,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ExtractContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(573); + setState(588); extractExpression(); } break; @@ -4283,7 +4317,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new CurrentDateTimeFunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(574); + setState(589); builtinDateTimeFunction(); } break; @@ -4292,7 +4326,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new 
ConstantDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(575); + setState(590); constant(); } break; @@ -4301,18 +4335,18 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new StarContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(579); + setState(594); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (OPTIMIZED - 67)) | (1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (IDENTIFIER - 67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 69)) & ~0x3f) == 0 && ((1L << (_la - 69)) & ((1L << (OPTIMIZED - 69)) | (1L << (PARSED - 69)) | (1L << (PHYSICAL - 69)) | (1L << (PLAN - 69)) | (1L << (RLIKE - 69)) | (1L << (QUERY - 69)) | (1L << (SCHEMAS - 69)) | (1L << (SECOND - 69)) | (1L << (SHOW - 69)) | (1L << (SYS - 69)) | (1L << (TABLES - 69)) | (1L << (TEXT - 69)) | (1L << (TYPE - 69)) | (1L << (TYPES - 69)) | (1L << (VERIFY - 69)) | (1L << (YEAR - 69)) | (1L << (IDENTIFIER - 69)) | (1L << (DIGIT_IDENTIFIER - 69)) | (1L << (QUOTED_IDENTIFIER - 69)) | (1L << (BACKQUOTED_IDENTIFIER - 69)))) != 0)) { { - setState(576); + setState(591); qualifiedName(); - setState(577); + setState(592); match(DOT); } } - setState(581); + setState(596); match(ASTERISK); } break; @@ -4321,7 +4355,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(582); + setState(597); functionExpression(); } break; @@ -4330,11 +4364,11 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new SubqueryExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(583); + setState(598); match(T__0); - setState(584); + setState(599); query(); - setState(585); + setState(600); match(T__1); } break; @@ -4343,7 +4377,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(587); + setState(602); qualifiedName(); } break; @@ -4352,11 +4386,11 @@ private PrimaryExpressionContext 
primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(588); + setState(603); match(T__0); - setState(589); + setState(604); expression(); - setState(590); + setState(605); match(T__1); } break; @@ -4365,51 +4399,51 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new CaseContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(592); + setState(607); match(CASE); - setState(594); + setState(609); _la = _input.LA(1); - if (((((_la - 1)) & ~0x3f) == 0 && ((1L << (_la - 1)) & ((1L << (T__0 - 1)) | (1L << (ANALYZE - 1)) | (1L << (ANALYZED - 1)) | (1L << (CASE - 1)) | (1L << (CAST - 1)) | (1L << (CATALOGS - 1)) | (1L << (COLUMNS - 1)) | (1L << (CONVERT - 1)) | (1L << (CURRENT_DATE - 1)) | (1L << (CURRENT_TIME - 1)) | (1L << (CURRENT_TIMESTAMP - 1)) | (1L << (DAY - 1)) | (1L << (DEBUG - 1)) | (1L << (EXECUTABLE - 1)) | (1L << (EXISTS - 1)) | (1L << (EXPLAIN - 1)) | (1L << (EXTRACT - 1)) | (1L << (FALSE - 1)) | (1L << (FIRST - 1)) | (1L << (FORMAT - 1)) | (1L << (FULL - 1)) | (1L << (FUNCTIONS - 1)) | (1L << (GRAPHVIZ - 1)) | (1L << (HOUR - 1)) | (1L << (INTERVAL - 1)) | (1L << (LAST - 1)) | (1L << (LEFT - 1)) | (1L << (LIMIT - 1)) | (1L << (MAPPED - 1)) | (1L << (MATCH - 1)) | (1L << (MINUTE - 1)) | (1L << (MONTH - 1)) | (1L << (NOT - 1)) | (1L << (NULL - 1)))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (OPTIMIZED - 67)) | (1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RIGHT - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TRUE - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (FUNCTION_ESC - 67)) | (1L << (DATE_ESC - 67)) | (1L << (TIME_ESC - 67)) | (1L << (TIMESTAMP_ESC - 67)) | (1L << (GUID_ESC - 67)) | (1L << (PLUS - 67)) | (1L << (MINUS - 67)) | (1L << (ASTERISK - 67)) | (1L << (PARAM - 67)) | (1L << (STRING - 67)) | (1L << (INTEGER_VALUE - 67)) | (1L << (DECIMAL_VALUE - 67)) | (1L << (IDENTIFIER - 67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (NOT - 65)) | (1L << (NULL - 65)) | (1L << (OPTIMIZED - 65)) | (1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RIGHT - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TRUE - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (FUNCTION_ESC - 65)) | 
(1L << (DATE_ESC - 65)) | (1L << (TIME_ESC - 65)) | (1L << (TIMESTAMP_ESC - 65)) | (1L << (GUID_ESC - 65)) | (1L << (PLUS - 65)) | (1L << (MINUS - 65)) | (1L << (ASTERISK - 65)) | (1L << (PARAM - 65)) | (1L << (STRING - 65)) | (1L << (INTEGER_VALUE - 65)) | (1L << (DECIMAL_VALUE - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)))) != 0) || _la==QUOTED_IDENTIFIER || _la==BACKQUOTED_IDENTIFIER) { { - setState(593); + setState(608); ((CaseContext)_localctx).operand = booleanExpression(0); } } - setState(597); + setState(612); _errHandler.sync(this); _la = _input.LA(1); do { { { - setState(596); + setState(611); whenClause(); } } - setState(599); + setState(614); _errHandler.sync(this); _la = _input.LA(1); } while ( _la==WHEN ); - setState(603); + setState(618); _la = _input.LA(1); if (_la==ELSE) { { - setState(601); + setState(616); match(ELSE); - setState(602); + setState(617); ((CaseContext)_localctx).elseClause = booleanExpression(0); } } - setState(605); + setState(620); match(END); } break; } _ctx.stop = _input.LT(-1); - setState(614); + setState(629); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,83,_ctx); + _alt = getInterpreter().adaptivePredict(_input,87,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); @@ -4418,18 +4452,18 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new CastOperatorExpressionContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(609); + setState(624); if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); - setState(610); + setState(625); match(CAST_OP); - setState(611); + setState(626); dataType(); } } } - setState(616); + setState(631); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,83,_ctx); + _alt = getInterpreter().adaptivePredict(_input,87,_ctx); } } } @@ -4472,26 +4506,26 @@ public final BuiltinDateTimeFunctionContext builtinDateTimeFunction() throws Rec BuiltinDateTimeFunctionContext _localctx = new BuiltinDateTimeFunctionContext(_ctx, getState()); enterRule(_localctx, 62, RULE_builtinDateTimeFunction); try { - setState(620); + setState(635); switch (_input.LA(1)) { case CURRENT_TIMESTAMP: enterOuterAlt(_localctx, 1); { - setState(617); + setState(632); ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_TIMESTAMP); } break; case CURRENT_DATE: enterOuterAlt(_localctx, 2); { - setState(618); + setState(633); ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_DATE); } break; case CURRENT_TIME: enterOuterAlt(_localctx, 3); { - setState(619); + setState(634); ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_TIME); } break; @@ -4542,42 +4576,42 @@ public final CastExpressionContext castExpression() throws RecognitionException CastExpressionContext _localctx = new CastExpressionContext(_ctx, getState()); enterRule(_localctx, 64, RULE_castExpression); try { - setState(632); + setState(647); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,85,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,89,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(622); + setState(637); castTemplate(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(623); + setState(638); match(FUNCTION_ESC); - setState(624); + setState(639); 
castTemplate(); - setState(625); + setState(640); match(ESC_END); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(627); + setState(642); convertTemplate(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(628); + setState(643); match(FUNCTION_ESC); - setState(629); + setState(644); convertTemplate(); - setState(630); + setState(645); match(ESC_END); } break; @@ -4628,17 +4662,17 @@ public final CastTemplateContext castTemplate() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(634); + setState(649); match(CAST); - setState(635); + setState(650); match(T__0); - setState(636); + setState(651); expression(); - setState(637); + setState(652); match(AS); - setState(638); + setState(653); dataType(); - setState(639); + setState(654); match(T__1); } } @@ -4686,17 +4720,17 @@ public final ConvertTemplateContext convertTemplate() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(641); + setState(656); match(CONVERT); - setState(642); + setState(657); match(T__0); - setState(643); + setState(658); expression(); - setState(644); + setState(659); match(T__2); - setState(645); + setState(660); dataType(); - setState(646); + setState(661); match(T__1); } } @@ -4740,23 +4774,23 @@ public final ExtractExpressionContext extractExpression() throws RecognitionExce ExtractExpressionContext _localctx = new ExtractExpressionContext(_ctx, getState()); enterRule(_localctx, 70, RULE_extractExpression); try { - setState(653); + setState(668); switch (_input.LA(1)) { case EXTRACT: enterOuterAlt(_localctx, 1); { - setState(648); + setState(663); extractTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(649); + setState(664); match(FUNCTION_ESC); - setState(650); + setState(665); extractTemplate(); - setState(651); + setState(666); match(ESC_END); } break; @@ -4810,17 +4844,17 @@ public final ExtractTemplateContext extractTemplate() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(655); + setState(670); match(EXTRACT); - setState(656); + setState(671); match(T__0); - setState(657); + setState(672); ((ExtractTemplateContext)_localctx).field = identifier(); - setState(658); + setState(673); match(FROM); - setState(659); + setState(674); valueExpression(0); - setState(660); + setState(675); match(T__1); } } @@ -4863,7 +4897,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); enterRule(_localctx, 74, RULE_functionExpression); try { - setState(667); + setState(682); switch (_input.LA(1)) { case ANALYZE: case ANALYZED: @@ -4912,18 +4946,18 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(662); + setState(677); functionTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(663); + setState(678); match(FUNCTION_ESC); - setState(664); + setState(679); functionTemplate(); - setState(665); + setState(680); match(ESC_END); } break; @@ -4981,45 +5015,45 @@ public final FunctionTemplateContext functionTemplate() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(669); + setState(684); functionName(); - setState(670); + setState(685); match(T__0); - setState(682); + setState(697); _la = _input.LA(1); - if (((((_la - 1)) & ~0x3f) == 0 && ((1L << (_la - 1)) & ((1L << (T__0 - 1)) | (1L << (ALL - 1)) | (1L << (ANALYZE - 1)) | (1L << 
(ANALYZED - 1)) | (1L << (CASE - 1)) | (1L << (CAST - 1)) | (1L << (CATALOGS - 1)) | (1L << (COLUMNS - 1)) | (1L << (CONVERT - 1)) | (1L << (CURRENT_DATE - 1)) | (1L << (CURRENT_TIME - 1)) | (1L << (CURRENT_TIMESTAMP - 1)) | (1L << (DAY - 1)) | (1L << (DEBUG - 1)) | (1L << (DISTINCT - 1)) | (1L << (EXECUTABLE - 1)) | (1L << (EXISTS - 1)) | (1L << (EXPLAIN - 1)) | (1L << (EXTRACT - 1)) | (1L << (FALSE - 1)) | (1L << (FIRST - 1)) | (1L << (FORMAT - 1)) | (1L << (FULL - 1)) | (1L << (FUNCTIONS - 1)) | (1L << (GRAPHVIZ - 1)) | (1L << (HOUR - 1)) | (1L << (INTERVAL - 1)) | (1L << (LAST - 1)) | (1L << (LEFT - 1)) | (1L << (LIMIT - 1)) | (1L << (MAPPED - 1)) | (1L << (MATCH - 1)) | (1L << (MINUTE - 1)) | (1L << (MONTH - 1)) | (1L << (NOT - 1)) | (1L << (NULL - 1)))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (OPTIMIZED - 67)) | (1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RIGHT - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TRUE - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (FUNCTION_ESC - 67)) | (1L << (DATE_ESC - 67)) | (1L << (TIME_ESC - 67)) | (1L << (TIMESTAMP_ESC - 67)) | (1L << (GUID_ESC - 67)) | (1L << (PLUS - 67)) | (1L << (MINUS - 67)) | (1L << (ASTERISK - 67)) | (1L << (PARAM - 67)) | (1L << (STRING - 67)) | (1L << (INTEGER_VALUE - 67)) | (1L << (DECIMAL_VALUE - 67)) | (1L << (IDENTIFIER - 67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (NOT - 65)) | (1L << (NULL - 65)) | (1L << (OPTIMIZED - 65)) | (1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RIGHT - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TRUE - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (FUNCTION_ESC - 65)) | (1L << (DATE_ESC - 65)) | (1L << (TIME_ESC - 65)) | (1L << (TIMESTAMP_ESC - 65)) | (1L << (GUID_ESC - 65)) | (1L << (PLUS - 65)) | (1L << (MINUS - 65)) | (1L << (ASTERISK - 65)) | (1L << (PARAM - 65)) | (1L << (STRING - 65)) | (1L << (INTEGER_VALUE - 65)) | (1L << (DECIMAL_VALUE - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)))) != 0) || _la==QUOTED_IDENTIFIER || _la==BACKQUOTED_IDENTIFIER) { { - setState(672); + setState(687); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(671); + setState(686); setQuantifier(); } } - setState(674); + setState(689); expression(); - setState(679); + 
setState(694); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(675); + setState(690); match(T__2); - setState(676); + setState(691); expression(); } } - setState(681); + setState(696); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(684); + setState(699); match(T__1); } } @@ -5063,19 +5097,19 @@ public final FunctionNameContext functionName() throws RecognitionException { FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); enterRule(_localctx, 78, RULE_functionName); try { - setState(689); + setState(704); switch (_input.LA(1)) { case LEFT: enterOuterAlt(_localctx, 1); { - setState(686); + setState(701); match(LEFT); } break; case RIGHT: enterOuterAlt(_localctx, 2); { - setState(687); + setState(702); match(RIGHT); } break; @@ -5124,7 +5158,7 @@ public final FunctionNameContext functionName() throws RecognitionException { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 3); { - setState(688); + setState(703); identifier(); } break; @@ -5355,13 +5389,13 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 80, RULE_constant); try { int _alt; - setState(717); + setState(732); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(691); + setState(706); match(NULL); } break; @@ -5369,7 +5403,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntervalLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(692); + setState(707); interval(); } break; @@ -5378,7 +5412,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(693); + setState(708); number(); } break; @@ -5387,7 +5421,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(694); + setState(709); booleanValue(); } break; @@ -5395,7 +5429,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(696); + setState(711); _errHandler.sync(this); _alt = 1; do { @@ -5403,7 +5437,7 @@ public final ConstantContext constant() throws RecognitionException { case 1: { { - setState(695); + setState(710); match(STRING); } } @@ -5411,9 +5445,9 @@ public final ConstantContext constant() throws RecognitionException { default: throw new NoViableAltException(this); } - setState(698); + setState(713); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,92,_ctx); + _alt = getInterpreter().adaptivePredict(_input,96,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; @@ -5421,7 +5455,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new ParamLiteralContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(700); + setState(715); match(PARAM); } break; @@ -5429,11 +5463,11 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DateEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(701); + setState(716); match(DATE_ESC); - setState(702); + setState(717); string(); - setState(703); + setState(718); match(ESC_END); } break; @@ -5441,11 +5475,11 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new 
TimeEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(705); + setState(720); match(TIME_ESC); - setState(706); + setState(721); string(); - setState(707); + setState(722); match(ESC_END); } break; @@ -5453,11 +5487,11 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new TimestampEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(709); + setState(724); match(TIMESTAMP_ESC); - setState(710); + setState(725); string(); - setState(711); + setState(726); match(ESC_END); } break; @@ -5465,11 +5499,11 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new GuidEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(713); + setState(728); match(GUID_ESC); - setState(714); + setState(729); string(); - setState(715); + setState(730); match(ESC_END); } break; @@ -5522,9 +5556,9 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(719); + setState(734); _la = _input.LA(1); - if ( !(((((_la - 106)) & ~0x3f) == 0 && ((1L << (_la - 106)) & ((1L << (EQ - 106)) | (1L << (NULLEQ - 106)) | (1L << (NEQ - 106)) | (1L << (LT - 106)) | (1L << (LTE - 106)) | (1L << (GT - 106)) | (1L << (GTE - 106)))) != 0)) ) { + if ( !(((((_la - 108)) & ~0x3f) == 0 && ((1L << (_la - 108)) & ((1L << (EQ - 108)) | (1L << (NULLEQ - 108)) | (1L << (NEQ - 108)) | (1L << (LT - 108)) | (1L << (LTE - 108)) | (1L << (GT - 108)) | (1L << (GTE - 108)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -5571,7 +5605,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(721); + setState(736); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -5639,13 +5673,13 @@ public final IntervalContext interval() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(723); + setState(738); match(INTERVAL); - setState(725); + setState(740); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(724); + setState(739); ((IntervalContext)_localctx).sign = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -5656,35 +5690,35 @@ public final IntervalContext interval() throws RecognitionException { } } - setState(729); + setState(744); switch (_input.LA(1)) { case INTEGER_VALUE: case DECIMAL_VALUE: { - setState(727); + setState(742); ((IntervalContext)_localctx).valueNumeric = number(); } break; case PARAM: case STRING: { - setState(728); + setState(743); ((IntervalContext)_localctx).valuePattern = string(); } break; default: throw new NoViableAltException(this); } - setState(731); + setState(746); ((IntervalContext)_localctx).leading = intervalField(); - setState(734); + setState(749); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,96,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,100,_ctx) ) { case 1: { - setState(732); + setState(747); match(TO); - setState(733); + setState(748); ((IntervalContext)_localctx).trailing = intervalField(); } break; @@ -5741,9 +5775,9 @@ public final IntervalFieldContext intervalField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(736); + setState(751); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << DAY) | (1L << DAYS) | (1L << HOUR) | (1L << HOURS) | (1L << MINUTE) | (1L << MINUTES) | (1L << MONTH) | (1L << MONTHS))) != 0) 
|| ((((_la - 78)) & ~0x3f) == 0 && ((1L << (_la - 78)) & ((1L << (SECOND - 78)) | (1L << (SECONDS - 78)) | (1L << (YEAR - 78)) | (1L << (YEARS - 78)))) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << DAY) | (1L << DAYS) | (1L << HOUR) | (1L << HOURS) | (1L << MINUTE) | (1L << MINUTES) | (1L << MONTH) | (1L << MONTHS))) != 0) || ((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (SECOND - 80)) | (1L << (SECONDS - 80)) | (1L << (YEAR - 80)) | (1L << (YEARS - 80)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -5799,7 +5833,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new PrimitiveDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(738); + setState(753); identifier(); } } @@ -5851,25 +5885,25 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(745); + setState(760); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,97,_ctx); + _alt = getInterpreter().adaptivePredict(_input,101,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(740); + setState(755); identifier(); - setState(741); + setState(756); match(DOT); } } } - setState(747); + setState(762); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,97,_ctx); + _alt = getInterpreter().adaptivePredict(_input,101,_ctx); } - setState(748); + setState(763); identifier(); } } @@ -5914,13 +5948,13 @@ public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); enterRule(_localctx, 94, RULE_identifier); try { - setState(752); + setState(767); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(750); + setState(765); quoteIdentifier(); } break; @@ -5967,7 +6001,7 @@ public final IdentifierContext identifier() throws RecognitionException { case DIGIT_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(751); + setState(766); unquoteIdentifier(); } break; @@ -6020,43 +6054,43 @@ public final TableIdentifierContext tableIdentifier() throws RecognitionExceptio enterRule(_localctx, 96, RULE_tableIdentifier); int _la; try { - setState(766); + setState(781); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,101,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,105,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(757); + setState(772); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (OPTIMIZED - 67)) | (1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)) | (1L << (IDENTIFIER - 
67)) | (1L << (DIGIT_IDENTIFIER - 67)) | (1L << (QUOTED_IDENTIFIER - 67)) | (1L << (BACKQUOTED_IDENTIFIER - 67)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 69)) & ~0x3f) == 0 && ((1L << (_la - 69)) & ((1L << (OPTIMIZED - 69)) | (1L << (PARSED - 69)) | (1L << (PHYSICAL - 69)) | (1L << (PLAN - 69)) | (1L << (RLIKE - 69)) | (1L << (QUERY - 69)) | (1L << (SCHEMAS - 69)) | (1L << (SECOND - 69)) | (1L << (SHOW - 69)) | (1L << (SYS - 69)) | (1L << (TABLES - 69)) | (1L << (TEXT - 69)) | (1L << (TYPE - 69)) | (1L << (TYPES - 69)) | (1L << (VERIFY - 69)) | (1L << (YEAR - 69)) | (1L << (IDENTIFIER - 69)) | (1L << (DIGIT_IDENTIFIER - 69)) | (1L << (QUOTED_IDENTIFIER - 69)) | (1L << (BACKQUOTED_IDENTIFIER - 69)))) != 0)) { { - setState(754); + setState(769); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(755); + setState(770); match(T__3); } } - setState(759); + setState(774); match(TABLE_IDENTIFIER); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(763); + setState(778); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,100,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,104,_ctx) ) { case 1: { - setState(760); + setState(775); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(761); + setState(776); match(T__3); } break; } - setState(765); + setState(780); ((TableIdentifierContext)_localctx).name = identifier(); } break; @@ -6123,13 +6157,13 @@ public final QuoteIdentifierContext quoteIdentifier() throws RecognitionExceptio QuoteIdentifierContext _localctx = new QuoteIdentifierContext(_ctx, getState()); enterRule(_localctx, 98, RULE_quoteIdentifier); try { - setState(770); + setState(785); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new QuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(768); + setState(783); match(QUOTED_IDENTIFIER); } break; @@ -6137,7 +6171,7 @@ public final QuoteIdentifierContext quoteIdentifier() throws RecognitionExceptio _localctx = new BackQuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(769); + setState(784); match(BACKQUOTED_IDENTIFIER); } break; @@ -6209,13 +6243,13 @@ public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionExce UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState()); enterRule(_localctx, 100, RULE_unquoteIdentifier); try { - setState(775); + setState(790); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(772); + setState(787); match(IDENTIFIER); } break; @@ -6261,7 +6295,7 @@ public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionExce _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(773); + setState(788); nonReserved(); } break; @@ -6269,7 +6303,7 @@ public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionExce _localctx = new DigitIdentifierContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(774); + setState(789); 
match(DIGIT_IDENTIFIER); } break; @@ -6338,13 +6372,13 @@ public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); enterRule(_localctx, 102, RULE_number); try { - setState(779); + setState(794); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(777); + setState(792); match(DECIMAL_VALUE); } break; @@ -6352,7 +6386,7 @@ public final NumberContext number() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(778); + setState(793); match(INTEGER_VALUE); } break; @@ -6400,7 +6434,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(781); + setState(796); _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); @@ -6456,13 +6490,13 @@ public final WhenClauseContext whenClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(783); + setState(798); match(WHEN); - setState(784); + setState(799); ((WhenClauseContext)_localctx).condition = expression(); - setState(785); + setState(800); match(THEN); - setState(786); + setState(801); ((WhenClauseContext)_localctx).result = expression(); } } @@ -6543,9 +6577,9 @@ public final NonReservedContext nonReserved() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(788); + setState(803); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 67)) & ~0x3f) == 0 && ((1L << (_la - 67)) & ((1L << (OPTIMIZED - 67)) | (1L << (PARSED - 67)) | (1L << (PHYSICAL - 67)) | (1L << (PLAN - 67)) | (1L << (RLIKE - 67)) | (1L << (QUERY - 67)) | (1L << (SCHEMAS - 67)) | (1L << (SECOND - 67)) | (1L << (SHOW - 67)) | (1L << (SYS - 67)) | (1L << (TABLES - 67)) | (1L << (TEXT - 67)) | (1L << (TYPE - 67)) | (1L << (TYPES - 67)) | (1L << (VERIFY - 67)) | (1L << (YEAR - 67)))) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 69)) & ~0x3f) == 0 && ((1L << (_la - 69)) & ((1L << (OPTIMIZED - 69)) | (1L << (PARSED - 69)) | (1L << (PHYSICAL - 69)) | (1L << (PLAN - 69)) | (1L << (RLIKE - 69)) | (1L << (QUERY - 69)) | (1L << (SCHEMAS - 69)) | (1L << (SECOND - 69)) | (1L << (SHOW - 69)) | (1L << (SYS - 69)) | (1L << (TABLES - 69)) | (1L << (TEXT - 69)) | (1L << (TYPE - 69)) | (1L << (TYPES - 69)) | (1L << (VERIFY - 69)) | (1L << (YEAR - 69)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -6603,7 +6637,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } public static 
final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\u0088\u0319\4\2\t"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\u008a\u0328\4\2\t"+ "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -6613,312 +6647,318 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3"+ "\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u0080\n\4\f\4\16\4\u0083\13\4\3\4\5"+ "\4\u0086\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u008f\n\4\f\4\16\4\u0092"+ - "\13\4\3\4\5\4\u0095\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u009c\n\4\3\4\3\4\3\4"+ - "\3\4\3\4\5\4\u00a3\n\4\3\4\3\4\3\4\5\4\u00a8\n\4\3\4\3\4\3\4\5\4\u00ad"+ - "\n\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u00b5\n\4\3\4\3\4\5\4\u00b9\n\4\3\4\3"+ - "\4\3\4\3\4\7\4\u00bf\n\4\f\4\16\4\u00c2\13\4\5\4\u00c4\n\4\3\4\3\4\3\4"+ - "\3\4\5\4\u00ca\n\4\3\4\3\4\3\4\5\4\u00cf\n\4\3\4\5\4\u00d2\n\4\3\4\3\4"+ - "\3\4\5\4\u00d7\n\4\3\4\5\4\u00da\n\4\5\4\u00dc\n\4\3\5\3\5\3\5\3\5\7\5"+ - "\u00e2\n\5\f\5\16\5\u00e5\13\5\5\5\u00e7\n\5\3\5\3\5\3\6\3\6\3\6\3\6\3"+ - "\6\3\6\7\6\u00f1\n\6\f\6\16\6\u00f4\13\6\5\6\u00f6\n\6\3\6\5\6\u00f9\n"+ - "\6\3\7\3\7\3\7\3\7\3\7\5\7\u0100\n\7\3\b\3\b\3\b\3\b\3\b\5\b\u0107\n\b"+ - "\3\t\3\t\5\t\u010b\n\t\3\t\3\t\5\t\u010f\n\t\3\n\3\n\5\n\u0113\n\n\3\n"+ - "\3\n\3\n\7\n\u0118\n\n\f\n\16\n\u011b\13\n\3\n\5\n\u011e\n\n\3\n\3\n\5"+ - "\n\u0122\n\n\3\n\3\n\3\n\5\n\u0127\n\n\3\n\3\n\5\n\u012b\n\n\3\13\3\13"+ - "\3\13\3\13\7\13\u0131\n\13\f\13\16\13\u0134\13\13\3\f\5\f\u0137\n\f\3"+ - "\f\3\f\3\f\7\f\u013c\n\f\f\f\16\f\u013f\13\f\3\r\3\r\3\16\3\16\3\16\3"+ - "\16\7\16\u0147\n\16\f\16\16\16\u014a\13\16\5\16\u014c\n\16\3\16\3\16\5"+ - "\16\u0150\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21"+ - "\u015c\n\21\3\21\5\21\u015f\n\21\3\22\3\22\7\22\u0163\n\22\f\22\16\22"+ - "\u0166\13\22\3\23\3\23\3\23\3\23\5\23\u016c\n\23\3\23\3\23\3\23\3\23\3"+ - "\23\5\23\u0173\n\23\3\24\5\24\u0176\n\24\3\24\3\24\5\24\u017a\n\24\3\24"+ - "\3\24\5\24\u017e\n\24\3\24\3\24\5\24\u0182\n\24\5\24\u0184\n\24\3\25\3"+ - "\25\3\25\3\25\3\25\3\25\3\25\7\25\u018d\n\25\f\25\16\25\u0190\13\25\3"+ - "\25\3\25\5\25\u0194\n\25\3\26\3\26\5\26\u0198\n\26\3\26\5\26\u019b\n\26"+ - "\3\26\3\26\3\26\3\26\5\26\u01a1\n\26\3\26\5\26\u01a4\n\26\3\26\3\26\3"+ - "\26\3\26\5\26\u01aa\n\26\3\26\5\26\u01ad\n\26\5\26\u01af\n\26\3\27\3\27"+ - "\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+ - "\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+ - "\3\30\3\30\3\30\5\30\u01d2\n\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01da"+ - "\n\30\f\30\16\30\u01dd\13\30\3\31\3\31\7\31\u01e1\n\31\f\31\16\31\u01e4"+ - "\13\31\3\32\3\32\5\32\u01e8\n\32\3\33\5\33\u01eb\n\33\3\33\3\33\3\33\3"+ - "\33\3\33\3\33\5\33\u01f3\n\33\3\33\3\33\3\33\3\33\3\33\7\33\u01fa\n\33"+ - "\f\33\16\33\u01fd\13\33\3\33\3\33\3\33\5\33\u0202\n\33\3\33\3\33\3\33"+ - "\3\33\3\33\3\33\5\33\u020a\n\33\3\33\3\33\3\33\5\33\u020f\n\33\3\33\3"+ - "\33\3\33\3\33\5\33\u0215\n\33\3\33\5\33\u0218\n\33\3\34\3\34\3\34\3\35"+ - "\3\35\5\35\u021f\n\35\3\36\3\36\3\36\3\36\3\36\3\36\5\36\u0227\n\36\3"+ - "\37\3\37\3\37\3\37\5\37\u022d\n\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37"+ - "\3\37\3\37\3\37\7\37\u0239\n\37\f\37\16\37\u023c\13\37\3 \3 \3 \3 \3 "+ - "\3 \3 \3 \5 \u0246\n \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \5 \u0255"+ - "\n \3 \6 
\u0258\n \r \16 \u0259\3 \3 \5 \u025e\n \3 \3 \5 \u0262\n \3"+ - " \3 \3 \7 \u0267\n \f \16 \u026a\13 \3!\3!\3!\5!\u026f\n!\3\"\3\"\3\""+ - "\3\"\3\"\3\"\3\"\3\"\3\"\3\"\5\"\u027b\n\"\3#\3#\3#\3#\3#\3#\3#\3$\3$"+ - "\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\5%\u0290\n%\3&\3&\3&\3&\3&\3&\3&\3\'\3"+ - "\'\3\'\3\'\3\'\5\'\u029e\n\'\3(\3(\3(\5(\u02a3\n(\3(\3(\3(\7(\u02a8\n"+ - "(\f(\16(\u02ab\13(\5(\u02ad\n(\3(\3(\3)\3)\3)\5)\u02b4\n)\3*\3*\3*\3*"+ - "\3*\6*\u02bb\n*\r*\16*\u02bc\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3"+ - "*\3*\3*\3*\5*\u02d0\n*\3+\3+\3,\3,\3-\3-\5-\u02d8\n-\3-\3-\5-\u02dc\n"+ - "-\3-\3-\3-\5-\u02e1\n-\3.\3.\3/\3/\3\60\3\60\3\60\7\60\u02ea\n\60\f\60"+ - "\16\60\u02ed\13\60\3\60\3\60\3\61\3\61\5\61\u02f3\n\61\3\62\3\62\3\62"+ - "\5\62\u02f8\n\62\3\62\3\62\3\62\3\62\5\62\u02fe\n\62\3\62\5\62\u0301\n"+ - "\62\3\63\3\63\5\63\u0305\n\63\3\64\3\64\3\64\5\64\u030a\n\64\3\65\3\65"+ - "\5\65\u030e\n\65\3\66\3\66\3\67\3\67\3\67\3\67\3\67\38\38\38\2\5.<>9\2"+ + "\13\4\3\4\5\4\u0095\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u009c\n\4\3\4\3\4\5\4"+ + "\u00a0\n\4\3\4\3\4\3\4\3\4\5\4\u00a6\n\4\3\4\3\4\3\4\5\4\u00ab\n\4\3\4"+ + "\3\4\3\4\5\4\u00b0\n\4\3\4\3\4\5\4\u00b4\n\4\3\4\3\4\3\4\5\4\u00b9\n\4"+ + "\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u00c1\n\4\3\4\3\4\5\4\u00c5\n\4\3\4\3\4\3"+ + "\4\3\4\7\4\u00cb\n\4\f\4\16\4\u00ce\13\4\5\4\u00d0\n\4\3\4\3\4\3\4\3\4"+ + "\5\4\u00d6\n\4\3\4\3\4\3\4\5\4\u00db\n\4\3\4\5\4\u00de\n\4\3\4\3\4\3\4"+ + "\5\4\u00e3\n\4\3\4\5\4\u00e6\n\4\5\4\u00e8\n\4\3\5\3\5\3\5\3\5\7\5\u00ee"+ + "\n\5\f\5\16\5\u00f1\13\5\5\5\u00f3\n\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\6"+ + "\7\6\u00fd\n\6\f\6\16\6\u0100\13\6\5\6\u0102\n\6\3\6\5\6\u0105\n\6\3\7"+ + "\3\7\3\7\3\7\3\7\5\7\u010c\n\7\3\b\3\b\3\b\3\b\3\b\5\b\u0113\n\b\3\t\3"+ + "\t\5\t\u0117\n\t\3\t\3\t\5\t\u011b\n\t\3\n\3\n\5\n\u011f\n\n\3\n\3\n\3"+ + "\n\7\n\u0124\n\n\f\n\16\n\u0127\13\n\3\n\5\n\u012a\n\n\3\n\3\n\5\n\u012e"+ + "\n\n\3\n\3\n\3\n\5\n\u0133\n\n\3\n\3\n\5\n\u0137\n\n\3\13\3\13\3\13\3"+ + "\13\7\13\u013d\n\13\f\13\16\13\u0140\13\13\3\f\5\f\u0143\n\f\3\f\3\f\3"+ + "\f\7\f\u0148\n\f\f\f\16\f\u014b\13\f\3\r\3\r\3\16\3\16\3\16\3\16\7\16"+ + "\u0153\n\16\f\16\16\16\u0156\13\16\5\16\u0158\n\16\3\16\3\16\5\16\u015c"+ + "\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21\u0168\n\21"+ + "\3\21\5\21\u016b\n\21\3\22\3\22\7\22\u016f\n\22\f\22\16\22\u0172\13\22"+ + "\3\23\3\23\3\23\3\23\5\23\u0178\n\23\3\23\3\23\3\23\3\23\3\23\5\23\u017f"+ + "\n\23\3\24\5\24\u0182\n\24\3\24\3\24\5\24\u0186\n\24\3\24\3\24\5\24\u018a"+ + "\n\24\3\24\3\24\5\24\u018e\n\24\5\24\u0190\n\24\3\25\3\25\3\25\3\25\3"+ + "\25\3\25\3\25\7\25\u0199\n\25\f\25\16\25\u019c\13\25\3\25\3\25\5\25\u01a0"+ + "\n\25\3\26\5\26\u01a3\n\26\3\26\3\26\5\26\u01a7\n\26\3\26\5\26\u01aa\n"+ + "\26\3\26\3\26\3\26\3\26\5\26\u01b0\n\26\3\26\5\26\u01b3\n\26\3\26\3\26"+ + "\3\26\3\26\5\26\u01b9\n\26\3\26\5\26\u01bc\n\26\5\26\u01be\n\26\3\27\3"+ + "\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ + "\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ + "\30\3\30\3\30\3\30\5\30\u01e1\n\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30"+ + "\u01e9\n\30\f\30\16\30\u01ec\13\30\3\31\3\31\7\31\u01f0\n\31\f\31\16\31"+ + "\u01f3\13\31\3\32\3\32\5\32\u01f7\n\32\3\33\5\33\u01fa\n\33\3\33\3\33"+ + "\3\33\3\33\3\33\3\33\5\33\u0202\n\33\3\33\3\33\3\33\3\33\3\33\7\33\u0209"+ + "\n\33\f\33\16\33\u020c\13\33\3\33\3\33\3\33\5\33\u0211\n\33\3\33\3\33"+ + "\3\33\3\33\3\33\3\33\5\33\u0219\n\33\3\33\3\33\3\33\5\33\u021e\n\33\3"+ + 
"\33\3\33\3\33\3\33\5\33\u0224\n\33\3\33\5\33\u0227\n\33\3\34\3\34\3\34"+ + "\3\35\3\35\5\35\u022e\n\35\3\36\3\36\3\36\3\36\3\36\3\36\5\36\u0236\n"+ + "\36\3\37\3\37\3\37\3\37\5\37\u023c\n\37\3\37\3\37\3\37\3\37\3\37\3\37"+ + "\3\37\3\37\3\37\3\37\7\37\u0248\n\37\f\37\16\37\u024b\13\37\3 \3 \3 \3"+ + " \3 \3 \3 \3 \5 \u0255\n \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \5 \u0264"+ + "\n \3 \6 \u0267\n \r \16 \u0268\3 \3 \5 \u026d\n \3 \3 \5 \u0271\n \3"+ + " \3 \3 \7 \u0276\n \f \16 \u0279\13 \3!\3!\3!\5!\u027e\n!\3\"\3\"\3\""+ + "\3\"\3\"\3\"\3\"\3\"\3\"\3\"\5\"\u028a\n\"\3#\3#\3#\3#\3#\3#\3#\3$\3$"+ + "\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\5%\u029f\n%\3&\3&\3&\3&\3&\3&\3&\3\'\3"+ + "\'\3\'\3\'\3\'\5\'\u02ad\n\'\3(\3(\3(\5(\u02b2\n(\3(\3(\3(\7(\u02b7\n"+ + "(\f(\16(\u02ba\13(\5(\u02bc\n(\3(\3(\3)\3)\3)\5)\u02c3\n)\3*\3*\3*\3*"+ + "\3*\6*\u02ca\n*\r*\16*\u02cb\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3"+ + "*\3*\3*\3*\5*\u02df\n*\3+\3+\3,\3,\3-\3-\5-\u02e7\n-\3-\3-\5-\u02eb\n"+ + "-\3-\3-\3-\5-\u02f0\n-\3.\3.\3/\3/\3\60\3\60\3\60\7\60\u02f9\n\60\f\60"+ + "\16\60\u02fc\13\60\3\60\3\60\3\61\3\61\5\61\u0302\n\61\3\62\3\62\3\62"+ + "\5\62\u0307\n\62\3\62\3\62\3\62\3\62\5\62\u030d\n\62\3\62\5\62\u0310\n"+ + "\62\3\63\3\63\5\63\u0314\n\63\3\64\3\64\3\64\5\64\u0319\n\64\3\65\3\65"+ + "\5\65\u031d\n\65\3\66\3\66\3\67\3\67\3\67\3\67\3\67\38\38\38\2\5.<>9\2"+ "\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJL"+ - "NPRTVXZ\\^`bdfhjln\2\22\b\2\7\7\t\t\"\"::EEII\4\2,,WW\4\2\t\tEE\4\2))"+ - "\61\61\3\2\34\35\3\2st\4\2\7\7}}\4\2\r\r\34\34\4\2\'\'\66\66\4\2\7\7\36"+ - "\36\3\2uw\3\2lr\4\2&&YY\7\2\31\32/\60>EEIKMPSTVW[\\^^bb\u0378\2p\3\2\2"+ - "\2\4s\3\2\2\2\6\u00db\3\2\2\2\b\u00e6\3\2\2\2\n\u00ea\3\2\2\2\f\u00ff"+ - "\3\2\2\2\16\u0106\3\2\2\2\20\u0108\3\2\2\2\22\u0110\3\2\2\2\24\u012c\3"+ - "\2\2\2\26\u0136\3\2\2\2\30\u0140\3\2\2\2\32\u014f\3\2\2\2\34\u0151\3\2"+ - "\2\2\36\u0157\3\2\2\2 \u0159\3\2\2\2\"\u0160\3\2\2\2$\u0172\3\2\2\2&\u0183"+ - "\3\2\2\2(\u0193\3\2\2\2*\u01ae\3\2\2\2,\u01b0\3\2\2\2.\u01d1\3\2\2\2\60"+ - "\u01e2\3\2\2\2\62\u01e5\3\2\2\2\64\u0217\3\2\2\2\66\u0219\3\2\2\28\u021c"+ - "\3\2\2\2:\u0226\3\2\2\2<\u022c\3\2\2\2>\u0261\3\2\2\2@\u026e\3\2\2\2B"+ - "\u027a\3\2\2\2D\u027c\3\2\2\2F\u0283\3\2\2\2H\u028f\3\2\2\2J\u0291\3\2"+ - "\2\2L\u029d\3\2\2\2N\u029f\3\2\2\2P\u02b3\3\2\2\2R\u02cf\3\2\2\2T\u02d1"+ - "\3\2\2\2V\u02d3\3\2\2\2X\u02d5\3\2\2\2Z\u02e2\3\2\2\2\\\u02e4\3\2\2\2"+ - "^\u02eb\3\2\2\2`\u02f2\3\2\2\2b\u0300\3\2\2\2d\u0304\3\2\2\2f\u0309\3"+ - "\2\2\2h\u030d\3\2\2\2j\u030f\3\2\2\2l\u0311\3\2\2\2n\u0316\3\2\2\2pq\5"+ - "\6\4\2qr\7\2\2\3r\3\3\2\2\2st\5,\27\2tu\7\2\2\3u\5\3\2\2\2v\u00dc\5\b"+ - "\5\2w\u0085\7$\2\2x\u0081\7\3\2\2yz\7K\2\2z\u0080\t\2\2\2{|\7(\2\2|\u0080"+ - "\t\3\2\2}~\7^\2\2~\u0080\5V,\2\177y\3\2\2\2\177{\3\2\2\2\177}\3\2\2\2"+ + "NPRTVXZ\\^`bdfhjln\2\22\b\2\7\7\t\t\"\"<ARSde\3\2}~\30\2\b\t\23\24"+ + "\26\31\33\33\"\"$$\'(+-\60\60\65\6588;<>>@@GGKMORUVXY]^``dd\u038b\2p\3"+ + "\2\2\2\4s\3\2\2\2\6\u00e7\3\2\2\2\b\u00f2\3\2\2\2\n\u00f6\3\2\2\2\f\u010b"+ + "\3\2\2\2\16\u0112\3\2\2\2\20\u0114\3\2\2\2\22\u011c\3\2\2\2\24\u0138\3"+ + "\2\2\2\26\u0142\3\2\2\2\30\u014c\3\2\2\2\32\u015b\3\2\2\2\34\u015d\3\2"+ + "\2\2\36\u0163\3\2\2\2 \u0165\3\2\2\2\"\u016c\3\2\2\2$\u017e\3\2\2\2&\u018f"+ + "\3\2\2\2(\u019f\3\2\2\2*\u01bd\3\2\2\2,\u01bf\3\2\2\2.\u01e0\3\2\2\2\60"+ + "\u01f1\3\2\2\2\62\u01f4\3\2\2\2\64\u0226\3\2\2\2\66\u0228\3\2\2\28\u022b"+ + "\3\2\2\2:\u0235\3\2\2\2<\u023b\3\2\2\2>\u0270\3\2\2\2@\u027d\3\2\2\2B"+ + 
"\u0289\3\2\2\2D\u028b\3\2\2\2F\u0292\3\2\2\2H\u029e\3\2\2\2J\u02a0\3\2"+ + "\2\2L\u02ac\3\2\2\2N\u02ae\3\2\2\2P\u02c2\3\2\2\2R\u02de\3\2\2\2T\u02e0"+ + "\3\2\2\2V\u02e2\3\2\2\2X\u02e4\3\2\2\2Z\u02f1\3\2\2\2\\\u02f3\3\2\2\2"+ + "^\u02fa\3\2\2\2`\u0301\3\2\2\2b\u030f\3\2\2\2d\u0313\3\2\2\2f\u0318\3"+ + "\2\2\2h\u031c\3\2\2\2j\u031e\3\2\2\2l\u0320\3\2\2\2n\u0325\3\2\2\2pq\5"+ + "\6\4\2qr\7\2\2\3r\3\3\2\2\2st\5,\27\2tu\7\2\2\3u\5\3\2\2\2v\u00e8\5\b"+ + "\5\2w\u0085\7$\2\2x\u0081\7\3\2\2yz\7M\2\2z\u0080\t\2\2\2{|\7(\2\2|\u0080"+ + "\t\3\2\2}~\7`\2\2~\u0080\5V,\2\177y\3\2\2\2\177{\3\2\2\2\177}\3\2\2\2"+ "\u0080\u0083\3\2\2\2\u0081\177\3\2\2\2\u0081\u0082\3\2\2\2\u0082\u0084"+ "\3\2\2\2\u0083\u0081\3\2\2\2\u0084\u0086\7\4\2\2\u0085x\3\2\2\2\u0085"+ - "\u0086\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u00dc\5\6\4\2\u0088\u0094\7\33"+ - "\2\2\u0089\u0090\7\3\2\2\u008a\u008b\7K\2\2\u008b\u008f\t\4\2\2\u008c"+ + "\u0086\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u00e8\5\6\4\2\u0088\u0094\7\33"+ + "\2\2\u0089\u0090\7\3\2\2\u008a\u008b\7M\2\2\u008b\u008f\t\4\2\2\u008c"+ "\u008d\7(\2\2\u008d\u008f\t\3\2\2\u008e\u008a\3\2\2\2\u008e\u008c\3\2"+ "\2\2\u008f\u0092\3\2\2\2\u0090\u008e\3\2\2\2\u0090\u0091\3\2\2\2\u0091"+ "\u0093\3\2\2\2\u0092\u0090\3\2\2\2\u0093\u0095\7\4\2\2\u0094\u0089\3\2"+ - "\2\2\u0094\u0095\3\2\2\2\u0095\u0096\3\2\2\2\u0096\u00dc\5\6\4\2\u0097"+ - "\u0098\7S\2\2\u0098\u009b\7V\2\2\u0099\u009c\5\66\34\2\u009a\u009c\5b"+ - "\62\2\u009b\u0099\3\2\2\2\u009b\u009a\3\2\2\2\u009b\u009c\3\2\2\2\u009c"+ - "\u00dc\3\2\2\2\u009d\u009e\7S\2\2\u009e\u009f\7\24\2\2\u009f\u00a2\t\5"+ - "\2\2\u00a0\u00a3\5\66\34\2\u00a1\u00a3\5b\62\2\u00a2\u00a0\3\2\2\2\u00a2"+ - "\u00a1\3\2\2\2\u00a3\u00dc\3\2\2\2\u00a4\u00a7\t\6\2\2\u00a5\u00a8\5\66"+ - "\34\2\u00a6\u00a8\5b\62\2\u00a7\u00a5\3\2\2\2\u00a7\u00a6\3\2\2\2\u00a8"+ - "\u00dc\3\2\2\2\u00a9\u00aa\7S\2\2\u00aa\u00ac\7+\2\2\u00ab\u00ad\5\66"+ - "\34\2\u00ac\u00ab\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad\u00dc\3\2\2\2\u00ae"+ - "\u00af\7S\2\2\u00af\u00dc\7O\2\2\u00b0\u00b1\7T\2\2\u00b1\u00b4\7V\2\2"+ - "\u00b2\u00b3\7\22\2\2\u00b3\u00b5\5\66\34\2\u00b4\u00b2\3\2\2\2\u00b4"+ - "\u00b5\3\2\2\2\u00b5\u00b8\3\2\2\2\u00b6\u00b9\5\66\34\2\u00b7\u00b9\5"+ - "b\62\2\u00b8\u00b6\3\2\2\2\u00b8\u00b7\3\2\2\2\u00b8\u00b9\3\2\2\2\u00b9"+ - "\u00c3\3\2\2\2\u00ba\u00bb\7[\2\2\u00bb\u00c0\5j\66\2\u00bc\u00bd\7\5"+ - "\2\2\u00bd\u00bf\5j\66\2\u00be\u00bc\3\2\2\2\u00bf\u00c2\3\2\2\2\u00c0"+ - "\u00be\3\2\2\2\u00c0\u00c1\3\2\2\2\u00c1\u00c4\3\2\2\2\u00c2\u00c0\3\2"+ - "\2\2\u00c3\u00ba\3\2\2\2\u00c3\u00c4\3\2\2\2\u00c4\u00dc\3\2\2\2\u00c5"+ - "\u00c6\7T\2\2\u00c6\u00c9\7\24\2\2\u00c7\u00c8\7\22\2\2\u00c8\u00ca\5"+ - "j\66\2\u00c9\u00c7\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\u00ce\3\2\2\2\u00cb"+ - "\u00cc\7U\2\2\u00cc\u00cf\5\66\34\2\u00cd\u00cf\5b\62\2\u00ce\u00cb\3"+ - "\2\2\2\u00ce\u00cd\3\2\2\2\u00ce\u00cf\3\2\2\2\u00cf\u00d1\3\2\2\2\u00d0"+ - "\u00d2\5\66\34\2\u00d1\u00d0\3\2\2\2\u00d1\u00d2\3\2\2\2\u00d2\u00dc\3"+ - "\2\2\2\u00d3\u00d4\7T\2\2\u00d4\u00d9\7\\\2\2\u00d5\u00d7\t\7\2\2\u00d6"+ - "\u00d5\3\2\2\2\u00d6\u00d7\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\u00da\5h"+ - "\65\2\u00d9\u00d6\3\2\2\2\u00d9\u00da\3\2\2\2\u00da\u00dc\3\2\2\2\u00db"+ - "v\3\2\2\2\u00dbw\3\2\2\2\u00db\u0088\3\2\2\2\u00db\u0097\3\2\2\2\u00db"+ - "\u009d\3\2\2\2\u00db\u00a4\3\2\2\2\u00db\u00a9\3\2\2\2\u00db\u00ae\3\2"+ - "\2\2\u00db\u00b0\3\2\2\2\u00db\u00c5\3\2\2\2\u00db\u00d3\3\2\2\2\u00dc"+ - "\7\3\2\2\2\u00dd\u00de\7a\2\2\u00de\u00e3\5\34\17\2\u00df\u00e0\7\5\2"+ - 
"\2\u00e0\u00e2\5\34\17\2\u00e1\u00df\3\2\2\2\u00e2\u00e5\3\2\2\2\u00e3"+ - "\u00e1\3\2\2\2\u00e3\u00e4\3\2\2\2\u00e4\u00e7\3\2\2\2\u00e5\u00e3\3\2"+ - "\2\2\u00e6\u00dd\3\2\2\2\u00e6\u00e7\3\2\2\2\u00e7\u00e8\3\2\2\2\u00e8"+ - "\u00e9\5\n\6\2\u00e9\t\3\2\2\2\u00ea\u00f5\5\16\b\2\u00eb\u00ec\7G\2\2"+ - "\u00ec\u00ed\7\17\2\2\u00ed\u00f2\5\20\t\2\u00ee\u00ef\7\5\2\2\u00ef\u00f1"+ - "\5\20\t\2\u00f0\u00ee\3\2\2\2\u00f1\u00f4\3\2\2\2\u00f2\u00f0\3\2\2\2"+ - "\u00f2\u00f3\3\2\2\2\u00f3\u00f6\3\2\2\2\u00f4\u00f2\3\2\2\2\u00f5\u00eb"+ - "\3\2\2\2\u00f5\u00f6\3\2\2\2\u00f6\u00f8\3\2\2\2\u00f7\u00f9\5\f\7\2\u00f8"+ - "\u00f7\3\2\2\2\u00f8\u00f9\3\2\2\2\u00f9\13\3\2\2\2\u00fa\u00fb\79\2\2"+ - "\u00fb\u0100\t\b\2\2\u00fc\u00fd\7f\2\2\u00fd\u00fe\t\b\2\2\u00fe\u0100"+ - "\7k\2\2\u00ff\u00fa\3\2\2\2\u00ff\u00fc\3\2\2\2\u0100\r\3\2\2\2\u0101"+ - "\u0107\5\22\n\2\u0102\u0103\7\3\2\2\u0103\u0104\5\n\6\2\u0104\u0105\7"+ - "\4\2\2\u0105\u0107\3\2\2\2\u0106\u0101\3\2\2\2\u0106\u0102\3\2\2\2\u0107"+ - "\17\3\2\2\2\u0108\u010a\5,\27\2\u0109\u010b\t\t\2\2\u010a\u0109\3\2\2"+ - "\2\u010a\u010b\3\2\2\2\u010b\u010e\3\2\2\2\u010c\u010d\7C\2\2\u010d\u010f"+ - "\t\n\2\2\u010e\u010c\3\2\2\2\u010e\u010f\3\2\2\2\u010f\21\3\2\2\2\u0110"+ - "\u0112\7R\2\2\u0111\u0113\5\36\20\2\u0112\u0111\3\2\2\2\u0112\u0113\3"+ - "\2\2\2\u0113\u0114\3\2\2\2\u0114\u0119\5 \21\2\u0115\u0116\7\5\2\2\u0116"+ - "\u0118\5 \21\2\u0117\u0115\3\2\2\2\u0118\u011b\3\2\2\2\u0119\u0117\3\2"+ - "\2\2\u0119\u011a\3\2\2\2\u011a\u011d\3\2\2\2\u011b\u0119\3\2\2\2\u011c"+ - "\u011e\5\24\13\2\u011d\u011c\3\2\2\2\u011d\u011e\3\2\2\2\u011e\u0121\3"+ - "\2\2\2\u011f\u0120\7`\2\2\u0120\u0122\5.\30\2\u0121\u011f\3\2\2\2\u0121"+ - "\u0122\3\2\2\2\u0122\u0126\3\2\2\2\u0123\u0124\7-\2\2\u0124\u0125\7\17"+ - "\2\2\u0125\u0127\5\26\f\2\u0126\u0123\3\2\2\2\u0126\u0127\3\2\2\2\u0127"+ - "\u012a\3\2\2\2\u0128\u0129\7.\2\2\u0129\u012b\5.\30\2\u012a\u0128\3\2"+ - "\2\2\u012a\u012b\3\2\2\2\u012b\23\3\2\2\2\u012c\u012d\7)\2\2\u012d\u0132"+ - "\5\"\22\2\u012e\u012f\7\5\2\2\u012f\u0131\5\"\22\2\u0130\u012e\3\2\2\2"+ - "\u0131\u0134\3\2\2\2\u0132\u0130\3\2\2\2\u0132\u0133\3\2\2\2\u0133\25"+ - "\3\2\2\2\u0134\u0132\3\2\2\2\u0135\u0137\5\36\20\2\u0136\u0135\3\2\2\2"+ - "\u0136\u0137\3\2\2\2\u0137\u0138\3\2\2\2\u0138\u013d\5\30\r\2\u0139\u013a"+ - "\7\5\2\2\u013a\u013c\5\30\r\2\u013b\u0139\3\2\2\2\u013c\u013f\3\2\2\2"+ - "\u013d\u013b\3\2\2\2\u013d\u013e\3\2\2\2\u013e\27\3\2\2\2\u013f\u013d"+ - "\3\2\2\2\u0140\u0141\5\32\16\2\u0141\31\3\2\2\2\u0142\u014b\7\3\2\2\u0143"+ - "\u0148\5,\27\2\u0144\u0145\7\5\2\2\u0145\u0147\5,\27\2\u0146\u0144\3\2"+ - "\2\2\u0147\u014a\3\2\2\2\u0148\u0146\3\2\2\2\u0148\u0149\3\2\2\2\u0149"+ - "\u014c\3\2\2\2\u014a\u0148\3\2\2\2\u014b\u0143\3\2\2\2\u014b\u014c\3\2"+ - "\2\2\u014c\u014d\3\2\2\2\u014d\u0150\7\4\2\2\u014e\u0150\5,\27\2\u014f"+ - "\u0142\3\2\2\2\u014f\u014e\3\2\2\2\u0150\33\3\2\2\2\u0151\u0152\5`\61"+ - "\2\u0152\u0153\7\f\2\2\u0153\u0154\7\3\2\2\u0154\u0155\5\n\6\2\u0155\u0156"+ - "\7\4\2\2\u0156\35\3\2\2\2\u0157\u0158\t\13\2\2\u0158\37\3\2\2\2\u0159"+ - "\u015e\5,\27\2\u015a\u015c\7\f\2\2\u015b\u015a\3\2\2\2\u015b\u015c\3\2"+ - "\2\2\u015c\u015d\3\2\2\2\u015d\u015f\5`\61\2\u015e\u015b\3\2\2\2\u015e"+ - "\u015f\3\2\2\2\u015f!\3\2\2\2\u0160\u0164\5*\26\2\u0161\u0163\5$\23\2"+ - "\u0162\u0161\3\2\2\2\u0163\u0166\3\2\2\2\u0164\u0162\3\2\2\2\u0164\u0165"+ - "\3\2\2\2\u0165#\3\2\2\2\u0166\u0164\3\2\2\2\u0167\u0168\5&\24\2\u0168"+ - "\u0169\7\65\2\2\u0169\u016b\5*\26\2\u016a\u016c\5(\25\2\u016b\u016a\3"+ - 
"\2\2\2\u016b\u016c\3\2\2\2\u016c\u0173\3\2\2\2\u016d\u016e\7@\2\2\u016e"+ - "\u016f\5&\24\2\u016f\u0170\7\65\2\2\u0170\u0171\5*\26\2\u0171\u0173\3"+ - "\2\2\2\u0172\u0167\3\2\2\2\u0172\u016d\3\2\2\2\u0173%\3\2\2\2\u0174\u0176"+ - "\7\62\2\2\u0175\u0174\3\2\2\2\u0175\u0176\3\2\2\2\u0176\u0184\3\2\2\2"+ - "\u0177\u0179\7\67\2\2\u0178\u017a\7H\2\2\u0179\u0178\3\2\2\2\u0179\u017a"+ - "\3\2\2\2\u017a\u0184\3\2\2\2\u017b\u017d\7L\2\2\u017c\u017e\7H\2\2\u017d"+ - "\u017c\3\2\2\2\u017d\u017e\3\2\2\2\u017e\u0184\3\2\2\2\u017f\u0181\7*"+ - "\2\2\u0180\u0182\7H\2\2\u0181\u0180\3\2\2\2\u0181\u0182\3\2\2\2\u0182"+ - "\u0184\3\2\2\2\u0183\u0175\3\2\2\2\u0183\u0177\3\2\2\2\u0183\u017b\3\2"+ - "\2\2\u0183\u017f\3\2\2\2\u0184\'\3\2\2\2\u0185\u0186\7D\2\2\u0186\u0194"+ - "\5.\30\2\u0187\u0188\7]\2\2\u0188\u0189\7\3\2\2\u0189\u018e\5`\61\2\u018a"+ - "\u018b\7\5\2\2\u018b\u018d\5`\61\2\u018c\u018a\3\2\2\2\u018d\u0190\3\2"+ - "\2\2\u018e\u018c\3\2\2\2\u018e\u018f\3\2\2\2\u018f\u0191\3\2\2\2\u0190"+ - "\u018e\3\2\2\2\u0191\u0192\7\4\2\2\u0192\u0194\3\2\2\2\u0193\u0185\3\2"+ - "\2\2\u0193\u0187\3\2\2\2\u0194)\3\2\2\2\u0195\u019a\5b\62\2\u0196\u0198"+ - "\7\f\2\2\u0197\u0196\3\2\2\2\u0197\u0198\3\2\2\2\u0198\u0199\3\2\2\2\u0199"+ - "\u019b\5^\60\2\u019a\u0197\3\2\2\2\u019a\u019b\3\2\2\2\u019b\u01af\3\2"+ - "\2\2\u019c\u019d\7\3\2\2\u019d\u019e\5\n\6\2\u019e\u01a3\7\4\2\2\u019f"+ - "\u01a1\7\f\2\2\u01a0\u019f\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1\u01a2\3\2"+ - "\2\2\u01a2\u01a4\5^\60\2\u01a3\u01a0\3\2\2\2\u01a3\u01a4\3\2\2\2\u01a4"+ - "\u01af\3\2\2\2\u01a5\u01a6\7\3\2\2\u01a6\u01a7\5\"\22\2\u01a7\u01ac\7"+ - "\4\2\2\u01a8\u01aa\7\f\2\2\u01a9\u01a8\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa"+ - "\u01ab\3\2\2\2\u01ab\u01ad\5^\60\2\u01ac\u01a9\3\2\2\2\u01ac\u01ad\3\2"+ - "\2\2\u01ad\u01af\3\2\2\2\u01ae\u0195\3\2\2\2\u01ae\u019c\3\2\2\2\u01ae"+ - "\u01a5\3\2\2\2\u01af+\3\2\2\2\u01b0\u01b1\5.\30\2\u01b1-\3\2\2\2\u01b2"+ - "\u01b3\b\30\1\2\u01b3\u01b4\7A\2\2\u01b4\u01d2\5.\30\n\u01b5\u01b6\7#"+ - "\2\2\u01b6\u01b7\7\3\2\2\u01b7\u01b8\5\b\5\2\u01b8\u01b9\7\4\2\2\u01b9"+ - "\u01d2\3\2\2\2\u01ba\u01bb\7N\2\2\u01bb\u01bc\7\3\2\2\u01bc\u01bd\5j\66"+ - "\2\u01bd\u01be\5\60\31\2\u01be\u01bf\7\4\2\2\u01bf\u01d2\3\2\2\2\u01c0"+ - "\u01c1\7;\2\2\u01c1\u01c2\7\3\2\2\u01c2\u01c3\5^\60\2\u01c3\u01c4\7\5"+ - "\2\2\u01c4\u01c5\5j\66\2\u01c5\u01c6\5\60\31\2\u01c6\u01c7\7\4\2\2\u01c7"+ - "\u01d2\3\2\2\2\u01c8\u01c9\7;\2\2\u01c9\u01ca\7\3\2\2\u01ca\u01cb\5j\66"+ - "\2\u01cb\u01cc\7\5\2\2\u01cc\u01cd\5j\66\2\u01cd\u01ce\5\60\31\2\u01ce"+ - "\u01cf\7\4\2\2\u01cf\u01d2\3\2\2\2\u01d0\u01d2\5\62\32\2\u01d1\u01b2\3"+ - "\2\2\2\u01d1\u01b5\3\2\2\2\u01d1\u01ba\3\2\2\2\u01d1\u01c0\3\2\2\2\u01d1"+ - "\u01c8\3\2\2\2\u01d1\u01d0\3\2\2\2\u01d2\u01db\3\2\2\2\u01d3\u01d4\f\4"+ - "\2\2\u01d4\u01d5\7\n\2\2\u01d5\u01da\5.\30\5\u01d6\u01d7\f\3\2\2\u01d7"+ - "\u01d8\7F\2\2\u01d8\u01da\5.\30\4\u01d9\u01d3\3\2\2\2\u01d9\u01d6\3\2"+ - "\2\2\u01da\u01dd\3\2\2\2\u01db\u01d9\3\2\2\2\u01db\u01dc\3\2\2\2\u01dc"+ - "/\3\2\2\2\u01dd\u01db\3\2\2\2\u01de\u01df\7\5\2\2\u01df\u01e1\5j\66\2"+ - "\u01e0\u01de\3\2\2\2\u01e1\u01e4\3\2\2\2\u01e2\u01e0\3\2\2\2\u01e2\u01e3"+ - "\3\2\2\2\u01e3\61\3\2\2\2\u01e4\u01e2\3\2\2\2\u01e5\u01e7\5<\37\2\u01e6"+ - "\u01e8\5\64\33\2\u01e7\u01e6\3\2\2\2\u01e7\u01e8\3\2\2\2\u01e8\63\3\2"+ - "\2\2\u01e9\u01eb\7A\2\2\u01ea\u01e9\3\2\2\2\u01ea\u01eb\3\2\2\2\u01eb"+ - "\u01ec\3\2\2\2\u01ec\u01ed\7\16\2\2\u01ed\u01ee\5<\37\2\u01ee\u01ef\7"+ - "\n\2\2\u01ef\u01f0\5<\37\2\u01f0\u0218\3\2\2\2\u01f1\u01f3\7A\2\2\u01f2"+ - 
"\u01f1\3\2\2\2\u01f2\u01f3\3\2\2\2\u01f3\u01f4\3\2\2\2\u01f4\u01f5\7\61"+ - "\2\2\u01f5\u01f6\7\3\2\2\u01f6\u01fb\5<\37\2\u01f7\u01f8\7\5\2\2\u01f8"+ - "\u01fa\5<\37\2\u01f9\u01f7\3\2\2\2\u01fa\u01fd\3\2\2\2\u01fb\u01f9\3\2"+ - "\2\2\u01fb\u01fc\3\2\2\2\u01fc\u01fe\3\2\2\2\u01fd\u01fb\3\2\2\2\u01fe"+ - "\u01ff\7\4\2\2\u01ff\u0218\3\2\2\2\u0200\u0202\7A\2\2\u0201\u0200\3\2"+ - "\2\2\u0201\u0202\3\2\2\2\u0202\u0203\3\2\2\2\u0203\u0204\7\61\2\2\u0204"+ - "\u0205\7\3\2\2\u0205\u0206\5\b\5\2\u0206\u0207\7\4\2\2\u0207\u0218\3\2"+ - "\2\2\u0208\u020a\7A\2\2\u0209\u0208\3\2\2\2\u0209\u020a\3\2\2\2\u020a"+ - "\u020b\3\2\2\2\u020b\u020c\78\2\2\u020c\u0218\58\35\2\u020d\u020f\7A\2"+ - "\2\u020e\u020d\3\2\2\2\u020e\u020f\3\2\2\2\u020f\u0210\3\2\2\2\u0210\u0211"+ - "\7M\2\2\u0211\u0218\5j\66\2\u0212\u0214\7\64\2\2\u0213\u0215\7A\2\2\u0214"+ - "\u0213\3\2\2\2\u0214\u0215\3\2\2\2\u0215\u0216\3\2\2\2\u0216\u0218\7B"+ - "\2\2\u0217\u01ea\3\2\2\2\u0217\u01f2\3\2\2\2\u0217\u0201\3\2\2\2\u0217"+ - "\u0209\3\2\2\2\u0217\u020e\3\2\2\2\u0217\u0212\3\2\2\2\u0218\65\3\2\2"+ - "\2\u0219\u021a\78\2\2\u021a\u021b\58\35\2\u021b\67\3\2\2\2\u021c\u021e"+ - "\5j\66\2\u021d\u021f\5:\36\2\u021e\u021d\3\2\2\2\u021e\u021f\3\2\2\2\u021f"+ - "9\3\2\2\2\u0220\u0221\7!\2\2\u0221\u0227\5j\66\2\u0222\u0223\7d\2\2\u0223"+ - "\u0224\5j\66\2\u0224\u0225\7k\2\2\u0225\u0227\3\2\2\2\u0226\u0220\3\2"+ - "\2\2\u0226\u0222\3\2\2\2\u0227;\3\2\2\2\u0228\u0229\b\37\1\2\u0229\u022d"+ - "\5> \2\u022a\u022b\t\7\2\2\u022b\u022d\5<\37\6\u022c\u0228\3\2\2\2\u022c"+ - "\u022a\3\2\2\2\u022d\u023a\3\2\2\2\u022e\u022f\f\5\2\2\u022f\u0230\t\f"+ - "\2\2\u0230\u0239\5<\37\6\u0231\u0232\f\4\2\2\u0232\u0233\t\7\2\2\u0233"+ - "\u0239\5<\37\5\u0234\u0235\f\3\2\2\u0235\u0236\5T+\2\u0236\u0237\5<\37"+ - "\4\u0237\u0239\3\2\2\2\u0238\u022e\3\2\2\2\u0238\u0231\3\2\2\2\u0238\u0234"+ - "\3\2\2\2\u0239\u023c\3\2\2\2\u023a\u0238\3\2\2\2\u023a\u023b\3\2\2\2\u023b"+ - "=\3\2\2\2\u023c\u023a\3\2\2\2\u023d\u023e\b \1\2\u023e\u0262\5B\"\2\u023f"+ - "\u0262\5H%\2\u0240\u0262\5@!\2\u0241\u0262\5R*\2\u0242\u0243\5^\60\2\u0243"+ - "\u0244\7z\2\2\u0244\u0246\3\2\2\2\u0245\u0242\3\2\2\2\u0245\u0246\3\2"+ - "\2\2\u0246\u0247\3\2\2\2\u0247\u0262\7u\2\2\u0248\u0262\5L\'\2\u0249\u024a"+ - "\7\3\2\2\u024a\u024b\5\b\5\2\u024b\u024c\7\4\2\2\u024c\u0262\3\2\2\2\u024d"+ - "\u0262\5^\60\2\u024e\u024f\7\3\2\2\u024f\u0250\5,\27\2\u0250\u0251\7\4"+ - "\2\2\u0251\u0262\3\2\2\2\u0252\u0254\7\20\2\2\u0253\u0255\5.\30\2\u0254"+ - "\u0253\3\2\2\2\u0254\u0255\3\2\2\2\u0255\u0257\3\2\2\2\u0256\u0258\5l"+ - "\67\2\u0257\u0256\3\2\2\2\u0258\u0259\3\2\2\2\u0259\u0257\3\2\2\2\u0259"+ - "\u025a\3\2\2\2\u025a\u025d\3\2\2\2\u025b\u025c\7\37\2\2\u025c\u025e\5"+ - ".\30\2\u025d\u025b\3\2\2\2\u025d\u025e\3\2\2\2\u025e\u025f\3\2\2\2\u025f"+ - "\u0260\7 \2\2\u0260\u0262\3\2\2\2\u0261\u023d\3\2\2\2\u0261\u023f\3\2"+ - "\2\2\u0261\u0240\3\2\2\2\u0261\u0241\3\2\2\2\u0261\u0245\3\2\2\2\u0261"+ - "\u0248\3\2\2\2\u0261\u0249\3\2\2\2\u0261\u024d\3\2\2\2\u0261\u024e\3\2"+ - "\2\2\u0261\u0252\3\2\2\2\u0262\u0268\3\2\2\2\u0263\u0264\f\f\2\2\u0264"+ - "\u0265\7x\2\2\u0265\u0267\5\\/\2\u0266\u0263\3\2\2\2\u0267\u026a\3\2\2"+ - "\2\u0268\u0266\3\2\2\2\u0268\u0269\3\2\2\2\u0269?\3\2\2\2\u026a\u0268"+ - "\3\2\2\2\u026b\u026f\7\30\2\2\u026c\u026f\7\26\2\2\u026d\u026f\7\27\2"+ - "\2\u026e\u026b\3\2\2\2\u026e\u026c\3\2\2\2\u026e\u026d\3\2\2\2\u026fA"+ - "\3\2\2\2\u0270\u027b\5D#\2\u0271\u0272\7e\2\2\u0272\u0273\5D#\2\u0273"+ - "\u0274\7k\2\2\u0274\u027b\3\2\2\2\u0275\u027b\5F$\2\u0276\u0277\7e\2\2"+ - 
"\u0277\u0278\5F$\2\u0278\u0279\7k\2\2\u0279\u027b\3\2\2\2\u027a\u0270"+ - "\3\2\2\2\u027a\u0271\3\2\2\2\u027a\u0275\3\2\2\2\u027a\u0276\3\2\2\2\u027b"+ - "C\3\2\2\2\u027c\u027d\7\21\2\2\u027d\u027e\7\3\2\2\u027e\u027f\5,\27\2"+ - "\u027f\u0280\7\f\2\2\u0280\u0281\5\\/\2\u0281\u0282\7\4\2\2\u0282E\3\2"+ - "\2\2\u0283\u0284\7\25\2\2\u0284\u0285\7\3\2\2\u0285\u0286\5,\27\2\u0286"+ - "\u0287\7\5\2\2\u0287\u0288\5\\/\2\u0288\u0289\7\4\2\2\u0289G\3\2\2\2\u028a"+ - "\u0290\5J&\2\u028b\u028c\7e\2\2\u028c\u028d\5J&\2\u028d\u028e\7k\2\2\u028e"+ - "\u0290\3\2\2\2\u028f\u028a\3\2\2\2\u028f\u028b\3\2\2\2\u0290I\3\2\2\2"+ - "\u0291\u0292\7%\2\2\u0292\u0293\7\3\2\2\u0293\u0294\5`\61\2\u0294\u0295"+ - "\7)\2\2\u0295\u0296\5<\37\2\u0296\u0297\7\4\2\2\u0297K\3\2\2\2\u0298\u029e"+ - "\5N(\2\u0299\u029a\7e\2\2\u029a\u029b\5N(\2\u029b\u029c\7k\2\2\u029c\u029e"+ - "\3\2\2\2\u029d\u0298\3\2\2\2\u029d\u0299\3\2\2\2\u029eM\3\2\2\2\u029f"+ - "\u02a0\5P)\2\u02a0\u02ac\7\3\2\2\u02a1\u02a3\5\36\20\2\u02a2\u02a1\3\2"+ - "\2\2\u02a2\u02a3\3\2\2\2\u02a3\u02a4\3\2\2\2\u02a4\u02a9\5,\27\2\u02a5"+ - "\u02a6\7\5\2\2\u02a6\u02a8\5,\27\2\u02a7\u02a5\3\2\2\2\u02a8\u02ab\3\2"+ - "\2\2\u02a9\u02a7\3\2\2\2\u02a9\u02aa\3\2\2\2\u02aa\u02ad\3\2\2\2\u02ab"+ - "\u02a9\3\2\2\2\u02ac\u02a2\3\2\2\2\u02ac\u02ad\3\2\2\2\u02ad\u02ae\3\2"+ - "\2\2\u02ae\u02af\7\4\2\2\u02afO\3\2\2\2\u02b0\u02b4\7\67\2\2\u02b1\u02b4"+ - "\7L\2\2\u02b2\u02b4\5`\61\2\u02b3\u02b0\3\2\2\2\u02b3\u02b1\3\2\2\2\u02b3"+ - "\u02b2\3\2\2\2\u02b4Q\3\2\2\2\u02b5\u02d0\7B\2\2\u02b6\u02d0\5X-\2\u02b7"+ - "\u02d0\5h\65\2\u02b8\u02d0\5V,\2\u02b9\u02bb\7|\2\2\u02ba\u02b9\3\2\2"+ - "\2\u02bb\u02bc\3\2\2\2\u02bc\u02ba\3\2\2\2\u02bc\u02bd\3\2\2\2\u02bd\u02d0"+ - "\3\2\2\2\u02be\u02d0\7{\2\2\u02bf\u02c0\7g\2\2\u02c0\u02c1\5j\66\2\u02c1"+ - "\u02c2\7k\2\2\u02c2\u02d0\3\2\2\2\u02c3\u02c4\7h\2\2\u02c4\u02c5\5j\66"+ - "\2\u02c5\u02c6\7k\2\2\u02c6\u02d0\3\2\2\2\u02c7\u02c8\7i\2\2\u02c8\u02c9"+ - "\5j\66\2\u02c9\u02ca\7k\2\2\u02ca\u02d0\3\2\2\2\u02cb\u02cc\7j\2\2\u02cc"+ - "\u02cd\5j\66\2\u02cd\u02ce\7k\2\2\u02ce\u02d0\3\2\2\2\u02cf\u02b5\3\2"+ - "\2\2\u02cf\u02b6\3\2\2\2\u02cf\u02b7\3\2\2\2\u02cf\u02b8\3\2\2\2\u02cf"+ - "\u02ba\3\2\2\2\u02cf\u02be\3\2\2\2\u02cf\u02bf\3\2\2\2\u02cf\u02c3\3\2"+ - "\2\2\u02cf\u02c7\3\2\2\2\u02cf\u02cb\3\2\2\2\u02d0S\3\2\2\2\u02d1\u02d2"+ - "\t\r\2\2\u02d2U\3\2\2\2\u02d3\u02d4\t\16\2\2\u02d4W\3\2\2\2\u02d5\u02d7"+ - "\7\63\2\2\u02d6\u02d8\t\7\2\2\u02d7\u02d6\3\2\2\2\u02d7\u02d8\3\2\2\2"+ - "\u02d8\u02db\3\2\2\2\u02d9\u02dc\5h\65\2\u02da\u02dc\5j\66\2\u02db\u02d9"+ - "\3\2\2\2\u02db\u02da\3\2\2\2\u02dc\u02dd\3\2\2\2\u02dd\u02e0\5Z.\2\u02de"+ - "\u02df\7Z\2\2\u02df\u02e1\5Z.\2\u02e0\u02de\3\2\2\2\u02e0\u02e1\3\2\2"+ - "\2\u02e1Y\3\2\2\2\u02e2\u02e3\t\17\2\2\u02e3[\3\2\2\2\u02e4\u02e5\5`\61"+ - "\2\u02e5]\3\2\2\2\u02e6\u02e7\5`\61\2\u02e7\u02e8\7z\2\2\u02e8\u02ea\3"+ - "\2\2\2\u02e9\u02e6\3\2\2\2\u02ea\u02ed\3\2\2\2\u02eb\u02e9\3\2\2\2\u02eb"+ - "\u02ec\3\2\2\2\u02ec\u02ee\3\2\2\2\u02ed\u02eb\3\2\2\2\u02ee\u02ef\5`"+ - "\61\2\u02ef_\3\2\2\2\u02f0\u02f3\5d\63\2\u02f1\u02f3\5f\64\2\u02f2\u02f0"+ - "\3\2\2\2\u02f2\u02f1\3\2\2\2\u02f3a\3\2\2\2\u02f4\u02f5\5`\61\2\u02f5"+ - "\u02f6\7\6\2\2\u02f6\u02f8\3\2\2\2\u02f7\u02f4\3\2\2\2\u02f7\u02f8\3\2"+ - "\2\2\u02f8\u02f9\3\2\2\2\u02f9\u0301\7\u0081\2\2\u02fa\u02fb\5`\61\2\u02fb"+ - "\u02fc\7\6\2\2\u02fc\u02fe\3\2\2\2\u02fd\u02fa\3\2\2\2\u02fd\u02fe\3\2"+ - "\2\2\u02fe\u02ff\3\2\2\2\u02ff\u0301\5`\61\2\u0300\u02f7\3\2\2\2\u0300"+ - "\u02fd\3\2\2\2\u0301c\3\2\2\2\u0302\u0305\7\u0082\2\2\u0303\u0305\7\u0083"+ - 
"\2\2\u0304\u0302\3\2\2\2\u0304\u0303\3\2\2\2\u0305e\3\2\2\2\u0306\u030a"+ - "\7\177\2\2\u0307\u030a\5n8\2\u0308\u030a\7\u0080\2\2\u0309\u0306\3\2\2"+ - "\2\u0309\u0307\3\2\2\2\u0309\u0308\3\2\2\2\u030ag\3\2\2\2\u030b\u030e"+ - "\7~\2\2\u030c\u030e\7}\2\2\u030d\u030b\3\2\2\2\u030d\u030c\3\2\2\2\u030e"+ - "i\3\2\2\2\u030f\u0310\t\20\2\2\u0310k\3\2\2\2\u0311\u0312\7_\2\2\u0312"+ - "\u0313\5,\27\2\u0313\u0314\7X\2\2\u0314\u0315\5,\27\2\u0315m\3\2\2\2\u0316"+ - "\u0317\t\21\2\2\u0317o\3\2\2\2k\177\u0081\u0085\u008e\u0090\u0094\u009b"+ - "\u00a2\u00a7\u00ac\u00b4\u00b8\u00c0\u00c3\u00c9\u00ce\u00d1\u00d6\u00d9"+ - "\u00db\u00e3\u00e6\u00f2\u00f5\u00f8\u00ff\u0106\u010a\u010e\u0112\u0119"+ - "\u011d\u0121\u0126\u012a\u0132\u0136\u013d\u0148\u014b\u014f\u015b\u015e"+ - "\u0164\u016b\u0172\u0175\u0179\u017d\u0181\u0183\u018e\u0193\u0197\u019a"+ - "\u01a0\u01a3\u01a9\u01ac\u01ae\u01d1\u01d9\u01db\u01e2\u01e7\u01ea\u01f2"+ - "\u01fb\u0201\u0209\u020e\u0214\u0217\u021e\u0226\u022c\u0238\u023a\u0245"+ - "\u0254\u0259\u025d\u0261\u0268\u026e\u027a\u028f\u029d\u02a2\u02a9\u02ac"+ - "\u02b3\u02bc\u02cf\u02d7\u02db\u02e0\u02eb\u02f2\u02f7\u02fd\u0300\u0304"+ - "\u0309\u030d"; + "\2\2\u0094\u0095\3\2\2\2\u0095\u0096\3\2\2\2\u0096\u00e8\5\6\4\2\u0097"+ + "\u0098\7U\2\2\u0098\u009b\7X\2\2\u0099\u009a\7\63\2\2\u009a\u009c\7*\2"+ + "\2\u009b\u0099\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u009f\3\2\2\2\u009d\u00a0"+ + "\5\66\34\2\u009e\u00a0\5b\62\2\u009f\u009d\3\2\2\2\u009f\u009e\3\2\2\2"+ + "\u009f\u00a0\3\2\2\2\u00a0\u00e8\3\2\2\2\u00a1\u00a2\7U\2\2\u00a2\u00a5"+ + "\7\24\2\2\u00a3\u00a4\7\63\2\2\u00a4\u00a6\7*\2\2\u00a5\u00a3\3\2\2\2"+ + "\u00a5\u00a6\3\2\2\2\u00a6\u00a7\3\2\2\2\u00a7\u00aa\t\5\2\2\u00a8\u00ab"+ + "\5\66\34\2\u00a9\u00ab\5b\62\2\u00aa\u00a8\3\2\2\2\u00aa\u00a9\3\2\2\2"+ + "\u00ab\u00e8\3\2\2\2\u00ac\u00af\t\6\2\2\u00ad\u00ae\7\63\2\2\u00ae\u00b0"+ + "\7*\2\2\u00af\u00ad\3\2\2\2\u00af\u00b0\3\2\2\2\u00b0\u00b3\3\2\2\2\u00b1"+ + "\u00b4\5\66\34\2\u00b2\u00b4\5b\62\2\u00b3\u00b1\3\2\2\2\u00b3\u00b2\3"+ + "\2\2\2\u00b4\u00e8\3\2\2\2\u00b5\u00b6\7U\2\2\u00b6\u00b8\7,\2\2\u00b7"+ + "\u00b9\5\66\34\2\u00b8\u00b7\3\2\2\2\u00b8\u00b9\3\2\2\2\u00b9\u00e8\3"+ + "\2\2\2\u00ba\u00bb\7U\2\2\u00bb\u00e8\7Q\2\2\u00bc\u00bd\7V\2\2\u00bd"+ + "\u00c0\7X\2\2\u00be\u00bf\7\22\2\2\u00bf\u00c1\5\66\34\2\u00c0\u00be\3"+ + "\2\2\2\u00c0\u00c1\3\2\2\2\u00c1\u00c4\3\2\2\2\u00c2\u00c5\5\66\34\2\u00c3"+ + "\u00c5\5b\62\2\u00c4\u00c2\3\2\2\2\u00c4\u00c3\3\2\2\2\u00c4\u00c5\3\2"+ + "\2\2\u00c5\u00cf\3\2\2\2\u00c6\u00c7\7]\2\2\u00c7\u00cc\5j\66\2\u00c8"+ + "\u00c9\7\5\2\2\u00c9\u00cb\5j\66\2\u00ca\u00c8\3\2\2\2\u00cb\u00ce\3\2"+ + "\2\2\u00cc\u00ca\3\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00d0\3\2\2\2\u00ce"+ + "\u00cc\3\2\2\2\u00cf\u00c6\3\2\2\2\u00cf\u00d0\3\2\2\2\u00d0\u00e8\3\2"+ + "\2\2\u00d1\u00d2\7V\2\2\u00d2\u00d5\7\24\2\2\u00d3\u00d4\7\22\2\2\u00d4"+ + "\u00d6\5j\66\2\u00d5\u00d3\3\2\2\2\u00d5\u00d6\3\2\2\2\u00d6\u00da\3\2"+ + "\2\2\u00d7\u00d8\7W\2\2\u00d8\u00db\5\66\34\2\u00d9\u00db\5b\62\2\u00da"+ + "\u00d7\3\2\2\2\u00da\u00d9\3\2\2\2\u00da\u00db\3\2\2\2\u00db\u00dd\3\2"+ + "\2\2\u00dc\u00de\5\66\34\2\u00dd\u00dc\3\2\2\2\u00dd\u00de\3\2\2\2\u00de"+ + "\u00e8\3\2\2\2\u00df\u00e0\7V\2\2\u00e0\u00e5\7^\2\2\u00e1\u00e3\t\7\2"+ + "\2\u00e2\u00e1\3\2\2\2\u00e2\u00e3\3\2\2\2\u00e3\u00e4\3\2\2\2\u00e4\u00e6"+ + "\5h\65\2\u00e5\u00e2\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6\u00e8\3\2\2\2\u00e7"+ + "v\3\2\2\2\u00e7w\3\2\2\2\u00e7\u0088\3\2\2\2\u00e7\u0097\3\2\2\2\u00e7"+ + 
"\u00a1\3\2\2\2\u00e7\u00ac\3\2\2\2\u00e7\u00b5\3\2\2\2\u00e7\u00ba\3\2"+ + "\2\2\u00e7\u00bc\3\2\2\2\u00e7\u00d1\3\2\2\2\u00e7\u00df\3\2\2\2\u00e8"+ + "\7\3\2\2\2\u00e9\u00ea\7c\2\2\u00ea\u00ef\5\34\17\2\u00eb\u00ec\7\5\2"+ + "\2\u00ec\u00ee\5\34\17\2\u00ed\u00eb\3\2\2\2\u00ee\u00f1\3\2\2\2\u00ef"+ + "\u00ed\3\2\2\2\u00ef\u00f0\3\2\2\2\u00f0\u00f3\3\2\2\2\u00f1\u00ef\3\2"+ + "\2\2\u00f2\u00e9\3\2\2\2\u00f2\u00f3\3\2\2\2\u00f3\u00f4\3\2\2\2\u00f4"+ + "\u00f5\5\n\6\2\u00f5\t\3\2\2\2\u00f6\u0101\5\16\b\2\u00f7\u00f8\7I\2\2"+ + "\u00f8\u00f9\7\17\2\2\u00f9\u00fe\5\20\t\2\u00fa\u00fb\7\5\2\2\u00fb\u00fd"+ + "\5\20\t\2\u00fc\u00fa\3\2\2\2\u00fd\u0100\3\2\2\2\u00fe\u00fc\3\2\2\2"+ + "\u00fe\u00ff\3\2\2\2\u00ff\u0102\3\2\2\2\u0100\u00fe\3\2\2\2\u0101\u00f7"+ + "\3\2\2\2\u0101\u0102\3\2\2\2\u0102\u0104\3\2\2\2\u0103\u0105\5\f\7\2\u0104"+ + "\u0103\3\2\2\2\u0104\u0105\3\2\2\2\u0105\13\3\2\2\2\u0106\u0107\7;\2\2"+ + "\u0107\u010c\t\b\2\2\u0108\u0109\7h\2\2\u0109\u010a\t\b\2\2\u010a\u010c"+ + "\7m\2\2\u010b\u0106\3\2\2\2\u010b\u0108\3\2\2\2\u010c\r\3\2\2\2\u010d"+ + "\u0113\5\22\n\2\u010e\u010f\7\3\2\2\u010f\u0110\5\n\6\2\u0110\u0111\7"+ + "\4\2\2\u0111\u0113\3\2\2\2\u0112\u010d\3\2\2\2\u0112\u010e\3\2\2\2\u0113"+ + "\17\3\2\2\2\u0114\u0116\5,\27\2\u0115\u0117\t\t\2\2\u0116\u0115\3\2\2"+ + "\2\u0116\u0117\3\2\2\2\u0117\u011a\3\2\2\2\u0118\u0119\7E\2\2\u0119\u011b"+ + "\t\n\2\2\u011a\u0118\3\2\2\2\u011a\u011b\3\2\2\2\u011b\21\3\2\2\2\u011c"+ + "\u011e\7T\2\2\u011d\u011f\5\36\20\2\u011e\u011d\3\2\2\2\u011e\u011f\3"+ + "\2\2\2\u011f\u0120\3\2\2\2\u0120\u0125\5 \21\2\u0121\u0122\7\5\2\2\u0122"+ + "\u0124\5 \21\2\u0123\u0121\3\2\2\2\u0124\u0127\3\2\2\2\u0125\u0123\3\2"+ + "\2\2\u0125\u0126\3\2\2\2\u0126\u0129\3\2\2\2\u0127\u0125\3\2\2\2\u0128"+ + "\u012a\5\24\13\2\u0129\u0128\3\2\2\2\u0129\u012a\3\2\2\2\u012a\u012d\3"+ + "\2\2\2\u012b\u012c\7b\2\2\u012c\u012e\5.\30\2\u012d\u012b\3\2\2\2\u012d"+ + "\u012e\3\2\2\2\u012e\u0132\3\2\2\2\u012f\u0130\7.\2\2\u0130\u0131\7\17"+ + "\2\2\u0131\u0133\5\26\f\2\u0132\u012f\3\2\2\2\u0132\u0133\3\2\2\2\u0133"+ + "\u0136\3\2\2\2\u0134\u0135\7/\2\2\u0135\u0137\5.\30\2\u0136\u0134\3\2"+ + "\2\2\u0136\u0137\3\2\2\2\u0137\23\3\2\2\2\u0138\u0139\7)\2\2\u0139\u013e"+ + "\5\"\22\2\u013a\u013b\7\5\2\2\u013b\u013d\5\"\22\2\u013c\u013a\3\2\2\2"+ + "\u013d\u0140\3\2\2\2\u013e\u013c\3\2\2\2\u013e\u013f\3\2\2\2\u013f\25"+ + "\3\2\2\2\u0140\u013e\3\2\2\2\u0141\u0143\5\36\20\2\u0142\u0141\3\2\2\2"+ + "\u0142\u0143\3\2\2\2\u0143\u0144\3\2\2\2\u0144\u0149\5\30\r\2\u0145\u0146"+ + "\7\5\2\2\u0146\u0148\5\30\r\2\u0147\u0145\3\2\2\2\u0148\u014b\3\2\2\2"+ + "\u0149\u0147\3\2\2\2\u0149\u014a\3\2\2\2\u014a\27\3\2\2\2\u014b\u0149"+ + "\3\2\2\2\u014c\u014d\5\32\16\2\u014d\31\3\2\2\2\u014e\u0157\7\3\2\2\u014f"+ + "\u0154\5,\27\2\u0150\u0151\7\5\2\2\u0151\u0153\5,\27\2\u0152\u0150\3\2"+ + "\2\2\u0153\u0156\3\2\2\2\u0154\u0152\3\2\2\2\u0154\u0155\3\2\2\2\u0155"+ + "\u0158\3\2\2\2\u0156\u0154\3\2\2\2\u0157\u014f\3\2\2\2\u0157\u0158\3\2"+ + "\2\2\u0158\u0159\3\2\2\2\u0159\u015c\7\4\2\2\u015a\u015c\5,\27\2\u015b"+ + "\u014e\3\2\2\2\u015b\u015a\3\2\2\2\u015c\33\3\2\2\2\u015d\u015e\5`\61"+ + "\2\u015e\u015f\7\f\2\2\u015f\u0160\7\3\2\2\u0160\u0161\5\n\6\2\u0161\u0162"+ + "\7\4\2\2\u0162\35\3\2\2\2\u0163\u0164\t\13\2\2\u0164\37\3\2\2\2\u0165"+ + "\u016a\5,\27\2\u0166\u0168\7\f\2\2\u0167\u0166\3\2\2\2\u0167\u0168\3\2"+ + "\2\2\u0168\u0169\3\2\2\2\u0169\u016b\5`\61\2\u016a\u0167\3\2\2\2\u016a"+ + "\u016b\3\2\2\2\u016b!\3\2\2\2\u016c\u0170\5*\26\2\u016d\u016f\5$\23\2"+ + 
"\u016e\u016d\3\2\2\2\u016f\u0172\3\2\2\2\u0170\u016e\3\2\2\2\u0170\u0171"+ + "\3\2\2\2\u0171#\3\2\2\2\u0172\u0170\3\2\2\2\u0173\u0174\5&\24\2\u0174"+ + "\u0175\7\67\2\2\u0175\u0177\5*\26\2\u0176\u0178\5(\25\2\u0177\u0176\3"+ + "\2\2\2\u0177\u0178\3\2\2\2\u0178\u017f\3\2\2\2\u0179\u017a\7B\2\2\u017a"+ + "\u017b\5&\24\2\u017b\u017c\7\67\2\2\u017c\u017d\5*\26\2\u017d\u017f\3"+ + "\2\2\2\u017e\u0173\3\2\2\2\u017e\u0179\3\2\2\2\u017f%\3\2\2\2\u0180\u0182"+ + "\7\64\2\2\u0181\u0180\3\2\2\2\u0181\u0182\3\2\2\2\u0182\u0190\3\2\2\2"+ + "\u0183\u0185\79\2\2\u0184\u0186\7J\2\2\u0185\u0184\3\2\2\2\u0185\u0186"+ + "\3\2\2\2\u0186\u0190\3\2\2\2\u0187\u0189\7N\2\2\u0188\u018a\7J\2\2\u0189"+ + "\u0188\3\2\2\2\u0189\u018a\3\2\2\2\u018a\u0190\3\2\2\2\u018b\u018d\7+"+ + "\2\2\u018c\u018e\7J\2\2\u018d\u018c\3\2\2\2\u018d\u018e\3\2\2\2\u018e"+ + "\u0190\3\2\2\2\u018f\u0181\3\2\2\2\u018f\u0183\3\2\2\2\u018f\u0187\3\2"+ + "\2\2\u018f\u018b\3\2\2\2\u0190\'\3\2\2\2\u0191\u0192\7F\2\2\u0192\u01a0"+ + "\5.\30\2\u0193\u0194\7_\2\2\u0194\u0195\7\3\2\2\u0195\u019a\5`\61\2\u0196"+ + "\u0197\7\5\2\2\u0197\u0199\5`\61\2\u0198\u0196\3\2\2\2\u0199\u019c\3\2"+ + "\2\2\u019a\u0198\3\2\2\2\u019a\u019b\3\2\2\2\u019b\u019d\3\2\2\2\u019c"+ + "\u019a\3\2\2\2\u019d\u019e\7\4\2\2\u019e\u01a0\3\2\2\2\u019f\u0191\3\2"+ + "\2\2\u019f\u0193\3\2\2\2\u01a0)\3\2\2\2\u01a1\u01a3\7*\2\2\u01a2\u01a1"+ + "\3\2\2\2\u01a2\u01a3\3\2\2\2\u01a3\u01a4\3\2\2\2\u01a4\u01a9\5b\62\2\u01a5"+ + "\u01a7\7\f\2\2\u01a6\u01a5\3\2\2\2\u01a6\u01a7\3\2\2\2\u01a7\u01a8\3\2"+ + "\2\2\u01a8\u01aa\5^\60\2\u01a9\u01a6\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa"+ + "\u01be\3\2\2\2\u01ab\u01ac\7\3\2\2\u01ac\u01ad\5\n\6\2\u01ad\u01b2\7\4"+ + "\2\2\u01ae\u01b0\7\f\2\2\u01af\u01ae\3\2\2\2\u01af\u01b0\3\2\2\2\u01b0"+ + "\u01b1\3\2\2\2\u01b1\u01b3\5^\60\2\u01b2\u01af\3\2\2\2\u01b2\u01b3\3\2"+ + "\2\2\u01b3\u01be\3\2\2\2\u01b4\u01b5\7\3\2\2\u01b5\u01b6\5\"\22\2\u01b6"+ + "\u01bb\7\4\2\2\u01b7\u01b9\7\f\2\2\u01b8\u01b7\3\2\2\2\u01b8\u01b9\3\2"+ + "\2\2\u01b9\u01ba\3\2\2\2\u01ba\u01bc\5^\60\2\u01bb\u01b8\3\2\2\2\u01bb"+ + "\u01bc\3\2\2\2\u01bc\u01be\3\2\2\2\u01bd\u01a2\3\2\2\2\u01bd\u01ab\3\2"+ + "\2\2\u01bd\u01b4\3\2\2\2\u01be+\3\2\2\2\u01bf\u01c0\5.\30\2\u01c0-\3\2"+ + "\2\2\u01c1\u01c2\b\30\1\2\u01c2\u01c3\7C\2\2\u01c3\u01e1\5.\30\n\u01c4"+ + "\u01c5\7#\2\2\u01c5\u01c6\7\3\2\2\u01c6\u01c7\5\b\5\2\u01c7\u01c8\7\4"+ + "\2\2\u01c8\u01e1\3\2\2\2\u01c9\u01ca\7P\2\2\u01ca\u01cb\7\3\2\2\u01cb"+ + "\u01cc\5j\66\2\u01cc\u01cd\5\60\31\2\u01cd\u01ce\7\4\2\2\u01ce\u01e1\3"+ + "\2\2\2\u01cf\u01d0\7=\2\2\u01d0\u01d1\7\3\2\2\u01d1\u01d2\5^\60\2\u01d2"+ + "\u01d3\7\5\2\2\u01d3\u01d4\5j\66\2\u01d4\u01d5\5\60\31\2\u01d5\u01d6\7"+ + "\4\2\2\u01d6\u01e1\3\2\2\2\u01d7\u01d8\7=\2\2\u01d8\u01d9\7\3\2\2\u01d9"+ + "\u01da\5j\66\2\u01da\u01db\7\5\2\2\u01db\u01dc\5j\66\2\u01dc\u01dd\5\60"+ + "\31\2\u01dd\u01de\7\4\2\2\u01de\u01e1\3\2\2\2\u01df\u01e1\5\62\32\2\u01e0"+ + "\u01c1\3\2\2\2\u01e0\u01c4\3\2\2\2\u01e0\u01c9\3\2\2\2\u01e0\u01cf\3\2"+ + "\2\2\u01e0\u01d7\3\2\2\2\u01e0\u01df\3\2\2\2\u01e1\u01ea\3\2\2\2\u01e2"+ + "\u01e3\f\4\2\2\u01e3\u01e4\7\n\2\2\u01e4\u01e9\5.\30\5\u01e5\u01e6\f\3"+ + "\2\2\u01e6\u01e7\7H\2\2\u01e7\u01e9\5.\30\4\u01e8\u01e2\3\2\2\2\u01e8"+ + "\u01e5\3\2\2\2\u01e9\u01ec\3\2\2\2\u01ea\u01e8\3\2\2\2\u01ea\u01eb\3\2"+ + "\2\2\u01eb/\3\2\2\2\u01ec\u01ea\3\2\2\2\u01ed\u01ee\7\5\2\2\u01ee\u01f0"+ + "\5j\66\2\u01ef\u01ed\3\2\2\2\u01f0\u01f3\3\2\2\2\u01f1\u01ef\3\2\2\2\u01f1"+ + "\u01f2\3\2\2\2\u01f2\61\3\2\2\2\u01f3\u01f1\3\2\2\2\u01f4\u01f6\5<\37"+ + 
"\2\u01f5\u01f7\5\64\33\2\u01f6\u01f5\3\2\2\2\u01f6\u01f7\3\2\2\2\u01f7"+ + "\63\3\2\2\2\u01f8\u01fa\7C\2\2\u01f9\u01f8\3\2\2\2\u01f9\u01fa\3\2\2\2"+ + "\u01fa\u01fb\3\2\2\2\u01fb\u01fc\7\16\2\2\u01fc\u01fd\5<\37\2\u01fd\u01fe"+ + "\7\n\2\2\u01fe\u01ff\5<\37\2\u01ff\u0227\3\2\2\2\u0200\u0202\7C\2\2\u0201"+ + "\u0200\3\2\2\2\u0201\u0202\3\2\2\2\u0202\u0203\3\2\2\2\u0203\u0204\7\62"+ + "\2\2\u0204\u0205\7\3\2\2\u0205\u020a\5<\37\2\u0206\u0207\7\5\2\2\u0207"+ + "\u0209\5<\37\2\u0208\u0206\3\2\2\2\u0209\u020c\3\2\2\2\u020a\u0208\3\2"+ + "\2\2\u020a\u020b\3\2\2\2\u020b\u020d\3\2\2\2\u020c\u020a\3\2\2\2\u020d"+ + "\u020e\7\4\2\2\u020e\u0227\3\2\2\2\u020f\u0211\7C\2\2\u0210\u020f\3\2"+ + "\2\2\u0210\u0211\3\2\2\2\u0211\u0212\3\2\2\2\u0212\u0213\7\62\2\2\u0213"+ + "\u0214\7\3\2\2\u0214\u0215\5\b\5\2\u0215\u0216\7\4\2\2\u0216\u0227\3\2"+ + "\2\2\u0217\u0219\7C\2\2\u0218\u0217\3\2\2\2\u0218\u0219\3\2\2\2\u0219"+ + "\u021a\3\2\2\2\u021a\u021b\7:\2\2\u021b\u0227\58\35\2\u021c\u021e\7C\2"+ + "\2\u021d\u021c\3\2\2\2\u021d\u021e\3\2\2\2\u021e\u021f\3\2\2\2\u021f\u0220"+ + "\7O\2\2\u0220\u0227\5j\66\2\u0221\u0223\7\66\2\2\u0222\u0224\7C\2\2\u0223"+ + "\u0222\3\2\2\2\u0223\u0224\3\2\2\2\u0224\u0225\3\2\2\2\u0225\u0227\7D"+ + "\2\2\u0226\u01f9\3\2\2\2\u0226\u0201\3\2\2\2\u0226\u0210\3\2\2\2\u0226"+ + "\u0218\3\2\2\2\u0226\u021d\3\2\2\2\u0226\u0221\3\2\2\2\u0227\65\3\2\2"+ + "\2\u0228\u0229\7:\2\2\u0229\u022a\58\35\2\u022a\67\3\2\2\2\u022b\u022d"+ + "\5j\66\2\u022c\u022e\5:\36\2\u022d\u022c\3\2\2\2\u022d\u022e\3\2\2\2\u022e"+ + "9\3\2\2\2\u022f\u0230\7!\2\2\u0230\u0236\5j\66\2\u0231\u0232\7f\2\2\u0232"+ + "\u0233\5j\66\2\u0233\u0234\7m\2\2\u0234\u0236\3\2\2\2\u0235\u022f\3\2"+ + "\2\2\u0235\u0231\3\2\2\2\u0236;\3\2\2\2\u0237\u0238\b\37\1\2\u0238\u023c"+ + "\5> \2\u0239\u023a\t\7\2\2\u023a\u023c\5<\37\6\u023b\u0237\3\2\2\2\u023b"+ + "\u0239\3\2\2\2\u023c\u0249\3\2\2\2\u023d\u023e\f\5\2\2\u023e\u023f\t\f"+ + "\2\2\u023f\u0248\5<\37\6\u0240\u0241\f\4\2\2\u0241\u0242\t\7\2\2\u0242"+ + "\u0248\5<\37\5\u0243\u0244\f\3\2\2\u0244\u0245\5T+\2\u0245\u0246\5<\37"+ + "\4\u0246\u0248\3\2\2\2\u0247\u023d\3\2\2\2\u0247\u0240\3\2\2\2\u0247\u0243"+ + "\3\2\2\2\u0248\u024b\3\2\2\2\u0249\u0247\3\2\2\2\u0249\u024a\3\2\2\2\u024a"+ + "=\3\2\2\2\u024b\u0249\3\2\2\2\u024c\u024d\b \1\2\u024d\u0271\5B\"\2\u024e"+ + "\u0271\5H%\2\u024f\u0271\5@!\2\u0250\u0271\5R*\2\u0251\u0252\5^\60\2\u0252"+ + "\u0253\7|\2\2\u0253\u0255\3\2\2\2\u0254\u0251\3\2\2\2\u0254\u0255\3\2"+ + "\2\2\u0255\u0256\3\2\2\2\u0256\u0271\7w\2\2\u0257\u0271\5L\'\2\u0258\u0259"+ + "\7\3\2\2\u0259\u025a\5\b\5\2\u025a\u025b\7\4\2\2\u025b\u0271\3\2\2\2\u025c"+ + "\u0271\5^\60\2\u025d\u025e\7\3\2\2\u025e\u025f\5,\27\2\u025f\u0260\7\4"+ + "\2\2\u0260\u0271\3\2\2\2\u0261\u0263\7\20\2\2\u0262\u0264\5.\30\2\u0263"+ + "\u0262\3\2\2\2\u0263\u0264\3\2\2\2\u0264\u0266\3\2\2\2\u0265\u0267\5l"+ + "\67\2\u0266\u0265\3\2\2\2\u0267\u0268\3\2\2\2\u0268\u0266\3\2\2\2\u0268"+ + "\u0269\3\2\2\2\u0269\u026c\3\2\2\2\u026a\u026b\7\37\2\2\u026b\u026d\5"+ + ".\30\2\u026c\u026a\3\2\2\2\u026c\u026d\3\2\2\2\u026d\u026e\3\2\2\2\u026e"+ + "\u026f\7 \2\2\u026f\u0271\3\2\2\2\u0270\u024c\3\2\2\2\u0270\u024e\3\2"+ + "\2\2\u0270\u024f\3\2\2\2\u0270\u0250\3\2\2\2\u0270\u0254\3\2\2\2\u0270"+ + "\u0257\3\2\2\2\u0270\u0258\3\2\2\2\u0270\u025c\3\2\2\2\u0270\u025d\3\2"+ + "\2\2\u0270\u0261\3\2\2\2\u0271\u0277\3\2\2\2\u0272\u0273\f\f\2\2\u0273"+ + "\u0274\7z\2\2\u0274\u0276\5\\/\2\u0275\u0272\3\2\2\2\u0276\u0279\3\2\2"+ + "\2\u0277\u0275\3\2\2\2\u0277\u0278\3\2\2\2\u0278?\3\2\2\2\u0279\u0277"+ + 
"\3\2\2\2\u027a\u027e\7\30\2\2\u027b\u027e\7\26\2\2\u027c\u027e\7\27\2"+ + "\2\u027d\u027a\3\2\2\2\u027d\u027b\3\2\2\2\u027d\u027c\3\2\2\2\u027eA"+ + "\3\2\2\2\u027f\u028a\5D#\2\u0280\u0281\7g\2\2\u0281\u0282\5D#\2\u0282"+ + "\u0283\7m\2\2\u0283\u028a\3\2\2\2\u0284\u028a\5F$\2\u0285\u0286\7g\2\2"+ + "\u0286\u0287\5F$\2\u0287\u0288\7m\2\2\u0288\u028a\3\2\2\2\u0289\u027f"+ + "\3\2\2\2\u0289\u0280\3\2\2\2\u0289\u0284\3\2\2\2\u0289\u0285\3\2\2\2\u028a"+ + "C\3\2\2\2\u028b\u028c\7\21\2\2\u028c\u028d\7\3\2\2\u028d\u028e\5,\27\2"+ + "\u028e\u028f\7\f\2\2\u028f\u0290\5\\/\2\u0290\u0291\7\4\2\2\u0291E\3\2"+ + "\2\2\u0292\u0293\7\25\2\2\u0293\u0294\7\3\2\2\u0294\u0295\5,\27\2\u0295"+ + "\u0296\7\5\2\2\u0296\u0297\5\\/\2\u0297\u0298\7\4\2\2\u0298G\3\2\2\2\u0299"+ + "\u029f\5J&\2\u029a\u029b\7g\2\2\u029b\u029c\5J&\2\u029c\u029d\7m\2\2\u029d"+ + "\u029f\3\2\2\2\u029e\u0299\3\2\2\2\u029e\u029a\3\2\2\2\u029fI\3\2\2\2"+ + "\u02a0\u02a1\7%\2\2\u02a1\u02a2\7\3\2\2\u02a2\u02a3\5`\61\2\u02a3\u02a4"+ + "\7)\2\2\u02a4\u02a5\5<\37\2\u02a5\u02a6\7\4\2\2\u02a6K\3\2\2\2\u02a7\u02ad"+ + "\5N(\2\u02a8\u02a9\7g\2\2\u02a9\u02aa\5N(\2\u02aa\u02ab\7m\2\2\u02ab\u02ad"+ + "\3\2\2\2\u02ac\u02a7\3\2\2\2\u02ac\u02a8\3\2\2\2\u02adM\3\2\2\2\u02ae"+ + "\u02af\5P)\2\u02af\u02bb\7\3\2\2\u02b0\u02b2\5\36\20\2\u02b1\u02b0\3\2"+ + "\2\2\u02b1\u02b2\3\2\2\2\u02b2\u02b3\3\2\2\2\u02b3\u02b8\5,\27\2\u02b4"+ + "\u02b5\7\5\2\2\u02b5\u02b7\5,\27\2\u02b6\u02b4\3\2\2\2\u02b7\u02ba\3\2"+ + "\2\2\u02b8\u02b6\3\2\2\2\u02b8\u02b9\3\2\2\2\u02b9\u02bc\3\2\2\2\u02ba"+ + "\u02b8\3\2\2\2\u02bb\u02b1\3\2\2\2\u02bb\u02bc\3\2\2\2\u02bc\u02bd\3\2"+ + "\2\2\u02bd\u02be\7\4\2\2\u02beO\3\2\2\2\u02bf\u02c3\79\2\2\u02c0\u02c3"+ + "\7N\2\2\u02c1\u02c3\5`\61\2\u02c2\u02bf\3\2\2\2\u02c2\u02c0\3\2\2\2\u02c2"+ + "\u02c1\3\2\2\2\u02c3Q\3\2\2\2\u02c4\u02df\7D\2\2\u02c5\u02df\5X-\2\u02c6"+ + "\u02df\5h\65\2\u02c7\u02df\5V,\2\u02c8\u02ca\7~\2\2\u02c9\u02c8\3\2\2"+ + "\2\u02ca\u02cb\3\2\2\2\u02cb\u02c9\3\2\2\2\u02cb\u02cc\3\2\2\2\u02cc\u02df"+ + "\3\2\2\2\u02cd\u02df\7}\2\2\u02ce\u02cf\7i\2\2\u02cf\u02d0\5j\66\2\u02d0"+ + "\u02d1\7m\2\2\u02d1\u02df\3\2\2\2\u02d2\u02d3\7j\2\2\u02d3\u02d4\5j\66"+ + "\2\u02d4\u02d5\7m\2\2\u02d5\u02df\3\2\2\2\u02d6\u02d7\7k\2\2\u02d7\u02d8"+ + "\5j\66\2\u02d8\u02d9\7m\2\2\u02d9\u02df\3\2\2\2\u02da\u02db\7l\2\2\u02db"+ + "\u02dc\5j\66\2\u02dc\u02dd\7m\2\2\u02dd\u02df\3\2\2\2\u02de\u02c4\3\2"+ + "\2\2\u02de\u02c5\3\2\2\2\u02de\u02c6\3\2\2\2\u02de\u02c7\3\2\2\2\u02de"+ + "\u02c9\3\2\2\2\u02de\u02cd\3\2\2\2\u02de\u02ce\3\2\2\2\u02de\u02d2\3\2"+ + "\2\2\u02de\u02d6\3\2\2\2\u02de\u02da\3\2\2\2\u02dfS\3\2\2\2\u02e0\u02e1"+ + "\t\r\2\2\u02e1U\3\2\2\2\u02e2\u02e3\t\16\2\2\u02e3W\3\2\2\2\u02e4\u02e6"+ + "\7\65\2\2\u02e5\u02e7\t\7\2\2\u02e6\u02e5\3\2\2\2\u02e6\u02e7\3\2\2\2"+ + "\u02e7\u02ea\3\2\2\2\u02e8\u02eb\5h\65\2\u02e9\u02eb\5j\66\2\u02ea\u02e8"+ + "\3\2\2\2\u02ea\u02e9\3\2\2\2\u02eb\u02ec\3\2\2\2\u02ec\u02ef\5Z.\2\u02ed"+ + "\u02ee\7\\\2\2\u02ee\u02f0\5Z.\2\u02ef\u02ed\3\2\2\2\u02ef\u02f0\3\2\2"+ + "\2\u02f0Y\3\2\2\2\u02f1\u02f2\t\17\2\2\u02f2[\3\2\2\2\u02f3\u02f4\5`\61"+ + "\2\u02f4]\3\2\2\2\u02f5\u02f6\5`\61\2\u02f6\u02f7\7|\2\2\u02f7\u02f9\3"+ + "\2\2\2\u02f8\u02f5\3\2\2\2\u02f9\u02fc\3\2\2\2\u02fa\u02f8\3\2\2\2\u02fa"+ + "\u02fb\3\2\2\2\u02fb\u02fd\3\2\2\2\u02fc\u02fa\3\2\2\2\u02fd\u02fe\5`"+ + "\61\2\u02fe_\3\2\2\2\u02ff\u0302\5d\63\2\u0300\u0302\5f\64\2\u0301\u02ff"+ + "\3\2\2\2\u0301\u0300\3\2\2\2\u0302a\3\2\2\2\u0303\u0304\5`\61\2\u0304"+ + "\u0305\7\6\2\2\u0305\u0307\3\2\2\2\u0306\u0303\3\2\2\2\u0306\u0307\3\2"+ + 
"\2\2\u0307\u0308\3\2\2\2\u0308\u0310\7\u0083\2\2\u0309\u030a\5`\61\2\u030a"+ + "\u030b\7\6\2\2\u030b\u030d\3\2\2\2\u030c\u0309\3\2\2\2\u030c\u030d\3\2"+ + "\2\2\u030d\u030e\3\2\2\2\u030e\u0310\5`\61\2\u030f\u0306\3\2\2\2\u030f"+ + "\u030c\3\2\2\2\u0310c\3\2\2\2\u0311\u0314\7\u0084\2\2\u0312\u0314\7\u0085"+ + "\2\2\u0313\u0311\3\2\2\2\u0313\u0312\3\2\2\2\u0314e\3\2\2\2\u0315\u0319"+ + "\7\u0081\2\2\u0316\u0319\5n8\2\u0317\u0319\7\u0082\2\2\u0318\u0315\3\2"+ + "\2\2\u0318\u0316\3\2\2\2\u0318\u0317\3\2\2\2\u0319g\3\2\2\2\u031a\u031d"+ + "\7\u0080\2\2\u031b\u031d\7\177\2\2\u031c\u031a\3\2\2\2\u031c\u031b\3\2"+ + "\2\2\u031di\3\2\2\2\u031e\u031f\t\20\2\2\u031fk\3\2\2\2\u0320\u0321\7"+ + "a\2\2\u0321\u0322\5,\27\2\u0322\u0323\7Z\2\2\u0323\u0324\5,\27\2\u0324"+ + "m\3\2\2\2\u0325\u0326\t\21\2\2\u0326o\3\2\2\2o\177\u0081\u0085\u008e\u0090"+ + "\u0094\u009b\u009f\u00a5\u00aa\u00af\u00b3\u00b8\u00c0\u00c4\u00cc\u00cf"+ + "\u00d5\u00da\u00dd\u00e2\u00e5\u00e7\u00ef\u00f2\u00fe\u0101\u0104\u010b"+ + "\u0112\u0116\u011a\u011e\u0125\u0129\u012d\u0132\u0136\u013e\u0142\u0149"+ + "\u0154\u0157\u015b\u0167\u016a\u0170\u0177\u017e\u0181\u0185\u0189\u018d"+ + "\u018f\u019a\u019f\u01a2\u01a6\u01a9\u01af\u01b2\u01b8\u01bb\u01bd\u01e0"+ + "\u01e8\u01ea\u01f1\u01f6\u01f9\u0201\u020a\u0210\u0218\u021d\u0223\u0226"+ + "\u022d\u0235\u023b\u0247\u0249\u0254\u0263\u0268\u026c\u0270\u0277\u027d"+ + "\u0289\u029e\u02ac\u02b1\u02b8\u02bb\u02c2\u02cb\u02de\u02e6\u02ea\u02ef"+ + "\u02fa\u0301\u0306\u030c\u030f\u0313\u0318\u031c"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java index 9920293794ea0..6166d87703ead 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.sql.parser; -import com.carrotsearch.hppc.ObjectShortHashMap; import org.antlr.v4.runtime.BaseErrorListener; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.CommonToken; @@ -26,16 +25,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BackQuotedIdentifierContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanDefaultContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanExpressionContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PrimaryExpressionContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryPrimaryDefaultContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryTermContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QuoteIdentifierContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StatementContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StatementDefaultContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.UnquoteIdentifierContext; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; @@ -50,7 +39,6 @@ import java.util.function.Function; import static java.lang.String.format; -import static org.elasticsearch.xpack.sql.parser.AbstractBuilder.source; public class SqlParser { 
@@ -100,45 +88,49 @@ private T invokeParser(String sql, List params, Function parseFunction, BiFunction visitor) { - SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(sql)); + try { + SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(sql)); - lexer.removeErrorListeners(); - lexer.addErrorListener(ERROR_LISTENER); + lexer.removeErrorListeners(); + lexer.addErrorListener(ERROR_LISTENER); - Map paramTokens = new HashMap<>(); - TokenSource tokenSource = new ParametrizedTokenSource(lexer, paramTokens, params); + Map paramTokens = new HashMap<>(); + TokenSource tokenSource = new ParametrizedTokenSource(lexer, paramTokens, params); - CommonTokenStream tokenStream = new CommonTokenStream(tokenSource); - SqlBaseParser parser = new SqlBaseParser(tokenStream); + CommonTokenStream tokenStream = new CommonTokenStream(tokenSource); + SqlBaseParser parser = new SqlBaseParser(tokenStream); - parser.addParseListener(new CircuitBreakerListener()); - parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames()))); + parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames()))); - parser.removeErrorListeners(); - parser.addErrorListener(ERROR_LISTENER); + parser.removeErrorListeners(); + parser.addErrorListener(ERROR_LISTENER); - parser.getInterpreter().setPredictionMode(PredictionMode.SLL); + parser.getInterpreter().setPredictionMode(PredictionMode.SLL); - if (DEBUG) { - debug(parser); - tokenStream.fill(); + if (DEBUG) { + debug(parser); + tokenStream.fill(); - for (Token t : tokenStream.getTokens()) { - String symbolicName = SqlBaseLexer.VOCABULARY.getSymbolicName(t.getType()); - String literalName = SqlBaseLexer.VOCABULARY.getLiteralName(t.getType()); - log.info(format(Locale.ROOT, " %-15s '%s'", + for (Token t : tokenStream.getTokens()) { + String symbolicName = SqlBaseLexer.VOCABULARY.getSymbolicName(t.getType()); + String literalName = SqlBaseLexer.VOCABULARY.getLiteralName(t.getType()); + log.info(format(Locale.ROOT, " %-15s '%s'", symbolicName == null ? 
literalName : symbolicName, t.getText())); + } } - } - ParserRuleContext tree = parseFunction.apply(parser); + ParserRuleContext tree = parseFunction.apply(parser); - if (DEBUG) { - log.info("Parse tree {} " + tree.toStringTree()); - } + if (DEBUG) { + log.info("Parse tree {} " + tree.toStringTree()); + } - return visitor.apply(new AstBuilder(paramTokens), tree); + return visitor.apply(new AstBuilder(paramTokens), tree); + } catch (StackOverflowError e) { + throw new ParsingException("SQL statement is too large, " + + "causing stack overflow when generating the parsing tree: [{}]", sql); + } } private static void debug(SqlBaseParser parser) { @@ -221,93 +213,6 @@ public void exitNonReserved(SqlBaseParser.NonReservedContext context) { } } - /** - * Used to catch large expressions that can lead to stack overflows - */ - static class CircuitBreakerListener extends SqlBaseBaseListener { - - private static final short MAX_RULE_DEPTH = 200; - - /** - * Due to the structure of the grammar and our custom handling in {@link ExpressionBuilder} - * some expressions can exit with a different class than they entered: - * e.g.: ValueExpressionContext can exit as ValueExpressionDefaultContext - */ - private static final Map ENTER_EXIT_RULE_MAPPING = new HashMap<>(); - - static { - ENTER_EXIT_RULE_MAPPING.put(StatementDefaultContext.class.getSimpleName(), StatementContext.class.getSimpleName()); - ENTER_EXIT_RULE_MAPPING.put(QueryPrimaryDefaultContext.class.getSimpleName(), QueryTermContext.class.getSimpleName()); - ENTER_EXIT_RULE_MAPPING.put(BooleanDefaultContext.class.getSimpleName(), BooleanExpressionContext.class.getSimpleName()); - } - - private boolean insideIn = false; - - // Keep current depth for every rule visited. - // The totalDepth alone cannot be used as expressions like: e1 OR e2 OR e3 OR ... - // are processed as e1 OR (e2 OR (e3 OR (... and this results in the totalDepth not growing - // while the stack call depth is, leading to a StackOverflowError. - private ObjectShortHashMap depthCounts = new ObjectShortHashMap<>(); - - @Override - public void enterEveryRule(ParserRuleContext ctx) { - if (inDetected(ctx)) { - insideIn = true; - } - - // Skip PrimaryExpressionContext for IN as it's not visited on exit due to - // the grammar's peculiarity rule with "predicated" and "predicate". - // Also skip the Identifiers as they are "cheap". 
- if (ctx.getClass() != UnquoteIdentifierContext.class && - ctx.getClass() != QuoteIdentifierContext.class && - ctx.getClass() != BackQuotedIdentifierContext.class && - ctx.getClass() != SqlBaseParser.ConstantContext.class && - ctx.getClass() != SqlBaseParser.NumberContext.class && - ctx.getClass() != SqlBaseParser.ValueExpressionContext.class && - (insideIn == false || ctx.getClass() != PrimaryExpressionContext.class)) { - - int currentDepth = depthCounts.putOrAdd(ctx.getClass().getSimpleName(), (short) 1, (short) 1); - if (currentDepth > MAX_RULE_DEPTH) { - throw new ParsingException(source(ctx), "SQL statement too large; " + - "halt parsing to prevent memory errors (stopped at depth {})", MAX_RULE_DEPTH); - } - } - super.enterEveryRule(ctx); - } - - @Override - public void exitEveryRule(ParserRuleContext ctx) { - if (inDetected(ctx)) { - insideIn = false; - } - - decrementCounter(ctx); - super.exitEveryRule(ctx); - } - - ObjectShortHashMap depthCounts() { - return depthCounts; - } - - private void decrementCounter(ParserRuleContext ctx) { - String className = ctx.getClass().getSimpleName(); - String classNameToDecrement = ENTER_EXIT_RULE_MAPPING.getOrDefault(className, className); - - // Avoid having negative numbers - if (depthCounts.containsKey(classNameToDecrement)) { - depthCounts.putOrAdd(classNameToDecrement, (short) 0, (short) -1); - } - } - - private boolean inDetected(ParserRuleContext ctx) { - if (ctx.getParent() != null && ctx.getParent().getClass() == SqlBaseParser.PredicateContext.class) { - SqlBaseParser.PredicateContext pc = (SqlBaseParser.PredicateContext) ctx.getParent(); - return pc.kind != null && pc.kind.getType() == SqlBaseParser.IN; - } - return false; - } - } - private static final BaseErrorListener ERROR_LISTENER = new BaseErrorListener() { @Override public void syntaxError(Recognizer recognizer, Object offendingSymbol, int line, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java index a90fb751c5e70..01a88ef06fcec 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java @@ -24,16 +24,18 @@ public class EsRelation extends LeafPlan { private final EsIndex index; private final List attrs; + private final boolean frozen; - public EsRelation(Source source, EsIndex index) { + public EsRelation(Source source, EsIndex index, boolean frozen) { super(source); this.index = index; - attrs = flatten(source, index.mapping()); + this.attrs = flatten(source, index.mapping()); + this.frozen = frozen; } @Override protected NodeInfo info() { - return NodeInfo.create(this, EsRelation::new, index); + return NodeInfo.create(this, EsRelation::new, index, frozen); } private static List flatten(Source source, Map mapping) { @@ -63,6 +65,10 @@ public EsIndex index() { return index; } + public boolean frozen() { + return frozen; + } + @Override public List output() { return attrs; @@ -75,7 +81,7 @@ public boolean expressionsResolved() { @Override public int hashCode() { - return Objects.hash(index); + return Objects.hash(index, frozen); } @Override @@ -89,7 +95,8 @@ public boolean equals(Object obj) { } EsRelation other = (EsRelation) obj; - return Objects.equals(index, other.index); + return Objects.equals(index, other.index) + && frozen == other.frozen; } private static final int TO_STRING_LIMIT = 52; diff 
--git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelation.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelation.java index fa9e2326f2cc5..09ce8023f25ba 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelation.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelation.java @@ -20,23 +20,25 @@ public class UnresolvedRelation extends LeafPlan implements Unresolvable { private final TableIdentifier table; + private final boolean frozen; private final String alias; private final String unresolvedMsg; - public UnresolvedRelation(Source source, TableIdentifier table, String alias) { - this(source, table, alias, null); + public UnresolvedRelation(Source source, TableIdentifier table, String alias, boolean frozen) { + this(source, table, alias, frozen, null); } - public UnresolvedRelation(Source source, TableIdentifier table, String alias, String unresolvedMessage) { + public UnresolvedRelation(Source source, TableIdentifier table, String alias, boolean frozen, String unresolvedMessage) { super(source); this.table = table; this.alias = alias; + this.frozen = frozen; this.unresolvedMsg = unresolvedMessage == null ? "Unknown index [" + table.index() + "]" : unresolvedMessage; } @Override protected NodeInfo info() { - return NodeInfo.create(this, UnresolvedRelation::new, table, alias, unresolvedMsg); + return NodeInfo.create(this, UnresolvedRelation::new, table, alias, frozen, unresolvedMsg); } public TableIdentifier table() { @@ -47,6 +49,10 @@ public String alias() { return alias; } + public boolean frozen() { + return frozen; + } + @Override public boolean resolved() { return false; @@ -86,6 +92,7 @@ public boolean equals(Object obj) { return source().equals(other.source()) && table.equals(other.table) && Objects.equals(alias, other.alias) + && Objects.equals(frozen, other.frozen) && unresolvedMsg.equals(other.unresolvedMsg); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java index ed21b52114064..7cddc3fc0a7e9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java @@ -31,11 +31,13 @@ public class ShowColumns extends Command { private final String index; private final LikePattern pattern; + private final boolean includeFrozen; - public ShowColumns(Source source, String index, LikePattern pattern) { + public ShowColumns(Source source, String index, LikePattern pattern, boolean includeFrozen) { super(source); this.index = index; this.pattern = pattern; + this.includeFrozen = includeFrozen; } public String index() { @@ -48,7 +50,7 @@ public LikePattern pattern() { @Override protected NodeInfo info() { - return NodeInfo.create(this, ShowColumns::new, index, pattern); + return NodeInfo.create(this, ShowColumns::new, index, pattern, includeFrozen); } @Override @@ -62,7 +64,9 @@ public List output() { public void execute(SqlSession session, ActionListener listener) { String idx = index != null ? index : (pattern != null ? pattern.asIndexNameWildcard() : "*"); String regex = pattern != null ? 
pattern.asJavaRegex() : null; - session.indexResolver().resolveAsMergedMapping(idx, regex, ActionListener.wrap( + + boolean withFrozen = includeFrozen || session.configuration().includeFrozen(); + session.indexResolver().resolveAsMergedMapping(idx, regex, withFrozen, ActionListener.wrap( indexResult -> { List> rows = emptyList(); if (indexResult.isValid()) { @@ -92,7 +96,7 @@ private void fillInRows(Map mapping, String prefix, List info() { - return NodeInfo.create(this, ShowTables::new, index, pattern); + return NodeInfo.create(this, ShowTables::new, index, pattern, includeFrozen); } public String index() { @@ -46,23 +50,28 @@ public LikePattern pattern() { @Override public List output() { - return asList(keyword("name"), keyword("type")); + return asList(keyword("name"), keyword("type"), keyword("kind")); } @Override public final void execute(SqlSession session, ActionListener listener) { String idx = index != null ? index : (pattern != null ? pattern.asIndexNameWildcard() : "*"); String regex = pattern != null ? pattern.asJavaRegex() : null; - session.indexResolver().resolveNames(idx, regex, null, ActionListener.wrap(result -> { + + // to avoid redundancy, indicate whether frozen fields are required by specifying the type + EnumSet withFrozen = session.configuration().includeFrozen() || includeFrozen ? + IndexType.VALID_INCLUDE_FROZEN : IndexType.VALID_REGULAR; + + session.indexResolver().resolveNames(idx, regex, withFrozen, ActionListener.wrap(result -> { listener.onResponse(Rows.of(output(), result.stream() - .map(t -> asList(t.name(), t.type().toSql())) + .map(t -> asList(t.name(), t.type().toSql(), t.type().toNative())) .collect(toList()))); }, listener::onFailure)); } @Override public int hashCode() { - return Objects.hash(index, pattern); + return Objects.hash(index, pattern, includeFrozen); } @Override @@ -76,7 +85,8 @@ public boolean equals(Object obj) { } ShowTables other = (ShowTables) obj; - return Objects.equals(index, other.index) - && Objects.equals(pattern, other.pattern); + return Objects.equals(index, other.index) + && Objects.equals(pattern, other.pattern) + && includeFrozen == other.includeFrozen; } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java index 674045ab692fa..16d6eae924e36 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java @@ -109,16 +109,17 @@ public void execute(SqlSession session, ActionListener listener) { } // save original index name (as the pattern can contain special chars) - String indexName = index != null ? index : (pattern != null ? StringUtils.likeToUnescaped(pattern.pattern(), - pattern.escape()) : ""); + String indexName = index != null ? index : + (pattern != null ? StringUtils.likeToUnescaped(pattern.pattern(), pattern.escape()) : ""); String idx = index != null ? index : (pattern != null ? pattern.asIndexNameWildcard() : "*"); String regex = pattern != null ? pattern.asJavaRegex() : null; Pattern columnMatcher = columnPattern != null ? 
Pattern.compile(columnPattern.asJavaRegex()) : null; + boolean includeFrozen = session.configuration().includeFrozen(); // special case for '%' (translated to *) if ("*".equals(idx)) { - session.indexResolver().resolveAsSeparateMappings(idx, regex, ActionListener.wrap(esIndices -> { + session.indexResolver().resolveAsSeparateMappings(idx, regex, includeFrozen, ActionListener.wrap(esIndices -> { List> rows = new ArrayList<>(); for (EsIndex esIndex : esIndices) { fillInRows(cluster, esIndex.name(), esIndex.mapping(), null, rows, columnMatcher, mode); @@ -129,7 +130,7 @@ public void execute(SqlSession session, ActionListener listener) { } // otherwise use a merged mapping else { - session.indexResolver().resolveAsMergedMapping(idx, regex, ActionListener.wrap(r -> { + session.indexResolver().resolveAsMergedMapping(idx, regex, includeFrozen, ActionListener.wrap(r -> { List> rows = new ArrayList<>(); // populate the data only when a target is found if (r.isValid() == true) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java index 53f1e1019b753..111b392adb6b7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java @@ -22,6 +22,7 @@ import java.util.EnumSet; import java.util.List; import java.util.Objects; +import java.util.Set; import java.util.regex.Pattern; import static java.util.Arrays.asList; @@ -89,6 +90,8 @@ public final void execute(SqlSession session, ActionListener liste } } + boolean includeFrozen = session.configuration().includeFrozen(); + // enumerate types // if no types are specified (the parser takes care of the % case) if (types == null) { @@ -98,7 +101,10 @@ public final void execute(SqlSession session, ActionListener liste && pattern != null && pattern.pattern().isEmpty() && index == null) { List> values = new ArrayList<>(); // send only the types, everything else is made of empty strings - for (IndexType type : IndexType.VALID) { + // NB: since the types are sent in SQL, frozen doesn't have to be taken into account since + // it's just another BASE TABLE + Set typeSet = IndexType.VALID_REGULAR; + for (IndexType type : typeSet) { Object[] enumeration = new Object[10]; enumeration[3] = type.toSql(); values.add(asList(enumeration)); @@ -110,6 +116,7 @@ public final void execute(SqlSession session, ActionListener liste } } + // no enumeration pattern found, list actual tables String cRegex = clusterPattern != null ? clusterPattern.asJavaRegex() : null; @@ -122,7 +129,18 @@ public final void execute(SqlSession session, ActionListener liste String idx = index != null ? index : (pattern != null ? pattern.asIndexNameWildcard() : "*"); String regex = pattern != null ? pattern.asJavaRegex() : null; - session.indexResolver().resolveNames(idx, regex, types, ActionListener.wrap(result -> listener.onResponse( + EnumSet tableTypes = types; + + // initialize types for name resolution + if (tableTypes == null) { + tableTypes = includeFrozen ? 
IndexType.VALID_INCLUDE_FROZEN : IndexType.VALID_REGULAR; + } else { + if (includeFrozen && tableTypes.contains(IndexType.FROZEN_INDEX) == false) { + tableTypes.add(IndexType.FROZEN_INDEX); + } + } + + session.indexResolver().resolveNames(idx, regex, tableTypes, ActionListener.wrap(result -> listener.onResponse( Rows.of(output(), result.stream() // sort by type (which might be legacy), then by name .sorted(Comparator. comparing(i -> legacyName(i.type())) @@ -142,7 +160,7 @@ public final void execute(SqlSession session, ActionListener liste } private String legacyName(IndexType indexType) { - return legacyTableTypes && indexType == IndexType.INDEX ? "TABLE" : indexType.toSql(); + return legacyTableTypes && IndexType.INDICES_ONLY.contains(indexType) ? IndexType.SQL_TABLE : indexType.toSql(); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java index 92a60b4ee5576..b32ad961ae958 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java @@ -11,10 +11,10 @@ import org.elasticsearch.xpack.sql.plan.logical.Filter; import org.elasticsearch.xpack.sql.plan.logical.Join; import org.elasticsearch.xpack.sql.plan.logical.Limit; +import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; import org.elasticsearch.xpack.sql.plan.logical.Project; -import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.With; import org.elasticsearch.xpack.sql.plan.logical.command.Command; import org.elasticsearch.xpack.sql.plan.physical.AggregateExec; @@ -22,10 +22,10 @@ import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.sql.plan.physical.FilterExec; import org.elasticsearch.xpack.sql.plan.physical.LimitExec; +import org.elasticsearch.xpack.sql.plan.physical.LocalExec; import org.elasticsearch.xpack.sql.plan.physical.OrderExec; import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.sql.plan.physical.ProjectExec; -import org.elasticsearch.xpack.sql.plan.physical.LocalExec; import org.elasticsearch.xpack.sql.plan.physical.UnplannedExec; import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer; import org.elasticsearch.xpack.sql.rule.Rule; @@ -91,7 +91,11 @@ protected PhysicalPlan map(LogicalPlan p) { if (p instanceof EsRelation) { EsRelation c = (EsRelation) p; List output = c.output(); - return new EsQueryExec(p.source(), c.index().name(), output, new QueryContainer()); + QueryContainer container = new QueryContainer(); + if (c.frozen()) { + container = container.withFrozen(); + } + return new EsQueryExec(p.source(), c.index().name(), output, container); } if (p instanceof Limit) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index 56554185ce84b..802d6d37b7c40 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -149,7 +149,8 @@ protected PhysicalPlan rule(ProjectExec project) { new AttributeMap<>(processors), queryC.sort(), 
queryC.limit(), - queryC.shouldTrackHits()); + queryC.shouldTrackHits(), + queryC.shouldIncludeFrozen()); return new EsQueryExec(exec.source(), exec.index(), project.output(), clone); } return project; @@ -178,7 +179,8 @@ protected PhysicalPlan rule(FilterExec plan) { qContainer.scalarFunctions(), qContainer.sort(), qContainer.limit(), - qContainer.shouldTrackHits()); + qContainer.shouldTrackHits(), + qContainer.shouldIncludeFrozen()); return exec.with(qContainer); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index 8495b0269eb84..7e5516810d92a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.sql.planner; +import org.elasticsearch.geo.geometry.Geometry; +import org.elasticsearch.geo.geometry.Point; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Attribute; @@ -38,6 +40,8 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistance; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.literal.Intervals; import org.elasticsearch.xpack.sql.expression.predicate.Range; @@ -85,6 +89,7 @@ import org.elasticsearch.xpack.sql.querydsl.agg.TopHitsAgg; import org.elasticsearch.xpack.sql.querydsl.query.BoolQuery; import org.elasticsearch.xpack.sql.querydsl.query.ExistsQuery; +import org.elasticsearch.xpack.sql.querydsl.query.GeoDistanceQuery; import org.elasticsearch.xpack.sql.querydsl.query.MatchQuery; import org.elasticsearch.xpack.sql.querydsl.query.MultiMatchQuery; import org.elasticsearch.xpack.sql.querydsl.query.NestedQuery; @@ -656,6 +661,24 @@ private static Query translateQuery(BinaryComparison bc) { Object value = valueOf(bc.right()); String format = dateFormat(bc.left()); + // Possible geo optimization + if (bc.left() instanceof StDistance && value instanceof Number) { + if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { + // Special case for ST_Distance translatable into geo_distance query + StDistance stDistance = (StDistance) bc.left(); + if (stDistance.left() instanceof FieldAttribute && stDistance.right().foldable()) { + Object geoShape = valueOf(stDistance.right()); + if (geoShape instanceof GeoShape) { + Geometry geometry = ((GeoShape) geoShape).toGeometry(); + if (geometry instanceof Point) { + String field = nameOf(stDistance.left()); + return new GeoDistanceQuery(source, field, ((Number) value).doubleValue(), + ((Point) geometry).getLat(), ((Point) geometry).getLon()); + } + } + } + } + } if (bc instanceof GreaterThan) { return new RangeQuery(source, name, value, false, null, false, format); } @@ -954,6 +977,9 @@ public QueryTranslation translate(Expression exp, boolean onAggs) { protected static Query handleQuery(ScalarFunction sf, Expression field, Supplier query) { Query q = query.get(); + if 
(field instanceof StDistance && q instanceof GeoDistanceQuery) { + return wrapIfNested(q, ((StDistance) field).left()); + } if (field instanceof FieldAttribute) { return wrapIfNested(q, field); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java index 6ba8219c2b580..771ab30d0f4c1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java @@ -137,5 +137,4 @@ public RestResponse buildResponse(SqlQueryResponse response) throws Exception { public String getName() { return "sql_query"; } - } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java index d0c67f193b710..98bb25b8ebd81 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java @@ -47,7 +47,8 @@ public static void operation(PlanExecutor planExecutor, SqlClearCursorRequest re Cursor cursor = Cursors.decodeFromString(request.getCursor()); planExecutor.cleanCursor( new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, - request.mode(), StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, Protocol.FIELD_MULTI_VALUE_LENIENCY), + request.mode(), StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, Protocol.FIELD_MULTI_VALUE_LENIENCY, + Protocol.INDEX_INCLUDE_FROZEN), cursor, ActionListener.wrap( success -> listener.onResponse(new SqlClearCursorResponse(success)), listener::onFailure)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 44695f950224c..97fc583c3e886 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -72,7 +72,8 @@ public static void operation(PlanExecutor planExecutor, SqlQueryRequest request, // The configuration is always created however when dealing with the next page, only the timeouts are relevant // the rest having default values (since the query is already created) Configuration cfg = new Configuration(request.zoneId(), request.fetchSize(), request.requestTimeout(), request.pageTimeout(), - request.filter(), request.mode(), request.clientId(), username, clusterName, request.fieldMultiValueLeniency()); + request.filter(), request.mode(), request.clientId(), username, clusterName, request.fieldMultiValueLeniency(), + request.indexIncludeFrozen()); if (Strings.hasText(request.cursor()) == false) { planExecutor.sql(cfg, request.query(), request.params(), diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java index 0b9132df29025..9101557d1cd1a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java @@ -56,7 +56,8 @@ protected void doExecute(Task task, SqlTranslateRequest request, ActionListener< Configuration cfg = new Configuration(request.zoneId(), request.fetchSize(), request.requestTimeout(), request.pageTimeout(), request.filter(), request.mode(), request.clientId(), - username(securityContext), clusterName(clusterService), Protocol.FIELD_MULTI_VALUE_LENIENCY); + username(securityContext), clusterName(clusterService), Protocol.FIELD_MULTI_VALUE_LENIENCY, + Protocol.INDEX_INCLUDE_FROZEN); planExecutor.searchSource(cfg, request.query(), request.params(), ActionListener.wrap( searchSourceBuilder -> listener.onResponse(new SqlTranslateResponse(searchSourceBuilder)), listener::onFailure)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java index 329eb9b566a05..827eade2e5924 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java @@ -81,25 +81,27 @@ public class QueryContainer { private final Set sort; private final int limit; private final boolean trackHits; + private final boolean includeFrozen; // computed private Boolean aggsOnly; private Boolean customSort; public QueryContainer() { - this(null, null, null, null, null, null, null, -1, false); + this(null, null, null, null, null, null, null, -1, false, false); } - public QueryContainer(Query query, - Aggs aggs, - List> fields, + public QueryContainer(Query query, + Aggs aggs, + List> fields, AttributeMap aliases, - Map pseudoFunctions, - AttributeMap scalarFunctions, - Set sort, - int limit, - boolean trackHits) { + Map pseudoFunctions, + AttributeMap scalarFunctions, + Set sort, + int limit, + boolean trackHits, + boolean includeFrozen) { this.query = query; this.aggs = aggs == null ? Aggs.EMPTY : aggs; this.fields = fields == null || fields.isEmpty() ? emptyList() : fields; @@ -109,6 +111,7 @@ public QueryContainer(Query query, this.sort = sort == null || sort.isEmpty() ? 
emptySet() : sort; this.limit = limit; this.trackHits = trackHits; + this.includeFrozen = includeFrozen; } /** @@ -237,42 +240,53 @@ public boolean shouldTrackHits() { return trackHits; } + public boolean shouldIncludeFrozen() { + return includeFrozen; + } + // // copy methods // public QueryContainer with(Query q) { - return new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits); + return new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen); } public QueryContainer withAliases(AttributeMap a) { - return new QueryContainer(query, aggs, fields, a, pseudoFunctions, scalarFunctions, sort, limit, trackHits); + return new QueryContainer(query, aggs, fields, a, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen); } public QueryContainer withPseudoFunctions(Map p) { - return new QueryContainer(query, aggs, fields, aliases, p, scalarFunctions, sort, limit, trackHits); + return new QueryContainer(query, aggs, fields, aliases, p, scalarFunctions, sort, limit, trackHits, includeFrozen); } public QueryContainer with(Aggs a) { - return new QueryContainer(query, a, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits); + return new QueryContainer(query, a, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen); } public QueryContainer withLimit(int l) { - return l == limit ? this : new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, l, trackHits); + return l == limit ? this : new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, l, trackHits, + includeFrozen); } public QueryContainer withTrackHits() { - return trackHits ? this : new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, true); + return trackHits ? this : new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, true, + includeFrozen); + } + + public QueryContainer withFrozen() { + return includeFrozen ? 
this : new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, + trackHits, true); } public QueryContainer withScalarProcessors(AttributeMap procs) { - return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, procs, sort, limit, trackHits); + return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, procs, sort, limit, trackHits, includeFrozen); } public QueryContainer addSort(Sort sortable) { Set sort = new LinkedHashSet<>(this.sort); sort.add(sortable); - return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits); + return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen); } private String aliasName(Attribute attr) { @@ -294,7 +308,8 @@ private Tuple nestedHitFieldRef(FieldAttribute SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(name, attr.field().getDataType(), attr.field().isAggregatable(), attr.parent().name()); - return new Tuple<>(new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits), + return new Tuple<>( + new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen), nestedFieldRef); } @@ -397,7 +412,7 @@ public QueryContainer addColumn(FieldExtraction ref, Attribute attr) { ExpressionId id = attr instanceof AggregateFunctionAttribute ? ((AggregateFunctionAttribute) attr).innerId() : attr.id(); return new QueryContainer(query, aggs, combine(fields, new Tuple<>(ref, id)), aliases, pseudoFunctions, scalarFunctions, - sort, limit, trackHits); + sort, limit, trackHits, includeFrozen); } public AttributeMap scalarFunctions() { @@ -430,7 +445,7 @@ public QueryContainer updateGroup(GroupByKey group) { @Override public int hashCode() { - return Objects.hash(query, aggs, fields, aliases, sort, limit); + return Objects.hash(query, aggs, fields, aliases, sort, limit, trackHits, includeFrozen); } @Override @@ -449,7 +464,9 @@ public boolean equals(Object obj) { && Objects.equals(fields, other.fields) && Objects.equals(aliases, other.aliases) && Objects.equals(sort, other.sort) - && Objects.equals(limit, other.limit); + && Objects.equals(limit, other.limit) + && Objects.equals(trackHits, other.trackHits) + && Objects.equals(includeFrozen, other.includeFrozen); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/GeoDistanceQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/GeoDistanceQuery.java new file mode 100644 index 0000000000000..dd1a1171c1603 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/GeoDistanceQuery.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.querydsl.query; + +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.xpack.sql.tree.Source; + +import java.util.Objects; + +public class GeoDistanceQuery extends LeafQuery { + + private final String field; + private final double lat; + private final double lon; + private final double distance; + + public GeoDistanceQuery(Source source, String field, double distance, double lat, double lon) { + super(source); + this.field = field; + this.distance = distance; + this.lat = lat; + this.lon = lon; + } + + public String field() { + return field; + } + + public double lat() { + return lat; + } + + public double lon() { + return lon; + } + + public double distance() { + return distance; + } + + @Override + public QueryBuilder asBuilder() { + return QueryBuilders.geoDistanceQuery(field).distance(distance, DistanceUnit.METERS).point(lat, lon); + } + + @Override + public int hashCode() { + return Objects.hash(field, distance, lat, lon); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + GeoDistanceQuery other = (GeoDistanceQuery) obj; + return Objects.equals(field, other.field) && + Objects.equals(distance, other.distance) && + Objects.equals(lat, other.lat) && + Objects.equals(lon, other.lon); + } + + @Override + protected String innerToString() { + return field + ":" + "(" + distance + "," + "(" + lat + ", " + lon + "))"; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java index d03ac08305e3e..5b9901ccae447 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java @@ -25,6 +25,7 @@ public class Configuration { private final String clusterName; private final boolean multiValueFieldLeniency; private final ZonedDateTime now; + private final boolean includeFrozenIndices; @Nullable private QueryBuilder filter; @@ -32,7 +33,8 @@ public class Configuration { public Configuration(ZoneId zi, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout, QueryBuilder filter, Mode mode, String clientId, String username, String clusterName, - boolean multiValueFieldLeniency) { + boolean multiValueFieldLeniency, + boolean includeFrozen) { this.zoneId = zi.normalized(); this.pageSize = pageSize; this.requestTimeout = requestTimeout; @@ -44,6 +46,7 @@ public Configuration(ZoneId zi, int pageSize, TimeValue requestTimeout, TimeValu this.clusterName = clusterName; this.multiValueFieldLeniency = multiValueFieldLeniency; this.now = ZonedDateTime.now(zoneId); + this.includeFrozenIndices = includeFrozen; } public ZoneId zoneId() { @@ -80,7 +83,7 @@ public String username() { public String clusterName() { return clusterName; } - + public ZonedDateTime now() { return now; } @@ -88,4 +91,8 @@ public ZonedDateTime now() { public boolean multiValueFieldLeniency() { return multiValueFieldLeniency; } + + public boolean includeFrozen() { + return includeFrozenIndices; + } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java index ae1a6f14da522..6a5b5bd2ae5fd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/SqlSession.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer; import org.elasticsearch.xpack.sql.analysis.analyzer.PreAnalyzer; import org.elasticsearch.xpack.sql.analysis.analyzer.PreAnalyzer.PreAnalysis; +import org.elasticsearch.xpack.sql.analysis.analyzer.TableInfo; import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier; import org.elasticsearch.xpack.sql.analysis.index.IndexResolution; import org.elasticsearch.xpack.sql.analysis.index.IndexResolver; @@ -128,7 +129,8 @@ private void preAnalyze(LogicalPlan parsed, Function act // Note: JOINs are not supported but we detect them when listener.onFailure(new MappingException("Queries with multiple indices are not supported")); } else if (preAnalysis.indices.size() == 1) { - TableIdentifier table = preAnalysis.indices.get(0); + TableInfo tableInfo = preAnalysis.indices.get(0); + TableIdentifier table = tableInfo.id(); String cluster = table.cluster(); @@ -136,7 +138,8 @@ private void preAnalyze(LogicalPlan parsed, Function act listener.onFailure(new MappingException("Cannot inspect indices in cluster/catalog [{}]", cluster)); } - indexResolver.resolveAsMergedMapping(table.index(), null, + boolean includeFrozen = configuration.includeFrozen() || tableInfo.isFrozen(); + indexResolver.resolveAsMergedMapping(table.index(), null, includeFrozen, wrap(indexResult -> listener.onResponse(action.apply(indexResult)), listener::onFailure)); } else { try { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index 1f04e7c8e1982..76f2436e8629c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -53,6 +53,9 @@ public enum DataType { // // specialized types // + GEO_SHAPE( ExtTypes.GEOMETRY, Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, false, false, false), + // display size = 2 doubles + len("POINT( )") + GEO_POINT( ExtTypes.GEOMETRY, Double.BYTES*2, Integer.MAX_VALUE, 25 * 2 + 8, false, false, false), // IP can be v4 or v6. 
The latter has 2^128 addresses or 340,282,366,920,938,463,463,374,607,431,768,211,456 // aka 39 chars IP( "ip", JDBCType.VARCHAR, 39, 39, 0, false, false, true), @@ -251,6 +254,10 @@ public boolean isPrimitive() { return this != OBJECT && this != NESTED && this != UNSUPPORTED; } + public boolean isGeo() { + return this == GEO_POINT || this == GEO_SHAPE; + } + public boolean isDateBased() { return this == DATE || this == DATETIME; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java index 40a03e26eb0ef..5fd1867aeb27a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java @@ -121,6 +121,17 @@ public static DataType commonType(DataType left, DataType right) { return right; } } + // Interval * integer is a valid operation + if (DataTypes.isInterval(left)) { + if (right.isInteger()) { + return left; + } + } + if (DataTypes.isInterval(right)) { + if (left.isInteger()) { + return right; + } + } if (DataTypes.isInterval(left)) { // intervals widening if (DataTypes.isInterval(right)) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java index dcd6a1b35a13e..3f985ae4e3b6e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.type; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; import org.elasticsearch.xpack.sql.expression.literal.Interval; import java.time.OffsetTime; @@ -81,6 +82,9 @@ public static DataType fromJava(Object value) { if (value instanceof Interval) { return ((Interval) value).dataType(); } + if (value instanceof GeoShape) { + return DataType.GEO_SHAPE; + } throw new SqlIllegalArgumentException("No idea what's the DataType for {}", value.getClass()); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/ExtTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/ExtTypes.java index 1ad9dd92abfec..2c07be3eb620d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/ExtTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/ExtTypes.java @@ -27,7 +27,8 @@ enum ExtTypes implements SQLType { INTERVAL_DAY_TO_SECOND(110), INTERVAL_HOUR_TO_MINUTE(111), INTERVAL_HOUR_TO_SECOND(112), - INTERVAL_MINUTE_TO_SECOND(113); + INTERVAL_MINUTE_TO_SECOND(113), + GEOMETRY(114); private final Integer type; diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt index 4ac4632572ca0..6d24ea79f2bc2 100644 --- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt +++ b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt @@ -4,7 +4,14 @@ # you may not use this file except in compliance with the Elastic License. 
# -# This file contains a whitelist for SQL specific utilities available inside SQL scripting +# This file contains a whitelist for SQL specific utilities and classes available inside SQL scripting + +#### Classes + +class org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape { + +} + class org.elasticsearch.xpack.sql.expression.literal.IntervalDayTime { } @@ -137,7 +144,19 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS String space(Number) String substring(String, Number, Number) String ucase(String) - + +# +# Geo Functions +# + GeoShape geoDocValue(java.util.Map, String) + String stAswkt(Object) + Double stDistance(Object, Object) + String stGeometryType(Object) + GeoShape stWktToSql(String) + Double stX(Object) + Double stY(Object) + Double stZ(Object) + # # Casting # diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java index afec8063edab0..edce320c5b484 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java @@ -15,6 +15,7 @@ import java.time.ZoneId; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; +import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.randomIntBetween; import static org.elasticsearch.test.ESTestCase.randomNonNegativeLong; @@ -27,7 +28,7 @@ private TestUtils() {} public static final Configuration TEST_CFG = new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, Mode.PLAIN, - null, null, null, false); + null, null, null, false, false); public static Configuration randomConfiguration() { return new Configuration(randomZone(), @@ -39,7 +40,8 @@ public static Configuration randomConfiguration() { randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), - false); + false, + randomBoolean()); } public static Configuration randomConfiguration(ZoneId providedZoneId) { @@ -52,7 +54,8 @@ public static Configuration randomConfiguration(ZoneId providedZoneId) { randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), - false); + false, + randomBoolean()); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java index bc7b85b5392e9..b36111ffac3bb 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java @@ -158,7 +158,7 @@ public void testDottedFieldPathTypo() { public void testStarExpansionExcludesObjectAndUnsupportedTypes() { LogicalPlan plan = plan("SELECT * FROM test"); List list = ((Project) plan).projections(); - assertThat(list, hasSize(8)); + assertThat(list, hasSize(10)); List names = Expressions.names(list); assertThat(names, not(hasItem("some"))); assertThat(names, not(hasItem("some.dotted"))); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzerTests.java index f37378c8fa994..ca3b2f14ebc80 100644 
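// Illustrative aside, not part of the patch: the extra boolean now threaded through
// Configuration (visible in the TestUtils changes above) is combined with the per-table
// FROZEN marker when deciding whether frozen indices participate in mapping resolution.
// A minimal sketch of that decision with hypothetical names; in the patch itself the
// equivalent logic ORs configuration.includeFrozen() with the table-level frozen flag:
final class FrozenResolutionSketch {
    static boolean includeFrozenIndices(boolean requestOptIn, boolean tableMarkedFrozen) {
        // either the request-level include-frozen option or an explicit FROZEN table
        // reference in the statement enables frozen-index resolution
        return requestOptIn || tableMarkedFrozen;
    }
}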
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/PreAnalyzerTests.java @@ -24,8 +24,8 @@ public void testBasicIndex() { PreAnalysis result = preAnalyzer.preAnalyze(plan); assertThat(plan.preAnalyzed(), is(true)); assertThat(result.indices, hasSize(1)); - assertThat(result.indices.get(0).cluster(), nullValue()); - assertThat(result.indices.get(0).index(), is("index")); + assertThat(result.indices.get(0).id().cluster(), nullValue()); + assertThat(result.indices.get(0).id().index(), is("index")); } public void testBasicIndexWithCatalog() { @@ -33,8 +33,8 @@ public void testBasicIndexWithCatalog() { PreAnalysis result = preAnalyzer.preAnalyze(plan); assertThat(plan.preAnalyzed(), is(true)); assertThat(result.indices, hasSize(1)); - assertThat(result.indices.get(0).cluster(), is("elastic")); - assertThat(result.indices.get(0).index(), is("index")); + assertThat(result.indices.get(0).id().cluster(), is("elastic")); + assertThat(result.indices.get(0).id().index(), is("index")); } public void testWildIndexWithCatalog() { @@ -42,8 +42,8 @@ public void testWildIndexWithCatalog() { PreAnalysis result = preAnalyzer.preAnalyze(plan); assertThat(plan.preAnalyzed(), is(true)); assertThat(result.indices, hasSize(1)); - assertThat(result.indices.get(0).cluster(), is("elastic")); - assertThat(result.indices.get(0).index(), is("index*")); + assertThat(result.indices.get(0).id().cluster(), is("elastic")); + assertThat(result.indices.get(0).id().index(), is("index*")); } public void testQuotedIndex() { @@ -51,8 +51,8 @@ public void testQuotedIndex() { PreAnalysis result = preAnalyzer.preAnalyze(plan); assertThat(plan.preAnalyzed(), is(true)); assertThat(result.indices, hasSize(1)); - assertThat(result.indices.get(0).cluster(), nullValue()); - assertThat(result.indices.get(0).index(), is("aaa")); + assertThat(result.indices.get(0).id().cluster(), nullValue()); + assertThat(result.indices.get(0).id().index(), is("aaa")); } public void testQuotedCatalog() { @@ -60,8 +60,8 @@ public void testQuotedCatalog() { PreAnalysis result = preAnalyzer.preAnalyze(plan); assertThat(plan.preAnalyzed(), is(true)); assertThat(result.indices, hasSize(1)); - assertThat(result.indices.get(0).cluster(), is("elastic")); - assertThat(result.indices.get(0).index(), is("aaa")); + assertThat(result.indices.get(0).id().cluster(), is("elastic")); + assertThat(result.indices.get(0).id().index(), is("aaa")); } public void testComplicatedQuery() { @@ -69,7 +69,7 @@ public void testComplicatedQuery() { PreAnalysis result = preAnalyzer.preAnalyze(plan); assertThat(plan.preAnalyzed(), is(true)); assertThat(result.indices, hasSize(1)); - assertThat(result.indices.get(0).cluster(), nullValue()); - assertThat(result.indices.get(0).index(), is("aaa")); + assertThat(result.indices.get(0).id().cluster(), nullValue()); + assertThat(result.indices.get(0).id().index(), is("aaa")); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index dcf8dad5ecb79..f10b1a402708f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -241,6 +241,27 @@ public void 
testSubtractFromInterval() { error("SELECT INTERVAL 1 MONTH - CAST('12:23:56.789' AS TIME)")); } + public void testAddIntervalAndNumberNotAllowed() { + assertEquals("1:8: [+] has arguments with incompatible types [INTERVAL_DAY] and [INTEGER]", + error("SELECT INTERVAL 1 DAY + 100")); + assertEquals("1:8: [+] has arguments with incompatible types [INTEGER] and [INTERVAL_DAY]", + error("SELECT 100 + INTERVAL 1 DAY")); + } + + public void testSubtractIntervalAndNumberNotAllowed() { + assertEquals("1:8: [-] has arguments with incompatible types [INTERVAL_MINUTE] and [DOUBLE]", + error("SELECT INTERVAL 10 MINUTE - 100.0")); + assertEquals("1:8: [-] has arguments with incompatible types [DOUBLE] and [INTERVAL_MINUTE]", + error("SELECT 100.0 - INTERVAL 10 MINUTE")); + } + + public void testMultiplyIntervalWithDecimalNotAllowed() { + assertEquals("1:8: [*] has arguments with incompatible types [INTERVAL_MONTH] and [DOUBLE]", + error("SELECT INTERVAL 1 MONTH * 1.234")); + assertEquals("1:8: [*] has arguments with incompatible types [DOUBLE] and [INTERVAL_MONTH]", + error("SELECT 1.234 * INTERVAL 1 MONTH")); + } + public void testMultipleColumns() { assertEquals("1:43: Unknown column [xxx]\nline 1:8: Unknown column [xxx]", error("SELECT xxx FROM test GROUP BY DAY_oF_YEAR(xxx)")); @@ -773,4 +794,28 @@ public void testAggregateAliasInFilter() { public void testProjectUnresolvedAliasInFilter() { assertEquals("1:8: Unknown column [tni]", error("SELECT tni AS i FROM test WHERE i > 10 GROUP BY i")); } + + public void testGeoShapeInWhereClause() { + assertEquals("1:49: geo shapes cannot be used for filtering", + error("SELECT ST_AsWKT(shape) FROM test WHERE ST_AsWKT(shape) = 'point (10 20)'")); + + // We get only one message back because the messages are grouped by the node that caused the issue + assertEquals("1:46: geo shapes cannot be used for filtering", + error("SELECT MAX(ST_X(shape)) FROM test WHERE ST_Y(shape) > 10 GROUP BY ST_GEOMETRYTYPE(shape) ORDER BY ST_ASWKT(shape)")); + } + + public void testGeoShapeInGroupBy() { + assertEquals("1:44: geo shapes cannot be used in grouping", + error("SELECT ST_X(shape) FROM test GROUP BY ST_X(shape)")); + } + + public void testGeoShapeInOrderBy() { + assertEquals("1:44: geo shapes cannot be used for sorting", + error("SELECT ST_X(shape) FROM test ORDER BY ST_Z(shape)")); + } + + public void testGeoShapeInSelect() { + accept("SELECT ST_X(shape) FROM test"); + } + } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java index f2dccc396dbd3..811a8ff4256f2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java @@ -30,7 +30,7 @@ public static CompositeAggregationCursor randomCompositeCursor() { } return new CompositeAggregationCursor(new byte[randomInt(256)], extractors, randomBitSet(extractorsSize), - randomIntBetween(10, 1024), randomAlphaOfLength(5)); + randomIntBetween(10, 1024), randomBoolean(), randomAlphaOfLength(5)); } static BucketExtractor randomBucketExtractor() { @@ -46,7 +46,7 @@ protected CompositeAggregationCursor mutateInstance(CompositeAggregationCursor i return new CompositeAggregationCursor(instance.next(), instance.extractors(), randomValueOtherThan(instance.mask(), () -> 
randomBitSet(instance.extractors().size())), randomValueOtherThan(instance.limit(), () -> randomIntBetween(1, 512)), - instance.indices()); + randomBoolean(), instance.indices()); } @Override diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index 973d5b50fad00..50a3b185dba86 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.SqlException; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.DateUtils; @@ -451,6 +452,125 @@ public void testObjectsForSourceValue() throws IOException { assertThat(ex.getMessage(), is("Objects (returned by [" + fieldName + "]) are not supported")); } + public void testGeoShapeExtraction() { + String fieldName = randomAlphaOfLength(5); + FieldHitExtractor fe = new FieldHitExtractor(fieldName, DataType.GEO_SHAPE, UTC, false); + Map map = new HashMap<>(); + map.put(fieldName, "POINT (1 2)"); + assertEquals(new GeoShape(1, 2), fe.extractFromSource(map)); + + map = new HashMap<>(); + assertNull(fe.extractFromSource(map)); + } + + + public void testMultipleGeoShapeExtraction() { + String fieldName = randomAlphaOfLength(5); + FieldHitExtractor fe = new FieldHitExtractor(fieldName, DataType.GEO_SHAPE, UTC, false); + Map map = new HashMap<>(); + map.put(fieldName, "POINT (1 2)"); + assertEquals(new GeoShape(1, 2), fe.extractFromSource(map)); + + map = new HashMap<>(); + assertNull(fe.extractFromSource(map)); + + Map map2 = new HashMap<>(); + map2.put(fieldName, Arrays.asList("POINT (1 2)", "POINT (3 4)")); + SqlException ex = expectThrows(SqlException.class, () -> fe.extractFromSource(map2)); + assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); + + FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, DataType.GEO_SHAPE, UTC, false, true); + assertEquals(new GeoShape(1, 2), lenientFe.extractFromSource(map2)); + } + + public void testGeoPointExtractionFromSource() throws IOException { + int layers = randomIntBetween(1, 3); + String pathCombined = ""; + double lat = randomDoubleBetween(-90, 90, true); + double lon = randomDoubleBetween(-180, 180, true); + SearchHit hit = new SearchHit(1); + XContentBuilder source = JsonXContent.contentBuilder(); + boolean[] arrayWrap = new boolean[layers - 1]; + source.startObject(); { + for (int i = 0; i < layers - 1; i++) { + arrayWrap[i] = randomBoolean(); + String name = randomAlphaOfLength(10); + source.field(name); + if (arrayWrap[i]) { + source.startArray(); + } + source.startObject(); + pathCombined = pathCombined + name + "."; + } + String name = randomAlphaOfLength(10); + pathCombined = pathCombined + name; + source.field(name, randomPoint(lat, lon)); + for (int i = layers - 2; i >= 0; i--) { + source.endObject(); + if (arrayWrap[i]) { + source.endArray(); + } + } + } + source.endObject(); + BytesReference sourceRef = BytesReference.bytes(source); + hit.sourceRef(sourceRef); + + FieldHitExtractor fe = new 
FieldHitExtractor(pathCombined, DataType.GEO_POINT, UTC, false); + assertEquals(new GeoShape(lon, lat), fe.extract(hit)); + } + + public void testMultipleGeoPointExtractionFromSource() throws IOException { + double lat = randomDoubleBetween(-90, 90, true); + double lon = randomDoubleBetween(-180, 180, true); + SearchHit hit = new SearchHit(1); + String fieldName = randomAlphaOfLength(5); + int arraySize = randomIntBetween(2, 4); + XContentBuilder source = JsonXContent.contentBuilder(); + source.startObject(); { + source.startArray(fieldName); + source.value(randomPoint(lat, lon)); + for (int i = 1; i < arraySize; i++) { + source.value(randomPoint(lat, lon)); + } + source.endArray(); + } + source.endObject(); + BytesReference sourceRef = BytesReference.bytes(source); + hit.sourceRef(sourceRef); + + FieldHitExtractor fe = new FieldHitExtractor(fieldName, DataType.GEO_POINT, UTC, false); + SqlException ex = expectThrows(SqlException.class, () -> fe.extract(hit)); + assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); + + FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, DataType.GEO_POINT, UTC, false, true); + assertEquals(new GeoShape(lon, lat), lenientFe.extract(hit)); + } + + public void testGeoPointExtractionFromDocValues() { + String fieldName = randomAlphaOfLength(5); + FieldHitExtractor fe = new FieldHitExtractor(fieldName, DataType.GEO_POINT, UTC, true); + SearchHit hit = new SearchHit(1); + DocumentField field = new DocumentField(fieldName, singletonList("2, 1")); + hit.fields(singletonMap(fieldName, field)); + assertEquals(new GeoShape(1, 2), fe.extract(hit)); + hit = new SearchHit(1); + assertNull(fe.extract(hit)); + } + + public void testGeoPointExtractionFromMultipleDocValues() { + String fieldName = randomAlphaOfLength(5); + SearchHit hit = new SearchHit(1); + FieldHitExtractor fe = new FieldHitExtractor(fieldName, DataType.GEO_POINT, UTC, true); + + hit.fields(singletonMap(fieldName, new DocumentField(fieldName, Arrays.asList("2,1", "3,4")))); + SqlException ex = expectThrows(SqlException.class, () -> fe.extract(hit)); + assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); + + FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, DataType.GEO_POINT, UTC, true, true); + assertEquals(new GeoShape(1, 2), lenientFe.extract(hit)); + } + private FieldHitExtractor getFieldHitExtractor(String fieldName, boolean useDocValue) { return new FieldHitExtractor(fieldName, null, UTC, useDocValue); } @@ -471,4 +591,18 @@ private Object randomNonNullValue() { ESTestCase::randomDouble)); return value.get(); } + + private Object randomPoint(double lat, double lon) { + Supplier value = randomFrom(Arrays.asList( + () -> lat + "," + lon, + () -> Arrays.asList(lon, lat), + () -> { + Map map1 = new HashMap<>(); + map1.put("lat", lat); + map1.put("lon", lon); + return map1; + } + )); + return value.get(); + } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java index 6581781c70072..0156d8fdfb59a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java @@ -29,9 +29,9 @@ public void testDatabaseFunctionOutput() { EsIndex test 
= new EsIndex("test", TypesTests.loadMapping("mapping-basic.json", true)); Analyzer analyzer = new Analyzer( new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, - Protocol.PAGE_TIMEOUT, null, + Protocol.PAGE_TIMEOUT, null, randomFrom(Mode.values()), randomAlphaOfLength(10), - null, clusterName, randomBoolean()), + null, clusterName, randomBoolean(), randomBoolean()), new FunctionRegistry(), IndexResolution.valid(test), new Verifier(new Metrics()) diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java index 190bc273d7a5e..f8b3ed1976450 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java @@ -30,7 +30,8 @@ public void testNoUsernameFunctionOutput() { new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, randomFrom(Mode.values()), randomAlphaOfLength(10), - null, randomAlphaOfLengthBetween(1, 15), randomBoolean()), + null, randomAlphaOfLengthBetween(1, 15), + randomBoolean(), randomBoolean()), new FunctionRegistry(), IndexResolution.valid(test), new Verifier(new Metrics()) diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessorTests.java new file mode 100644 index 0000000000000..07cc6171cf013 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoProcessorTests.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoProcessor.GeoOperation; + +import java.io.IOException; + +public class GeoProcessorTests extends AbstractWireSerializingTestCase { + public static GeoProcessor randomGeoProcessor() { + return new GeoProcessor(randomFrom(GeoOperation.values())); + } + + @Override + protected GeoProcessor createTestInstance() { + return randomGeoProcessor(); + } + + @Override + protected Reader instanceReader() { + return GeoProcessor::new; + } + + @Override + protected GeoProcessor mutateInstance(GeoProcessor instance) throws IOException { + return new GeoProcessor(randomValueOtherThan(instance.processor(), () -> randomFrom(GeoOperation.values()))); + } + + public void testApplyAsWKT() throws Exception { + assertEquals("point (10.0 20.0)", new GeoProcessor(GeoOperation.ASWKT).process(new GeoShape(10, 20))); + assertEquals("point (10.0 20.0)", new GeoProcessor(GeoOperation.ASWKT).process(new GeoShape("POINT (10 20)"))); + } + + public void testApplyGeometryType() throws Exception { + assertEquals("POINT", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process(new GeoShape(10, 20))); + assertEquals("POINT", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process(new GeoShape("POINT (10 20)"))); + assertEquals("MULTIPOINT", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process(new GeoShape("multipoint (2.0 1.0)"))); + assertEquals("LINESTRING", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process(new GeoShape("LINESTRING (3.0 1.0, 4.0 2.0)"))); + assertEquals("POLYGON", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process( + new GeoShape("polygon ((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0))"))); + assertEquals("MULTILINESTRING", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process( + new GeoShape("multilinestring ((3.0 1.0, 4.0 2.0), (2.0 1.0, 5.0 6.0))"))); + assertEquals("MULTIPOLYGON", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process( + new GeoShape("multipolygon (((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0)))"))); + assertEquals("ENVELOPE", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process(new GeoShape("bbox (10.0, 20.0, 40.0, 30.0)"))); + assertEquals("GEOMETRYCOLLECTION", new GeoProcessor(GeoOperation.GEOMETRY_TYPE).process( + new GeoShape("geometrycollection (point (20.0 10.0),point (1.0 2.0))"))); + } + + + public void testApplyGetXYZ() throws Exception { + assertEquals(10.0, new GeoProcessor(GeoOperation.X).process(new GeoShape(10, 20))); + assertEquals(20.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape(10, 20))); + assertNull(new GeoProcessor(GeoOperation.Z).process(new GeoShape(10, 20))); + assertEquals(10.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("POINT (10 20)"))); + assertEquals(20.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("POINT (10 20)"))); + assertEquals(10.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("POINT (10 20 30)"))); + assertEquals(20.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("POINT (10 20 30)"))); + assertEquals(30.0, new GeoProcessor(GeoOperation.Z).process(new GeoShape("POINT (10 20 30)"))); + assertEquals(2.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("multipoint (2.0 1.0)"))); + assertEquals(1.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("multipoint (2.0 1.0)"))); + 
assertEquals(3.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("LINESTRING (3.0 1.0, 4.0 2.0)"))); + assertEquals(1.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("LINESTRING (3.0 1.0, 4.0 2.0)"))); + assertEquals(3.0, new GeoProcessor(GeoOperation.X).process( + new GeoShape("multilinestring ((3.0 1.0, 4.0 2.0), (2.0 1.0, 5.0 6.0))"))); + assertEquals(1.0, new GeoProcessor(GeoOperation.Y).process( + new GeoShape("multilinestring ((3.0 1.0, 4.0 2.0), (2.0 1.0, 5.0 6.0))"))); + // minX minX, maxX, maxY, minY + assertEquals(10.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("bbox (10.0, 20.0, 40.0, 30.0)"))); + // minY minX, maxX, maxY, minY + assertEquals(30.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("bbox (10.0, 20.0, 40.0, 30.0)"))); + assertEquals(20.0, new GeoProcessor(GeoOperation.X).process( + new GeoShape("geometrycollection (point (20.0 10.0),point (1.0 2.0))"))); + assertEquals(10.0, new GeoProcessor(GeoOperation.Y).process( + new GeoShape("geometrycollection (point (20.0 10.0),point (1.0 2.0))"))); + } + + public void testApplyGetXYZToPolygons() throws Exception { + assertEquals(3.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("polygon ((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0))"))); + assertEquals(1.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("polygon ((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0))"))); + assertNull(new GeoProcessor(GeoOperation.Z).process(new GeoShape("polygon ((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0))"))); + assertEquals(5.0, new GeoProcessor(GeoOperation.Z).process( + new GeoShape("polygon ((3.0 1.0 5.0, 4.0 2.0 6.0, 4.0 3.0 7.0, 3.0 1.0 5.0))"))); + assertEquals(3.0, new GeoProcessor(GeoOperation.X).process(new GeoShape("multipolygon (((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0)))"))); + assertEquals(1.0, new GeoProcessor(GeoOperation.Y).process(new GeoShape("multipolygon (((3.0 1.0, 4.0 2.0, 4.0 3.0, 3.0 1.0)))"))); + } + + public void testApplyNull() { + for (GeoOperation op : GeoOperation.values()) { + GeoProcessor proc = new GeoProcessor(op); + assertNull(proc.process(null)); + } + } + + public void testTypeCheck() { + GeoProcessor proc = new GeoProcessor(GeoOperation.ASWKT); + SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, () -> proc.process("string")); + assertEquals("A geo_point or geo_shape is required; received [string]", siae.getMessage()); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessorTests.java new file mode 100644 index 0000000000000..9f78f8b3df43b --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StDistanceProcessorTests.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; +import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; +import org.elasticsearch.xpack.sql.expression.gen.processor.ChainingProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; + +import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l; +import static org.elasticsearch.xpack.sql.tree.Source.EMPTY; +import static org.hamcrest.Matchers.instanceOf; + +public class StDistanceProcessorTests extends AbstractWireSerializingTestCase { + + public StDistanceProcessor createTestInstance() { + return new StDistanceProcessor( + constantPoint(randomDoubleBetween(-180, 180, true), randomDoubleBetween(-90, 90, true)), + constantPoint(randomDoubleBetween(-180, 180, true), randomDoubleBetween(-90, 90, true)) + ); + } + + public static Processor constantPoint(double lon, double lat) { + return new ChainingProcessor(new ConstantProcessor("point (" + lon + " " + lat + ")"), StWkttosqlProcessor.INSTANCE); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(Processors.getNamedWriteables()); + } + + public void testApply() { + StDistanceProcessor proc = new StDistanceProcessor(constantPoint(10, 20), constantPoint(30, 40)); + Object result = proc.process(null); + assertThat(result, instanceOf(Double.class)); + assertEquals(GeoUtils.arcDistance(20, 10, 40, 30), (double) result, 0.000001); + } + + public void testNullHandling() { + assertNull(new StDistance(EMPTY, l(new GeoShape(1, 2)), l(null)).makePipe().asProcessor().process(null)); + assertNull(new StDistance(EMPTY, l(null), l(new GeoShape(1, 2))).makePipe().asProcessor().process(null)); + } + + public void testTypeCheck() { + SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, + () -> new StDistance(EMPTY, l("foo"), l(new GeoShape(1, 2))).makePipe().asProcessor().process(null)); + assertEquals("A geo_point or geo_shape with type point is required; received [foo]", siae.getMessage()); + + siae = expectThrows(SqlIllegalArgumentException.class, + () -> new StDistance(EMPTY, l(new GeoShape(1, 2)), l("bar")).makePipe().asProcessor().process(null)); + assertEquals("A geo_point or geo_shape with type point is required; received [bar]", siae.getMessage()); + } + + @Override + protected Writeable.Reader instanceReader() { + return StDistanceProcessor::new; + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java new file mode 100644 index 0000000000000..fc7b33ae905d7 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.geo; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; + +import static org.hamcrest.Matchers.instanceOf; + +public class StWkttosqlProcessorTests extends ESTestCase { + public static StWkttosqlProcessor randomStWkttosqlProcessor() { + return new StWkttosqlProcessor(); + } + + public void testApply() { + StWkttosqlProcessor proc = new StWkttosqlProcessor(); + assertNull(proc.process(null)); + Object result = proc.process("POINT (10 20)"); + assertThat(result, instanceOf(GeoShape.class)); + GeoShape geoShape = (GeoShape) result; + assertEquals("point (10.0 20.0)", geoShape.toString()); + } + + public void testTypeCheck() { + StWkttosqlProcessor procPoint = new StWkttosqlProcessor(); + SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process(42)); + assertEquals("A string is required; received [42]", siae.getMessage()); + + siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process("some random string")); + assertEquals("Cannot parse [some random string] as a geo_shape value", siae.getMessage()); + + siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process("point (foo bar)")); + assertEquals("Cannot parse [point (foo bar)] as a geo_shape value", siae.getMessage()); + + + siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process("point (10 10")); + assertEquals("Cannot parse [point (10 10] as a geo_shape value", siae.getMessage()); + } +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index b2e5eebe5ea5f..93f6515f71062 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.IsoWeekOfYear; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthOfYear; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Year; +import org.elasticsearch.xpack.sql.expression.function.scalar.geo.StDistance; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ACos; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ASin; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ATan; @@ -195,7 +196,7 @@ private static FieldAttribute getFieldAttribute(String name) { } public void testPruneSubqueryAliases() { - ShowTables s = new ShowTables(EMPTY, null, null); + ShowTables s = new ShowTables(EMPTY, null, null, false); SubQueryAlias plan = new SubQueryAlias(EMPTY, s, "show"); LogicalPlan result = new PruneSubqueryAliases().apply(plan); assertEquals(result, s); @@ -764,6 +765,15 @@ public void testLiteralsOnTheRight() { assertEquals(FIVE, nullEquals.right()); } + public void testLiteralsOnTheRightInStDistance() { + Alias a = new Alias(EMPTY, "a", L(10)); + Expression result = new BooleanLiteralsOnTheRight().rule(new StDistance(EMPTY, FIVE, a)); + assertTrue(result instanceof StDistance); + StDistance sd = (StDistance) result; + assertEquals(a, sd.left()); + assertEquals(FIVE, sd.right()); + } + public void 
testBoolSimplifyNotIsNullAndNotIsNotNull() { BooleanSimplification simplification = new BooleanSimplification(); assertTrue(simplification.rule(new Not(EMPTY, new IsNull(EMPTY, ONE))) instanceof IsNotNull); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java index d1e05b6ec532c..f9b0fc18bca52 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.sql.parser; import com.google.common.base.Joiner; - import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.Order; @@ -18,19 +17,10 @@ import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MultiMatchQueryPredicate; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Add; -import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanExpressionContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryPrimaryDefaultContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryTermContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StatementContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StatementDefaultContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ValueExpressionContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ValueExpressionDefaultContext; import org.elasticsearch.xpack.sql.plan.logical.Filter; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; import org.elasticsearch.xpack.sql.plan.logical.Project; -import org.elasticsearch.xpack.sql.plan.logical.With; import java.util.ArrayList; import java.util.List; @@ -198,86 +188,44 @@ public void testMultiMatchQuery() { assertThat(mmqp.optionMap(), hasEntry("fuzzy_rewrite", "scoring_boolean")); } - public void testLimitToPreventStackOverflowFromLongListOfQuotedIdentifiers() { - // Create expression in the form of "t"."field","t"."field", ... - - // 200 elements is ok - new SqlParser().createStatement("SELECT " + - Joiner.on(",").join(nCopies(200, "\"t\".\"field\"")) + " FROM t"); - - // 201 elements parser's "circuit breaker" is triggered - ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createStatement("SELECT " + - Joiner.on(",").join(nCopies(201, "\"t\".\"field\"")) + " FROM t")); - assertEquals("line 1:2409: SQL statement too large; halt parsing to prevent memory errors (stopped at depth 200)", - e.getMessage()); - } - - public void testLimitToPreventStackOverflowFromLongListOfUnQuotedIdentifiers() { - // Create expression in the form of t.field,t.field, ... 
- - // 250 elements is ok - new SqlParser().createStatement("SELECT " + - Joiner.on(",").join(nCopies(200, "t.field")) + " FROM t"); - - // 251 elements parser's "circuit breaker" is triggered - ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createStatement("SELECT " + - Joiner.on(",").join(nCopies(201, "t.field")) + " FROM t")); - assertEquals("line 1:1609: SQL statement too large; halt parsing to prevent memory errors (stopped at depth 200)", - e.getMessage()); - } - - public void testLimitToPreventStackOverflowFromLargeUnaryBooleanExpression() { - // Create expression in the form of NOT(NOT(NOT ... (b) ...) - - // 99 elements is ok - new SqlParser().createExpression( - Joiner.on("").join(nCopies(99, "NOT(")).concat("b").concat(Joiner.on("").join(nCopies(99, ")")))); - - // 100 elements parser's "circuit breaker" is triggered - ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createExpression( - Joiner.on("").join(nCopies(100, "NOT(")).concat("b").concat(Joiner.on("").join(nCopies(100, ")"))))); - assertEquals("line 1:402: SQL statement too large; halt parsing to prevent memory errors (stopped at depth 200)", - e.getMessage()); - } - public void testLimitToPreventStackOverflowFromLargeBinaryBooleanExpression() { // Create expression in the form of a = b OR a = b OR ... a = b - // 100 elements is ok - new SqlParser().createExpression(Joiner.on(" OR ").join(nCopies(100, "a = b"))); + // 1000 elements is ok + new SqlParser().createExpression(Joiner.on(" OR ").join(nCopies(1000, "a = b"))); - // 101 elements parser's "circuit breaker" is triggered + // 5000 elements cause stack overflow ParsingException e = expectThrows(ParsingException.class, () -> - new SqlParser().createExpression(Joiner.on(" OR ").join(nCopies(101, "a = b")))); - assertEquals("line 1:902: SQL statement too large; halt parsing to prevent memory errors (stopped at depth 200)", - e.getMessage()); + new SqlParser().createExpression(Joiner.on(" OR ").join(nCopies(5000, "a = b")))); + assertThat(e.getMessage(), + startsWith("line -1:0: SQL statement is too large, causing stack overflow when generating the parsing tree: [")); } public void testLimitToPreventStackOverflowFromLargeUnaryArithmeticExpression() { // Create expression in the form of abs(abs(abs ... (i) ...) - // 199 elements is ok + // 200 elements is ok new SqlParser().createExpression( - Joiner.on("").join(nCopies(199, "abs(")).concat("i").concat(Joiner.on("").join(nCopies(199, ")")))); + Joiner.on("").join(nCopies(200, "abs(")).concat("i").concat(Joiner.on("").join(nCopies(200, ")")))); - // 200 elements parser's "circuit breaker" is triggered + // 5000 elements cause stack overflow ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createExpression( - Joiner.on("").join(nCopies(200, "abs(")).concat("i").concat(Joiner.on("").join(nCopies(200, ")"))))); - assertEquals("line 1:802: SQL statement too large; halt parsing to prevent memory errors (stopped at depth 200)", - e.getMessage()); + Joiner.on("").join(nCopies(1000, "abs(")).concat("i").concat(Joiner.on("").join(nCopies(1000, ")"))))); + assertThat(e.getMessage(), + startsWith("line -1:0: SQL statement is too large, causing stack overflow when generating the parsing tree: [")); } public void testLimitToPreventStackOverflowFromLargeBinaryArithmeticExpression() { // Create expression in the form of a + a + a + ... 
+ a - // 200 elements is ok - new SqlParser().createExpression(Joiner.on(" + ").join(nCopies(200, "a"))); + // 1000 elements is ok + new SqlParser().createExpression(Joiner.on(" + ").join(nCopies(1000, "a"))); - // 201 elements parser's "circuit breaker" is triggered + // 5000 elements cause stack overflow ParsingException e = expectThrows(ParsingException.class, () -> - new SqlParser().createExpression(Joiner.on(" + ").join(nCopies(201, "a")))); - assertEquals("line 1:802: SQL statement too large; halt parsing to prevent memory errors (stopped at depth 200)", - e.getMessage()); + new SqlParser().createExpression(Joiner.on(" + ").join(nCopies(5000, "a")))); + assertThat(e.getMessage(), + startsWith("line -1:0: SQL statement is too large, causing stack overflow when generating the parsing tree: [")); } public void testLimitToPreventStackOverflowFromLargeSubselectTree() { @@ -289,92 +237,13 @@ public void testLimitToPreventStackOverflowFromLargeSubselectTree() { .concat("t") .concat(Joiner.on("").join(nCopies(199, ")")))); - // 201 elements parser's "circuit breaker" is triggered + // 500 elements cause stack overflow ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createStatement( - Joiner.on(" (").join(nCopies(201, "SELECT * FROM")) + Joiner.on(" (").join(nCopies(500, "SELECT * FROM")) .concat("t") - .concat(Joiner.on("").join(nCopies(200, ")"))))); - assertEquals("line 1:3002: SQL statement too large; halt parsing to prevent memory errors (stopped at depth 200)", - e.getMessage()); - } - - public void testLimitToPreventStackOverflowFromLargeComplexSubselectTree() { - // Test with queries in the form of `SELECT true OR true OR .. FROM (SELECT true OR true OR... FROM (... t) ...) - - new SqlParser().createStatement( - Joiner.on(" (").join(nCopies(20, "SELECT ")). - concat(Joiner.on(" OR ").join(nCopies(180, "true"))).concat(" FROM") - .concat("t").concat(Joiner.on("").join(nCopies(19, ")")))); - - ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createStatement( - Joiner.on(" (").join(nCopies(20, "SELECT ")). 
- concat(Joiner.on(" OR ").join(nCopies(190, "true"))).concat(" FROM") - .concat("t").concat(Joiner.on("").join(nCopies(19, ")"))))); - assertEquals("line 1:1628: SQL statement too large; halt parsing to prevent memory errors (stopped at depth 200)", - e.getMessage()); - } - - public void testLimitStackOverflowForInAndLiteralsIsNotApplied() { - int noChildren = 10_000; - LogicalPlan plan = parseStatement("SELECT * FROM t WHERE a IN(" + - Joiner.on(",").join(nCopies(noChildren, "a + 10")) + "," + - Joiner.on(",").join(nCopies(noChildren, "-(-a - 10)")) + "," + - Joiner.on(",").join(nCopies(noChildren, "20")) + "," + - Joiner.on(",").join(nCopies(noChildren, "-20")) + "," + - Joiner.on(",").join(nCopies(noChildren, "20.1234")) + "," + - Joiner.on(",").join(nCopies(noChildren, "-20.4321")) + "," + - Joiner.on(",").join(nCopies(noChildren, "1.1234E56")) + "," + - Joiner.on(",").join(nCopies(noChildren, "-1.4321E-65")) + "," + - Joiner.on(",").join(nCopies(noChildren, "'foo'")) + "," + - Joiner.on(",").join(nCopies(noChildren, "'bar'")) + ")"); - - assertEquals(With.class, plan.getClass()); - assertEquals(Project.class, ((With) plan).child().getClass()); - assertEquals(Filter.class, ((Project) ((With) plan).child()).child().getClass()); - Filter filter = (Filter) ((Project) ((With) plan).child()).child(); - assertEquals(In.class, filter.condition().getClass()); - In in = (In) filter.condition(); - assertEquals("?a", in.value().toString()); - assertEquals(noChildren * 2 + 8, in.list().size()); - assertThat(in.list().get(0).toString(), startsWith("Add[?a,10]#")); - assertThat(in.list().get(noChildren).toString(), startsWith("Neg[Sub[Neg[?a]#")); - assertEquals("20", in.list().get(noChildren * 2).toString()); - assertEquals("-20", in.list().get(noChildren * 2 + 1).toString()); - assertEquals("20.1234", in.list().get(noChildren * 2 + 2).toString()); - assertEquals("-20.4321", in.list().get(noChildren * 2 + 3).toString()); - assertEquals("1.1234E56", in.list().get(noChildren * 2 + 4).toString()); - assertEquals("-1.4321E-65", in.list().get(noChildren * 2 + 5).toString()); - assertEquals("'foo'=foo", in.list().get(noChildren * 2 + 6).toString()); - assertEquals("'bar'=bar", in.list().get(noChildren * 2 + 7).toString()); - } - - public void testDecrementOfDepthCounter() { - SqlParser.CircuitBreakerListener cbl = new SqlParser.CircuitBreakerListener(); - StatementContext sc = new StatementContext(); - QueryTermContext qtc = new QueryTermContext(); - ValueExpressionContext vec = new ValueExpressionContext(); - BooleanExpressionContext bec = new BooleanExpressionContext(); - - cbl.enterEveryRule(sc); - cbl.enterEveryRule(sc); - cbl.enterEveryRule(qtc); - cbl.enterEveryRule(qtc); - cbl.enterEveryRule(qtc); - cbl.enterEveryRule(vec); - cbl.enterEveryRule(bec); - cbl.enterEveryRule(bec); - - cbl.exitEveryRule(new StatementDefaultContext(sc)); - cbl.exitEveryRule(new StatementDefaultContext(sc)); - cbl.exitEveryRule(new QueryPrimaryDefaultContext(qtc)); - cbl.exitEveryRule(new QueryPrimaryDefaultContext(qtc)); - cbl.exitEveryRule(new ValueExpressionDefaultContext(vec)); - cbl.exitEveryRule(new SqlBaseParser.BooleanDefaultContext(bec)); - - assertEquals(0, cbl.depthCounts().get(SqlBaseParser.StatementContext.class.getSimpleName())); - assertEquals(1, cbl.depthCounts().get(SqlBaseParser.QueryTermContext.class.getSimpleName())); - assertEquals(0, cbl.depthCounts().get(SqlBaseParser.ValueExpressionContext.class.getSimpleName())); - assertEquals(1, 
cbl.depthCounts().get(SqlBaseParser.BooleanExpressionContext.class.getSimpleName())); + .concat(Joiner.on("").join(nCopies(499, ")"))))); + assertThat(e.getMessage(), + startsWith("line -1:0: SQL statement is too large, causing stack overflow when generating the parsing tree: [")); } private LogicalPlan parseStatement(String sql) { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelationTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelationTests.java index f8cfd179fd0f2..8dda75b889576 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelationTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/UnresolvedRelationTests.java @@ -7,8 +7,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.plan.TableIdentifier; -import org.elasticsearch.xpack.sql.tree.SourceTests; import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.tree.SourceTests; import java.util.ArrayList; import java.util.List; @@ -23,17 +23,19 @@ public void testEqualsAndHashCode() { TableIdentifier table = new TableIdentifier(source, randomAlphaOfLength(5), randomAlphaOfLength(5)); String alias = randomBoolean() ? null : randomAlphaOfLength(5); String unresolvedMessage = randomAlphaOfLength(5); - UnresolvedRelation relation = new UnresolvedRelation(source, table, alias, unresolvedMessage); + UnresolvedRelation relation = new UnresolvedRelation(source, table, alias, randomBoolean(), unresolvedMessage); List> mutators = new ArrayList<>(); mutators.add(r -> new UnresolvedRelation( SourceTests.mutate(r.source()), r.table(), r.alias(), + r.frozen(), r.unresolvedMessage())); mutators.add(r -> new UnresolvedRelation( r.source(), new TableIdentifier(r.source(), r.table().cluster(), r.table().index() + "m"), r.alias(), + r.frozen(), r.unresolvedMessage())); mutators.add(r -> new UnresolvedRelation( r.source(), @@ -41,14 +43,16 @@ public void testEqualsAndHashCode() { randomValueOtherThanMany( a -> Objects.equals(a, r.alias()), () -> randomBoolean() ? 
null : randomAlphaOfLength(5)), + r.frozen(), r.unresolvedMessage())); mutators.add(r -> new UnresolvedRelation( r.source(), r.table(), r.alias(), + r.frozen(), randomValueOtherThan(r.unresolvedMessage(), () -> randomAlphaOfLength(5)))); checkEqualsAndHashCode(relation, - r -> new UnresolvedRelation(r.source(), r.table(), r.alias(), r.unresolvedMessage()), + r -> new UnresolvedRelation(r.source(), r.table(), r.alias(), r.frozen(), r.unresolvedMessage()), r -> randomFrom(mutators).apply(r)); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java index bb4fb02ea7e85..9c8c32689b70e 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java @@ -37,6 +37,7 @@ import static java.util.Collections.singletonList; import static org.elasticsearch.action.ActionListener.wrap; import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyBoolean; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -47,7 +48,7 @@ public class SysColumnsTests extends ESTestCase { private final SqlParser parser = new SqlParser(); private final Map mapping = TypesTests.loadMapping("mapping-multi-field-with-nested.json", true); - private final IndexInfo index = new IndexInfo("test_emp", IndexType.INDEX); + private final IndexInfo index = new IndexInfo("test_emp", IndexType.STANDARD_INDEX); private final IndexInfo alias = new IndexInfo("alias", IndexType.ALIAS); @@ -56,7 +57,7 @@ public void testSysColumns() { SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null, randomValueOtherThanMany(Mode::isDriver, () -> randomFrom(Mode.values()))); // nested fields are ignored - assertEquals(13, rows.size()); + assertEquals(15, rows.size()); assertEquals(24, rows.get(0).size()); List row = rows.get(0); @@ -143,7 +144,7 @@ public void testSysColumnsInOdbcMode() { List> rows = new ArrayList<>(); SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null, Mode.ODBC); - assertEquals(13, rows.size()); + assertEquals(15, rows.size()); assertEquals(24, rows.get(0).size()); List row = rows.get(0); @@ -232,7 +233,7 @@ public void testSysColumnsInOdbcMode() { assertEquals(Short.class, nullable(row).getClass()); assertEquals(Short.class, sqlDataType(row).getClass()); assertEquals(Short.class, sqlDataTypeSub(row).getClass()); - + row = rows.get(9); assertEquals("some.ambiguous", name(row)); assertEquals((short) Types.VARCHAR, sqlType(row)); @@ -278,7 +279,7 @@ public void testSysColumnsInJdbcMode() { List> rows = new ArrayList<>(); SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null, Mode.JDBC); - assertEquals(13, rows.size()); + assertEquals(15, rows.size()); assertEquals(24, rows.get(0).size()); List row = rows.get(0); @@ -462,7 +463,7 @@ public void testSysColumnsNoArg() throws Exception { public void testSysColumnsWithCatalogWildcard() throws Exception { executeCommand("SYS COLUMNS CATALOG 'cluster' TABLE LIKE 'test' LIKE '%'", emptyList(), r -> { - assertEquals(13, r.size()); + assertEquals(14, r.size()); 
assertEquals(CLUSTER_NAME, r.column(0)); assertEquals("test", r.column(2)); assertEquals("bool", r.column(3)); @@ -475,7 +476,7 @@ public void testSysColumnsWithCatalogWildcard() throws Exception { public void testSysColumnsWithMissingCatalog() throws Exception { executeCommand("SYS COLUMNS TABLE LIKE 'test' LIKE '%'", emptyList(), r -> { - assertEquals(13, r.size()); + assertEquals(14, r.size()); assertEquals(CLUSTER_NAME, r.column(0)); assertEquals("test", r.column(2)); assertEquals("bool", r.column(3)); @@ -488,7 +489,7 @@ public void testSysColumnsWithMissingCatalog() throws Exception { public void testSysColumnsWithNullCatalog() throws Exception { executeCommand("SYS COLUMNS CATALOG ? TABLE LIKE 'test' LIKE '%'", singletonList(new SqlTypedParamValue("keyword", null)), r -> { - assertEquals(13, r.size()); + assertEquals(14, r.size()); assertEquals(CLUSTER_NAME, r.column(0)); assertEquals("test", r.column(2)); assertEquals("bool", r.column(3)); @@ -509,9 +510,9 @@ private void executeCommand(String sql, List params, Consume EsIndex test = new EsIndex("test", mapping); doAnswer(invocation -> { - ((ActionListener) invocation.getArguments()[2]).onResponse(IndexResolution.valid(test)); + ((ActionListener) invocation.getArguments()[3]).onResponse(IndexResolution.valid(test)); return Void.TYPE; - }).when(resolver).resolveAsMergedMapping(any(), any(), any()); + }).when(resolver).resolveAsMergedMapping(any(), any(), anyBoolean(), any()); tuple.v1().execute(tuple.v2(), wrap(consumer::accept, ex -> fail(ex.getMessage()))); } @@ -528,4 +529,4 @@ private Tuple sql(String sql, List para SqlSession session = new SqlSession(TestUtils.TEST_CFG, null, null, resolver, null, null, null, null, null); return new Tuple<>(cmd, session); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java index d7a24681329cb..d4db97aba09cf 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java @@ -19,13 +19,17 @@ import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.sql.parser.SqlParser; import org.elasticsearch.xpack.sql.plan.logical.command.Command; +import org.elasticsearch.xpack.sql.proto.Mode; +import org.elasticsearch.xpack.sql.proto.Protocol; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; +import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.type.DataTypes; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.type.TypesTests; +import org.elasticsearch.xpack.sql.util.DateUtils; import java.util.Comparator; import java.util.Iterator; @@ -48,8 +52,12 @@ public class SysTablesTests extends ESTestCase { private final SqlParser parser = new SqlParser(); private final Map mapping = TypesTests.loadMapping("mapping-multi-field-with-nested.json", true); - private final IndexInfo index = new IndexInfo("test", IndexType.INDEX); + private final IndexInfo index = new IndexInfo("test", IndexType.STANDARD_INDEX); private final IndexInfo alias = new IndexInfo("alias", 
IndexType.ALIAS); + private final IndexInfo frozen = new IndexInfo("frozen", IndexType.FROZEN_INDEX); + + private final Configuration FROZEN_CFG = new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, + Protocol.PAGE_TIMEOUT, null, Mode.PLAIN, null, null, null, false, true); // // catalog enumeration @@ -107,7 +115,7 @@ public void testSysTablesTypesEnumeration() throws Exception { executeCommand("SYS TABLES CATALOG LIKE '' LIKE '' TYPE '%'", r -> { assertEquals(2, r.size()); - Iterator it = IndexType.VALID.stream().sorted(Comparator.comparing(IndexType::toSql)).iterator(); + Iterator it = IndexType.VALID_REGULAR.stream().sorted(Comparator.comparing(IndexType::toSql)).iterator(); for (int t = 0; t < r.size(); t++) { assertEquals(it.next().toSql(), r.column(3)); @@ -149,6 +157,20 @@ public void testSysTablesNoTypes() throws Exception { }, index, alias); } + public void testSysTablesNoTypesAndFrozen() throws Exception { + executeCommand("SYS TABLES", r -> { + assertEquals(3, r.size()); + assertEquals("frozen", r.column(2)); + assertEquals("BASE TABLE", r.column(3)); + assertTrue(r.advanceRow()); + assertEquals("test", r.column(2)); + assertEquals("BASE TABLE", r.column(3)); + assertTrue(r.advanceRow()); + assertEquals("alias", r.column(2)); + assertEquals("VIEW", r.column(3)); + }, FROZEN_CFG, index, alias, frozen); + } + public void testSysTablesWithLegacyTypes() throws Exception { executeCommand("SYS TABLES TYPE 'TABLE', 'ALIAS'", r -> { assertEquals(2, r.size()); @@ -171,6 +193,20 @@ public void testSysTablesWithProperTypes() throws Exception { }, index, alias); } + public void testSysTablesWithProperTypesAndFrozen() throws Exception { + executeCommand("SYS TABLES TYPE 'BASE TABLE', 'ALIAS'", r -> { + assertEquals(3, r.size()); + assertEquals("frozen", r.column(2)); + assertEquals("BASE TABLE", r.column(3)); + assertTrue(r.advanceRow()); + assertEquals("test", r.column(2)); + assertEquals("BASE TABLE", r.column(3)); + assertTrue(r.advanceRow()); + assertEquals("alias", r.column(2)); + assertEquals("VIEW", r.column(3)); + }, index, frozen, alias); + } + public void testSysTablesPattern() throws Exception { executeCommand("SYS TABLES LIKE '%'", r -> { assertEquals(2, r.size()); @@ -213,6 +249,15 @@ public void testSysTablesOnlyIndices() throws Exception { }, index); } + public void testSysTablesOnlyIndicesWithFrozen() throws Exception { + executeCommand("SYS TABLES LIKE 'test' TYPE 'BASE TABLE'", r -> { + assertEquals(2, r.size()); + assertEquals("frozen", r.column(2)); + assertTrue(r.advanceRow()); + assertEquals("test", r.column(2)); + }, index, frozen); + } + public void testSysTablesOnlyIndicesInLegacyMode() throws Exception { executeCommand("SYS TABLES LIKE 'test' TYPE 'TABLE'", r -> { assertEquals(1, r.size()); @@ -303,7 +348,7 @@ private SqlTypedParamValue param(Object value) { return new SqlTypedParamValue(DataTypes.fromJava(value).typeName, value); } - private Tuple sql(String sql, List params) { + private Tuple sql(String sql, List params, Configuration cfg) { EsIndex test = new EsIndex("test", mapping); Analyzer analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), IndexResolution.valid(test), new Verifier(new Metrics())); @@ -312,7 +357,7 @@ private Tuple sql(String sql, List para IndexResolver resolver = mock(IndexResolver.class); when(resolver.clusterName()).thenReturn(CLUSTER_NAME); - SqlSession session = new SqlSession(TestUtils.TEST_CFG, null, null, resolver, null, null, null, null, null); + SqlSession session = new SqlSession(cfg, 
null, null, resolver, null, null, null, null, null); return new Tuple<>(cmd, session); } @@ -320,10 +365,19 @@ private void executeCommand(String sql, Consumer consumer, IndexIn executeCommand(sql, emptyList(), consumer, infos); } - @SuppressWarnings({ "unchecked", "rawtypes" }) + private void executeCommand(String sql, Consumer consumer, Configuration cfg, IndexInfo... infos) throws Exception { + executeCommand(sql, emptyList(), consumer, cfg, infos); + } + private void executeCommand(String sql, List params, Consumer consumer, IndexInfo... infos) throws Exception { - Tuple tuple = sql(sql, params); + executeCommand(sql, params, consumer, TestUtils.TEST_CFG, infos); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private void executeCommand(String sql, List params, Consumer consumer, Configuration cfg, + IndexInfo... infos) throws Exception { + Tuple tuple = sql(sql, params, cfg); IndexResolver resolver = tuple.v2().indexResolver(); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java index 4a8da68a1d51e..805268dd5b687 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java @@ -48,7 +48,7 @@ public void testSysTypes() { "INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND", "INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND", "INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND", - "UNSUPPORTED", "OBJECT", "NESTED"); + "GEO_SHAPE", "GEO_POINT", "UNSUPPORTED", "OBJECT", "NESTED"); cmd.execute(null, wrap(r -> { assertEquals(19, r.columnCount()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 0543e65d4ae46..693840bd65c34 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.sql.querydsl.agg.GroupByDateHistogram; import org.elasticsearch.xpack.sql.querydsl.query.BoolQuery; import org.elasticsearch.xpack.sql.querydsl.query.ExistsQuery; +import org.elasticsearch.xpack.sql.querydsl.query.GeoDistanceQuery; import org.elasticsearch.xpack.sql.querydsl.query.NotQuery; import org.elasticsearch.xpack.sql.querydsl.query.Query; import org.elasticsearch.xpack.sql.querydsl.query.RangeQuery; @@ -65,6 +66,7 @@ import static org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation.PI; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.startsWith; public class QueryTranslatorTests extends ESTestCase { @@ -496,7 +498,7 @@ public void testTranslateMathFunction_HavingClause_Painless() { assertNull(translation.query); AggFilter aggFilter = translation.aggFilter; assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.gt(InternalSqlScriptUtils." 
+ - operation.name().toLowerCase(Locale.ROOT) + "(params.a0),params.v0))", + operation.name().toLowerCase(Locale.ROOT) + "(params.a0),params.v0))", aggFilter.scriptTemplate().toString()); assertThat(aggFilter.scriptTemplate().params().toString(), startsWith("[{a=max(int){a->")); assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=10}]")); @@ -561,6 +563,109 @@ public void testGroupByAndHavingWithFunctionOnTopOfAggregation() { assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=10}]")); } + public void testTranslateStAsWktForPoints() { + LogicalPlan p = plan("SELECT ST_AsWKT(point) FROM test WHERE ST_AsWKT(point) = 'point (10 20)'"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, true); + assertNull(translation.query); + AggFilter aggFilter = translation.aggFilter; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.eq(" + + "InternalSqlScriptUtils.stAswkt(InternalSqlScriptUtils.geoDocValue(doc,params.v0))," + + "params.v1)" + + ")", + aggFilter.scriptTemplate().toString()); + assertEquals("[{v=point}, {v=point (10 20)}]", aggFilter.scriptTemplate().params().toString()); + } + + public void testTranslateStWktToSql() { + LogicalPlan p = plan("SELECT shape FROM test WHERE ST_WKTToSQL(keyword) = ST_WKTToSQL('point (10 20)')"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, true); + assertNull(translation.query); + AggFilter aggFilter = translation.aggFilter; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(" + + "InternalSqlScriptUtils.eq(InternalSqlScriptUtils.stWktToSql(" + + "InternalSqlScriptUtils.docValue(doc,params.v0)),InternalSqlScriptUtils.stWktToSql(params.v1)))", + aggFilter.scriptTemplate().toString()); + assertEquals("[{v=keyword}, {v=point (10.0 20.0)}]", aggFilter.scriptTemplate().params().toString()); + } + + public void testTranslateStDistanceToScript() { + String operator = randomFrom(">", ">="); + String operatorFunction = operator.equalsIgnoreCase(">") ? "gt" : "gte"; + LogicalPlan p = plan("SELECT shape FROM test WHERE ST_Distance(point, ST_WKTToSQL('point (10 20)')) " + operator + " 20"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + assertNull(translation.aggFilter); + assertTrue(translation.query instanceof ScriptQuery); + ScriptQuery sc = (ScriptQuery) translation.query; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(" + + "InternalSqlScriptUtils." 
+ operatorFunction + "(" + + "InternalSqlScriptUtils.stDistance(" + + "InternalSqlScriptUtils.geoDocValue(doc,params.v0),InternalSqlScriptUtils.stWktToSql(params.v1)),params.v2))", + sc.script().toString()); + assertEquals("[{v=point}, {v=point (10.0 20.0)}, {v=20}]", sc.script().params().toString()); + } + + public void testTranslateStDistanceToQuery() { + String operator = randomFrom("<", "<="); + LogicalPlan p = plan("SELECT shape FROM test WHERE ST_Distance(point, ST_WKTToSQL('point (10 20)')) " + operator + " 25"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + assertNull(translation.aggFilter); + assertTrue(translation.query instanceof GeoDistanceQuery); + GeoDistanceQuery gq = (GeoDistanceQuery) translation.query; + assertEquals("point", gq.field()); + assertEquals(20.0, gq.lat(), 0.00001); + assertEquals(10.0, gq.lon(), 0.00001); + assertEquals(25.0, gq.distance(), 0.00001); + } + + public void testTranslateStXY() { + String dim = randomFrom("X", "Y"); + LogicalPlan p = plan("SELECT ST_AsWKT(point) FROM test WHERE ST_" + dim + "(point) = 10"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + assertNull(translation.aggFilter); + assertThat(translation.query, instanceOf(ScriptQuery.class)); + ScriptQuery sc = (ScriptQuery) translation.query; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.eq(InternalSqlScriptUtils.st" + dim + "(" + + "InternalSqlScriptUtils.geoDocValue(doc,params.v0)),params.v1))", + sc.script().toString()); + assertEquals("[{v=point}, {v=10}]", sc.script().params().toString()); + } + + public void testTranslateStGeometryType() { + LogicalPlan p = plan("SELECT ST_AsWKT(point) FROM test WHERE ST_GEOMETRYTYPE(point) = 'POINT'"); + assertThat(p, instanceOf(Project.class)); + assertThat(p.children().get(0), instanceOf(Filter.class)); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + assertNull(translation.aggFilter); + assertThat(translation.query, instanceOf(ScriptQuery.class)); + ScriptQuery sc = (ScriptQuery) translation.query; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.eq(InternalSqlScriptUtils.stGeometryType(" + + "InternalSqlScriptUtils.geoDocValue(doc,params.v0)),params.v1))", + sc.script().toString()); + assertEquals("[{v=point}, {v=POINT}]", sc.script().params().toString()); + } + public void testTranslateCoalesce_GroupBy_Painless() { LogicalPlan p = plan("SELECT COALESCE(int, 10) FROM test GROUP BY 1"); assertTrue(p instanceof Aggregate); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index 447c820c8e421..7ca4d0058325f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -628,6 +628,10 @@ 
public void testCommonType() { assertEquals(FLOAT, commonType(FLOAT, INTEGER)); assertEquals(DOUBLE, commonType(DOUBLE, FLOAT)); + // numeric and intervals + assertEquals(INTERVAL_YEAR_TO_MONTH, commonType(INTERVAL_YEAR_TO_MONTH, LONG)); + assertEquals(INTERVAL_HOUR_TO_MINUTE, commonType(INTEGER, INTERVAL_HOUR_TO_MINUTE)); + // dates/datetimes and intervals assertEquals(DATETIME, commonType(DATE, DATETIME)); assertEquals(DATETIME, commonType(DATETIME, DATE)); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java index 65b491fe71a1d..997de6e2f5c53 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java @@ -170,8 +170,11 @@ public void testNestedDoc() { public void testGeoField() { Map mapping = loadMapping("mapping-geo.json"); - EsField dt = mapping.get("location"); - assertThat(dt.getDataType().typeName, is("unsupported")); + assertThat(mapping.size(), is(2)); + EsField gp = mapping.get("location"); + assertThat(gp.getDataType().typeName, is("geo_point")); + EsField gs = mapping.get("site"); + assertThat(gs.getDataType().typeName, is("geo_shape")); } public void testIpField() { diff --git a/x-pack/plugin/sql/src/test/resources/mapping-geo.json b/x-pack/plugin/sql/src/test/resources/mapping-geo.json index 3c958ff37edfc..e6e499ef82e83 100644 --- a/x-pack/plugin/sql/src/test/resources/mapping-geo.json +++ b/x-pack/plugin/sql/src/test/resources/mapping-geo.json @@ -2,6 +2,9 @@ "properties" : { "location" : { "type" : "geo_point" + }, + "site": { + "type" : "geo_shape" } } } diff --git a/x-pack/plugin/sql/src/test/resources/mapping-multi-field-variation.json b/x-pack/plugin/sql/src/test/resources/mapping-multi-field-variation.json index d93633f7aced0..c75ecfdc845c0 100644 --- a/x-pack/plugin/sql/src/test/resources/mapping-multi-field-variation.json +++ b/x-pack/plugin/sql/src/test/resources/mapping-multi-field-variation.json @@ -44,6 +44,8 @@ } } }, - "foo_type" : { "type" : "foo" } + "foo_type" : { "type" : "foo" }, + "point": {"type" : "geo_point"}, + "shape": {"type" : "geo_shape"} } } diff --git a/x-pack/plugin/sql/src/test/resources/mapping-multi-field-with-nested.json b/x-pack/plugin/sql/src/test/resources/mapping-multi-field-with-nested.json index 448c50e6a9f0a..e46d64a45e88f 100644 --- a/x-pack/plugin/sql/src/test/resources/mapping-multi-field-with-nested.json +++ b/x-pack/plugin/sql/src/test/resources/mapping-multi-field-with-nested.json @@ -6,6 +6,7 @@ "keyword" : { "type" : "keyword" }, "unsupported" : { "type" : "ip_range" }, "date" : { "type" : "date"}, + "shape": { "type" : "geo_shape" }, "some" : { "properties" : { "dotted" : { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml index e0b35a64fb479..1d4a190b24e14 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml @@ -81,18 +81,28 @@ setup: "group_by": { "airline": {"terms": {"field": "airline"}}, "by-hour": {"date_histogram": {"interval": "1h", "field": "time", "format": "yyyy-MM-DD HH"}}}, - "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} + "aggs": { + "avg_response": {"avg": {"field": "responsetime"}}, 
+ "time.max": {"max": {"field": "time"}}, + "time.min": {"min": {"field": "time"}} + } } } - match: { preview.0.airline: foo } - match: { preview.0.by-hour: "2017-02-49 00" } - match: { preview.0.avg_response: 1.0 } + - match: { preview.0.time.max: "2017-02-18T00:30:00.000Z" } + - match: { preview.0.time.min: "2017-02-18T00:00:00.000Z" } - match: { preview.1.airline: bar } - match: { preview.1.by-hour: "2017-02-49 01" } - match: { preview.1.avg_response: 42.0 } + - match: { preview.1.time.max: "2017-02-18T01:00:00.000Z" } + - match: { preview.1.time.min: "2017-02-18T01:00:00.000Z" } - match: { preview.2.airline: foo } - match: { preview.2.by-hour: "2017-02-49 01" } - match: { preview.2.avg_response: 42.0 } + - match: { preview.2.time.max: "2017-02-18T01:01:00.000Z" } + - match: { preview.2.time.min: "2017-02-18T01:01:00.000Z" } --- "Test preview transform with invalid config": diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml index fa608cefd1eb3..65945b6ab7429 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml @@ -302,3 +302,65 @@ setup: "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} } } +--- +"Test put config with invalid pivot size": + - do: + catch: /pivot\.max_page_search_size \[5\] must be greater than 10 and less than 10,000/ + data_frame.put_data_frame_transform: + transform_id: "airline-transform" + body: > + { + "source": { "index": "airline-data" }, + "dest": { "index": "airline-dest-index" }, + "pivot": { + "max_page_search_size": 5, + "group_by": { "airline": {"terms": {"field": "airline"}}}, + "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} + } + } + - do: + catch: /pivot\.max_page_search_size \[15000\] must be greater than 10 and less than 10,000/ + data_frame.put_data_frame_transform: + transform_id: "airline-transform" + body: > + { + "source": { "index": "airline-data" }, + "dest": { "index": "airline-dest-index" }, + "pivot": { + "max_page_search_size": 15000, + "group_by": { "airline": {"terms": {"field": "airline"}}}, + "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} + } + } +--- +"Test creation failures due to duplicate and conflicting field names": + - do: + catch: /duplicate field \[airline\] detected/ + data_frame.put_data_frame_transform: + transform_id: "duplicate-field-transform" + body: > + { + "source": { + "index": "source-index" + }, + "dest": { "index": "dest-index" }, + "pivot": { + "group_by": { "airline": {"terms": {"field": "airline"}}}, + "aggs": {"airline": {"avg": {"field": "responsetime"}}} + } + } + - do: + catch: /field \[airline\] cannot be both an object and a field/ + data_frame.put_data_frame_transform: + transform_id: "duplicate-field-transform" + body: > + { + "source": { + "index": "source-index" + }, + "dest": { "index": "dest-index" }, + "pivot": { + "group_by": { "airline": {"terms": {"field": "airline"}}}, + "aggs": {"airline.responsetime": {"avg": {"field": "responsetime"}}} + } + } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml index f1ac07b72340c..a475c3ceadca6 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml +++ 
b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml @@ -100,12 +100,13 @@ teardown: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - - match: { transforms.0.state.indexer_state: "started" } + - match: { transforms.0.state.indexer_state: "/started|indexing/" } - match: { transforms.0.state.task_state: "started" } - do: data_frame.stop_data_frame_transform: transform_id: "airline-transform-start-stop" + wait_for_completion: true - match: { stopped: true } - do: @@ -113,8 +114,8 @@ teardown: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - - match: { transforms.0.state.indexer_state: "stopped" } - - match: { transforms.0.state.task_state: "stopped" } +# - match: { transforms.0.state.indexer_state: "stopped" } +# - match: { transforms.0.state.task_state: "stopped" } - do: data_frame.start_data_frame_transform: @@ -126,7 +127,7 @@ teardown: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - - match: { transforms.0.state.indexer_state: "started" } + - match: { transforms.0.state.indexer_state: "/started|indexing/" } - match: { transforms.0.state.task_state: "started" } --- @@ -167,7 +168,7 @@ teardown: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - - match: { transforms.0.state.indexer_state: "started" } + - match: { transforms.0.state.indexer_state: "/started|indexing/" } - match: { transforms.0.state.task_state: "started" } - do: @@ -193,56 +194,15 @@ teardown: transform_id: "airline-transform-start-later" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-later" } - - match: { transforms.0.state.indexer_state: "started" } + - match: { transforms.0.state.indexer_state: "/started|indexing/" } - match: { transforms.0.state.task_state: "started" } - do: data_frame.stop_data_frame_transform: transform_id: "airline-transform-start-later" + wait_for_completion: true - match: { stopped: true } - do: data_frame.delete_data_frame_transform: transform_id: "airline-transform-start-later" - ---- -"Test stop all": - - do: - data_frame.put_data_frame_transform: - transform_id: "airline-transform-stop-all" - body: > - { - "source": { "index": "airline-data" }, - "dest": { "index": "airline-data-start-later" }, - "pivot": { - "group_by": { "airline": {"terms": {"field": "airline"}}}, - "aggs": {"avg_response": {"avg": {"field": "responsetime"}}} - } - } - - do: - data_frame.start_data_frame_transform: - transform_id: "airline-transform-stop-all" - - match: { started: true } - - - do: - data_frame.start_data_frame_transform: - transform_id: "airline-transform-start-stop" - - match: { started: true } - - - do: - data_frame.stop_data_frame_transform: - transform_id: "_all" - - match: { stopped: true } - - - do: - data_frame.get_data_frame_transform_stats: - transform_id: "*" - - match: { count: 2 } - - match: { transforms.0.state.indexer_state: "stopped" } - - match: { transforms.0.state.task_state: "stopped" } - - match: { transforms.1.state.indexer_state: "stopped" } - - match: { transforms.1.state.task_state: "stopped" } - - - do: - data_frame.delete_data_frame_transform: - transform_id: "airline-transform-stop-all" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml 
b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml index 33b0f40863a79..93c942f0733a8 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml @@ -47,18 +47,18 @@ teardown: transform_id: "airline-transform-stats" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-stats" } - - match: { transforms.0.state.indexer_state: "started" } + - match: { transforms.0.state.indexer_state: "/started|indexing/" } - match: { transforms.0.state.task_state: "started" } - match: { transforms.0.state.checkpoint: 0 } - - match: { transforms.0.stats.pages_processed: 0 } + - lte: { transforms.0.stats.pages_processed: 1 } - match: { transforms.0.stats.documents_processed: 0 } - match: { transforms.0.stats.documents_indexed: 0 } - - match: { transforms.0.stats.trigger_count: 0 } + - match: { transforms.0.stats.trigger_count: 1 } - match: { transforms.0.stats.index_time_in_ms: 0 } - match: { transforms.0.stats.index_total: 0 } - match: { transforms.0.stats.index_failures: 0 } - - match: { transforms.0.stats.search_time_in_ms: 0 } - - match: { transforms.0.stats.search_total: 0 } + - gte: { transforms.0.stats.search_time_in_ms: 0 } + - lte: { transforms.0.stats.search_total: 1 } - match: { transforms.0.stats.search_failures: 0 } --- @@ -172,7 +172,7 @@ teardown: transform_id: "_all" - match: { count: 2 } - match: { transforms.0.id: "airline-transform-stats" } - - match: { transforms.0.state.indexer_state: "started" } + - match: { transforms.0.state.indexer_state: "/started|indexing/" } - match: { transforms.1.id: "airline-transform-stats-dos" } - match: { transforms.1.state.indexer_state: "stopped" } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/rollup_search.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/rollup_search.yml index 0e052d33281e2..ca04327eab729 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/rollup_search.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/rollup_search.yml @@ -1244,4 +1244,49 @@ setup: - match: { aggregations.date_histogram#histo.buckets.3.doc_count: 20 } - match: { aggregations.date_histogram#histo.buckets.3.max#the_max.value: 4 } +--- +"Search error against live index": + + - do: + catch: bad_request + rollup.rollup_search: + index: "foo" + body: + size: 0 + aggs: + histo: + date_histogram: + field: "timestamp" + interval: "asdfasdf" + + +--- +"Search error against rollup and live index": + + - do: + catch: bad_request + rollup.rollup_search: + index: "foo*" + body: + size: 0 + aggs: + histo: + date_histogram: + field: "timestamp" + interval: "asdfasdf" + +--- +"Search error no matching indices": + + - do: + catch: /Must specify at least one concrete index/ + rollup.rollup_search: + index: "bar*" + body: + size: 0 + aggs: + histo: + date_histogram: + field: "timestamp" + interval: "1h" diff --git a/x-pack/plugin/watcher/build.gradle b/x-pack/plugin/watcher/build.gradle index a44665ca25c0f..1585488bdb5b3 100644 --- a/x-pack/plugin/watcher/build.gradle +++ b/x-pack/plugin/watcher/build.gradle @@ -31,6 +31,9 @@ dependencies { compileOnly project(path: ':plugins:transport-nio', configuration: 'runtime') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile 
"org.elasticsearch.plugin:x-pack-ilm:${version}" // watcher deps @@ -47,7 +50,7 @@ dependencies { } // classes are missing, e.g. com.ibm.icu.lang.UCharacter -thirdPartyAudit { +thirdPartyAudit { ignoreViolations ( // uses internal java api: sun.misc.Unsafe 'com.google.common.cache.Striped64', diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java index 9f5027f7a0f7a..be844d5d1e428 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java @@ -79,7 +79,8 @@ protected String getOrigin() { } public static boolean validate(ClusterState state) { - return state.getMetaData().getTemplates().containsKey(WatcherIndexTemplateRegistryField.HISTORY_TEMPLATE_NAME) && + return (state.getMetaData().getTemplates().containsKey(WatcherIndexTemplateRegistryField.HISTORY_TEMPLATE_NAME) || + state.getMetaData().getTemplates().containsKey(WatcherIndexTemplateRegistryField.HISTORY_TEMPLATE_NAME_NO_ILM)) && state.getMetaData().getTemplates().containsKey(WatcherIndexTemplateRegistryField.TRIGGERED_TEMPLATE_NAME) && state.getMetaData().getTemplates().containsKey(WatcherIndexTemplateRegistryField.WATCHES_TEMPLATE_NAME); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java index bdaa2377fd1d7..377ffa67c9a8f 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java @@ -5,12 +5,15 @@ */ package org.elasticsearch.xpack.watcher.actions.webhook; +import com.sun.net.httpserver.HttpsServer; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ssl.TestsSSLService; import org.elasticsearch.xpack.core.watcher.history.WatchRecord; import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; @@ -26,6 +29,9 @@ import org.junit.Before; import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder; @@ -51,6 +57,7 @@ protected Settings nodeSettings(int nodeOrdinal) { .put("xpack.http.ssl.key", keyPath) .put("xpack.http.ssl.certificate", certPath) .put("xpack.http.ssl.keystore.password", "testnode") + .putList("xpack.http.ssl.supported_protocols", getProtocols()) .build(); } @@ -131,4 +138,21 @@ public void testHttpsAndBasicAuth() throws Exception { assertThat(webServer.requests().get(0).getBody(), equalTo("{key=value}")); 
assertThat(webServer.requests().get(0).getHeader("Authorization"), equalTo("Basic X3VzZXJuYW1lOl9wYXNzd29yZA==")); } + + /** + * The {@link HttpsServer} in the JDK has issues with TLSv1.3 when running in a JDK prior to + * 12.0.1 so we pin to TLSv1.2 when running on an earlier JDK + */ + private static List getProtocols() { + if (JavaVersion.current().compareTo(JavaVersion.parse("12")) < 0) { + return List.of("TLSv1.2"); + } else { + JavaVersion full = + AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + if (full.compareTo(JavaVersion.parse("12.0.1")) < 0) { + return List.of("TLSv1.2"); + } + } + return XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS; + } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java index 6bb607d6805a3..6e9957a404d9a 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.watcher.common.http; import com.carrotsearch.randomizedtesting.generators.RandomStrings; +import com.sun.net.httpserver.HttpsServer; import org.apache.http.HttpHeaders; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.config.RequestConfig; @@ -13,6 +14,7 @@ import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.MockSecureSettings; @@ -28,6 +30,7 @@ import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.test.junit.annotations.Network; +import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.core.ssl.TestsSSLService; import org.elasticsearch.xpack.core.ssl.VerificationMode; @@ -44,9 +47,12 @@ import java.net.SocketTimeoutException; import java.nio.charset.StandardCharsets; import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Locale; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -190,6 +196,7 @@ public void testHttps() throws Exception { Settings settings2 = Settings.builder() .put("xpack.security.http.ssl.key", keyPath) .put("xpack.security.http.ssl.certificate", certPath) + .putList("xpack.security.http.ssl.supported_protocols", getProtocols()) .setSecureSettings(secureSettings) .build(); @@ -218,6 +225,7 @@ public void testHttpsDisableHostnameVerification() throws Exception { Settings settings2 = Settings.builder() .put("xpack.security.http.ssl.key", keyPath) .put("xpack.security.http.ssl.certificate", certPath) + .putList("xpack.security.http.ssl.supported_protocols", getProtocols()) .setSecureSettings(secureSettings) .build(); @@ -234,6 +242,7 @@ public void testHttpsClientAuth() throws Exception { Settings settings = Settings.builder() .put("xpack.http.ssl.key", keyPath) 
.put("xpack.http.ssl.certificate", certPath) + .putList("xpack.http.ssl.supported_protocols", getProtocols()) .setSecureSettings(secureSettings) .build(); @@ -370,6 +379,7 @@ public void testProxyCanHaveDifferentSchemeThanRequest() throws Exception { Settings serverSettings = Settings.builder() .put("xpack.http.ssl.key", keyPath) .put("xpack.http.ssl.certificate", certPath) + .putList("xpack.security.http.ssl.supported_protocols", getProtocols()) .setSecureSettings(serverSecureSettings) .build(); TestsSSLService sslService = new TestsSSLService(serverSettings, environment); @@ -379,11 +389,12 @@ public void testProxyCanHaveDifferentSchemeThanRequest() throws Exception { proxyServer.start(); Settings settings = Settings.builder() - .put(HttpSettings.PROXY_HOST.getKey(), "localhost") - .put(HttpSettings.PROXY_PORT.getKey(), proxyServer.getPort()) - .put(HttpSettings.PROXY_SCHEME.getKey(), "https") + .put(HttpSettings.PROXY_HOST.getKey(), "localhost") + .put(HttpSettings.PROXY_PORT.getKey(), proxyServer.getPort()) + .put(HttpSettings.PROXY_SCHEME.getKey(), "https") .put("xpack.http.ssl.certificate_authorities", trustedCertPath) - .build(); + .putList("xpack.security.http.ssl.supported_protocols", getProtocols()) + .build(); HttpRequest.Builder requestBuilder = HttpRequest.builder("localhost", webServer.getPort()) .method(HttpMethod.GET) @@ -737,4 +748,21 @@ public static ClusterService mockClusterService() { private String getWebserverUri() { return String.format(Locale.ROOT, "http://%s:%s", webServer.getHostName(), webServer.getPort()); } + + /** + * The {@link HttpsServer} in the JDK has issues with TLSv1.3 when running in a JDK prior to + * 12.0.1 so we pin to TLSv1.2 when running on an earlier JDK + */ + private static List getProtocols() { + if (JavaVersion.current().compareTo(JavaVersion.parse("12")) < 0) { + return List.of("TLSv1.2"); + } else { + JavaVersion full = + AccessController.doPrivileged((PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version"))); + if (full.compareTo(JavaVersion.parse("12.0.1")) < 0) { + return List.of("TLSv1.2"); + } + } + return XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS; + } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java index a96c04ab7cd99..58a649de48c02 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.indexlifecycle.DeleteAction; @@ -223,7 +224,8 @@ public void testThatTemplatesExist() { // otherwise a rolling upgrade would not work as expected, when the node has a .watches shard on it public void testThatTemplatesAreAppliedOnNewerNodes() { DiscoveryNode localNode = new DiscoveryNode("node", ESTestCase.buildNewFakeTransportAddress(), Version.CURRENT); - DiscoveryNode masterNode = new DiscoveryNode("master", ESTestCase.buildNewFakeTransportAddress(), Version.V_6_0_0); + DiscoveryNode masterNode = 
new DiscoveryNode("master", ESTestCase.buildNewFakeTransportAddress(), + VersionUtils.randomPreviousCompatibleVersion(random(), Version.CURRENT)); DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("master").add(localNode).add(masterNode).build(); ClusterChangedEvent event = createClusterChangedEvent(Arrays.asList(WatcherIndexTemplateRegistryField.TRIGGERED_TEMPLATE_NAME, diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java index f6c46f6c68f71..9492e50048d4e 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java @@ -15,8 +15,6 @@ import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; -import java.util.concurrent.TimeUnit; - import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.loggingAction; @@ -25,7 +23,6 @@ import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.templateRequest; import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule; import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interval; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class RejectedExecutionTests extends AbstractWatcherIntegrationTestCase { @@ -36,8 +33,7 @@ protected boolean timeWarped() { return false; } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/41734") - public void testHistoryAndTriggeredOnRejection() throws Exception { + public void testHistoryOnRejection() throws Exception { WatcherClient watcherClient = watcherClient(); createIndex("idx"); client().prepareIndex("idx", "_doc").setSource("field", "a").get(); @@ -56,11 +52,7 @@ public void testHistoryAndTriggeredOnRejection() throws Exception { flushAndRefresh(".watcher-history-*"); SearchResponse searchResponse = client().prepareSearch(".watcher-history-*").get(); assertThat(searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(2L)); - }, 10, TimeUnit.SECONDS); - - flushAndRefresh(".triggered_watches"); - SearchResponse searchResponse = client().prepareSearch(".triggered_watches").get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + }); } @Override diff --git a/x-pack/qa/evil-tests/build.gradle b/x-pack/qa/evil-tests/build.gradle index d411909fb310b..bbb1c5804086f 100644 --- a/x-pack/qa/evil-tests/build.gradle +++ b/x-pack/qa/evil-tests/build.gradle @@ -2,6 +2,9 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') } diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle index 88248f89b72c5..e4c261b4c5d57 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle 
@@ -14,6 +14,9 @@ integTest.enabled = false dependencies { testCompile "org.elasticsearch.plugin:x-pack-core:${version}" testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') } @@ -74,4 +77,3 @@ task copyKeytabToGeneratedResources(type: Copy) { dependsOn project(':test:fixtures:krb5kdc-fixture').postProcessFixture } project.sourceSets.test.output.dir(generatedResources, builtBy:copyKeytabToGeneratedResources) - diff --git a/x-pack/qa/oidc-op-tests/build.gradle b/x-pack/qa/oidc-op-tests/build.gradle index 72fd21c993278..34325bc69b624 100644 --- a/x-pack/qa/oidc-op-tests/build.gradle +++ b/x-pack/qa/oidc-op-tests/build.gradle @@ -8,6 +8,9 @@ dependencies { // "org.elasticsearch.plugin:x-pack-core:${version}" doesn't work with idea because the testArtifacts are also here testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') } testFixtures.useFixture ":x-pack:test:idp-fixture" @@ -32,7 +35,6 @@ integTestCluster { setting 'xpack.security.authc.realms.native.native.order', '1' // OpenID Connect Realm 1 configured for authorization grant flow setting 'xpack.security.authc.realms.oidc.c2id.order', '2' - setting 'xpack.security.authc.realms.oidc.c2id.op.name', 'c2id-op' setting 'xpack.security.authc.realms.oidc.c2id.op.issuer', 'http://localhost:8080' setting 'xpack.security.authc.realms.oidc.c2id.op.authorization_endpoint', "http://127.0.0.1:${-> ephemeralPort}/c2id-login" setting 'xpack.security.authc.realms.oidc.c2id.op.token_endpoint', "http://127.0.0.1:${-> ephemeralPort}/c2id/token" @@ -48,7 +50,6 @@ integTestCluster { setting 'xpack.security.authc.realms.oidc.c2id.claims.groups', 'groups' // OpenID Connect Realm 2 configured for implicit flow setting 'xpack.security.authc.realms.oidc.c2id-implicit.order', '3' - setting 'xpack.security.authc.realms.oidc.c2id-implicit.op.name', 'c2id-implicit' setting 'xpack.security.authc.realms.oidc.c2id-implicit.op.issuer', 'http://localhost:8080' setting 'xpack.security.authc.realms.oidc.c2id-implicit.op.authorization_endpoint', "http://127.0.0.1:${-> ephemeralPort}/c2id-login" setting 'xpack.security.authc.realms.oidc.c2id-implicit.op.token_endpoint', "http://127.0.0.1:${-> ephemeralPort}/c2id/token" @@ -63,7 +64,7 @@ integTestCluster { setting 'xpack.security.authc.realms.oidc.c2id-implicit.claims.mail', 'email' setting 'xpack.security.authc.realms.oidc.c2id-implicit.claims.groups', 'groups' setting 'xpack.ml.enabled', 'false' - + extraConfigFile 'op-jwks.json', idpFixtureProject.file("oidc/op-jwks.json") setupCommand 'setupTestAdmin', @@ -81,4 +82,4 @@ integTestCluster { } } -thirdPartyAudit.enabled = false \ No newline at end of file +thirdPartyAudit.enabled = false diff --git a/x-pack/qa/openldap-tests/build.gradle b/x-pack/qa/openldap-tests/build.gradle index 5305699b9a0c7..f732d8fc5b030 100644 --- a/x-pack/qa/openldap-tests/build.gradle +++ b/x-pack/qa/openldap-tests/build.gradle @@ -6,6 +6,9 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 
'testArtifacts') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } } testFixtures.useFixture ":x-pack:test:idp-fixture" diff --git a/x-pack/qa/reindex-tests-with-security/build.gradle b/x-pack/qa/reindex-tests-with-security/build.gradle index 64e1c61b60717..5a201832e7c39 100644 --- a/x-pack/qa/reindex-tests-with-security/build.gradle +++ b/x-pack/qa/reindex-tests-with-security/build.gradle @@ -8,6 +8,9 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } testCompile project(path: ':modules:reindex') } @@ -36,6 +39,12 @@ integTestCluster { setting 'xpack.security.http.ssl.key_passphrase', 'http-password' setting 'reindex.ssl.truststore.path', 'ca.p12' setting 'reindex.ssl.truststore.password', 'password' + + // Workaround for JDK-8212885 + if (project.ext.runtimeJavaVersion.isJava12Compatible() == false) { + setting 'reindex.ssl.supported_protocols', 'TLSv1.2' + } + extraConfigFile 'roles.yml', 'roles.yml' [ test_admin: 'superuser', diff --git a/x-pack/qa/rolling-upgrade-basic/build.gradle b/x-pack/qa/rolling-upgrade-basic/build.gradle index 28b6428146e45..64a2eda823e04 100644 --- a/x-pack/qa/rolling-upgrade-basic/build.gradle +++ b/x-pack/qa/rolling-upgrade-basic/build.gradle @@ -143,9 +143,11 @@ project.sourceSets.test.output.dir(outputDir, builtBy: copyXPackRestSpec) repositories { maven { + name "elastic" url "https://artifacts.elastic.co/maven" } maven { + name "elastic-snapshots" url "https://snapshots.elastic.co/maven" } } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java index 03c28c05e616b..035e29ccf771c 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupDateHistoUpgradeIT.java @@ -34,7 +34,7 @@ public class RollupDateHistoUpgradeIT extends AbstractUpgradeTestCase { Version.fromString(System.getProperty("tests.upgrade_from_version")); public void testDateHistoIntervalUpgrade() throws Exception { - assumeTrue("DateHisto interval changed in 7.1", UPGRADE_FROM_VERSION.before(Version.V_8_0_0)); // TODO change this after backport + assumeTrue("DateHisto interval changed in 7.1", UPGRADE_FROM_VERSION.before(Version.V_7_2_0)); switch (CLUSTER_TYPE) { case OLD: break; diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml index e453014258a24..4d732015d47f4 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/40_ml_datafeed_crud.yml @@ -1,3 +1,8 @@ +setup: + - skip: + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/42258" + --- "Test old cluster datafeed without aggs": - do: diff --git 
a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml index bce9c25c08c03..2a7b56adb9a16 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/40_ml_datafeed_crud.yml @@ -1,3 +1,8 @@ +setup: + - skip: + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/42258" + --- "Put job and datafeed without aggs in old cluster": @@ -48,8 +53,8 @@ --- "Put job and datafeed with aggs in old cluster - pre-deprecated interval": - skip: - version: "8.0.0 - " #TODO change this after backport - reason: calendar_interval introduced in 7.1.0 + version: "all" #TODO change this after backport + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/42258; calendar_interval introduced in 7.2.0" - do: ml.put_job: diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml index 5dc71ecb0679e..4b742e10de61f 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/40_ml_datafeed_crud.yml @@ -1,4 +1,8 @@ setup: + - skip: + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/42258" + - do: cluster.health: wait_for_status: green diff --git a/x-pack/qa/saml-idp-tests/build.gradle b/x-pack/qa/saml-idp-tests/build.gradle index 4355ac0b5b825..25d9d9037b719 100644 --- a/x-pack/qa/saml-idp-tests/build.gradle +++ b/x-pack/qa/saml-idp-tests/build.gradle @@ -103,7 +103,7 @@ thirdPartyAudit { 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1' ) - ignoreMissingClasses ( + ignoreMissingClasses ( 'com.ibm.icu.lang.UCharacter' ) } diff --git a/x-pack/qa/security-migrate-tests/build.gradle b/x-pack/qa/security-migrate-tests/build.gradle deleted file mode 100644 index 1851f0e21b027..0000000000000 --- a/x-pack/qa/security-migrate-tests/build.gradle +++ /dev/null @@ -1,43 +0,0 @@ -apply plugin: 'elasticsearch.standalone-rest-test' -apply plugin: 'elasticsearch.rest-test' - -dependencies { - testCompile "org.elasticsearch.plugin:x-pack-core:${version}" - testCompile project(path: xpackModule('security'), configuration: 'runtime') - testCompile project(path: xpackProject('transport-client').path, configuration: 'runtime') -} - -integTestCluster { - setting 'xpack.security.enabled', 'true' - setting 'xpack.license.self_generated.type', 'trial' - extraConfigFile 'roles.yml', 'roles.yml' - [ - test_admin: 'superuser', - transport_user: 'superuser', - existing: 'superuser', - bob: 'actual_role' - ].each { String user, String role -> - setupCommand 'setupUser#' + user, - 'bin/elasticsearch-users', 'useradd', user, '-p', 'x-pack-test-password', '-r', role - } - waitCondition = { node, ant -> - File tmpFile = new File(node.cwd, 'wait.success') - ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow", - dest: tmpFile.toString(), - username: 'test_admin', - password: 'x-pack-test-password', - ignoreerrors: true, - retries: 10) - return tmpFile.exists() - } - // TODO: 
systemProperty('tests.cluster', "${-> cluster.transportPortURI }") when migerating to testclusters -} - -testingConventions { - naming.clear() - naming { - IT { - baseClass 'org.elasticsearch.xpack.security.MigrateToolTestCase' - } - } -} diff --git a/x-pack/qa/security-migrate-tests/roles.yml b/x-pack/qa/security-migrate-tests/roles.yml deleted file mode 100644 index 6e997383f8a5a..0000000000000 --- a/x-pack/qa/security-migrate-tests/roles.yml +++ /dev/null @@ -1,22 +0,0 @@ -# A role that has all sorts of configuration: -# - it can monitor the cluster -# - for index1 and index2 it can do CRUD things and refresh -# - for other indices it has search-only privileges -actual_role: - run_as: [ "joe" ] - cluster: - - monitor - indices: - - names: [ "index1", "index2" ] - privileges: [ "read", "write", "create_index", "indices:admin/refresh" ] - field_security: - grant: - - foo - - bar - query: - bool: - must_not: - match: - hidden: true - - names: "*" - privileges: [ "read" ] diff --git a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java b/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java deleted file mode 100644 index 3581bf2fda7fd..0000000000000 --- a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security; - -import joptsimple.OptionParser; -import joptsimple.OptionSet; - -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.Requests; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.core.security.action.role.GetRolesResponse; -import org.elasticsearch.xpack.core.security.action.user.GetUsersResponse; -import org.elasticsearch.xpack.core.security.action.user.PutUserResponse; -import org.elasticsearch.xpack.core.security.authc.support.Hasher; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; -import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; -import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool; -import org.junit.Before; - -import java.nio.file.Path; -import java.util.Arrays; -import java.util.Collections; - -import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; -import static org.hamcrest.Matchers.containsString; - -/** - * Integration tests for the {@code elasticsearch-migrate} shell command - */ -public class MigrateToolIT extends MigrateToolTestCase { - - @Before - public void setupUpTest() throws Exception { - Client client = getClient(); - SecurityClient c = new SecurityClient(client); - - // Add an existing user so the tool will skip it - PutUserResponse pur = 
c.preparePutUser("existing", "s3kirt".toCharArray(), Hasher.BCRYPT, "role1", "user").get(); - assertTrue(pur.created()); - } - - public void testRunMigrateTool() throws Exception { - final String testConfigDir = System.getProperty("tests.config.dir"); - logger.info("--> CONF: {}", testConfigDir); - final Path configPath = PathUtils.get(testConfigDir); - Settings settings = Settings.builder().put("path.home", configPath.getParent()).build(); - // Cluster should already be up - String url = "http://" + getHttpURL(); - logger.info("--> using URL: {}", url); - MockTerminal t = new MockTerminal(); - ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles(); - OptionParser parser = muor.getParser(); - - OptionSet options = parser.parse("-u", "test_admin", "-p", "x-pack-test-password", "-U", url); - muor.execute(t, options, new Environment(settings, configPath)); - - logger.info("--> output:\n{}", t.getOutput()); - - Client client = getClient(); - SecurityClient c = new SecurityClient(client); - - // Check that the migrated user can be retrieved - GetUsersResponse resp = c.prepareGetUsers("bob").get(); - assertTrue("user 'bob' should exist", resp.hasUsers()); - User bob = resp.users()[0]; - assertEquals(bob.principal(), "bob"); - assertArrayEquals(bob.roles(), new String[]{"actual_role"}); - - // Make sure the existing user did not change - resp = c.prepareGetUsers("existing").get(); - assertTrue("user should exist", resp.hasUsers()); - User existing = resp.users()[0]; - assertEquals(existing.principal(), "existing"); - assertArrayEquals(existing.roles(), new String[]{"role1", "user"}); - - // Make sure the "actual_role" made it in and is correct - GetRolesResponse roleResp = c.prepareGetRoles().names("actual_role").get(); - assertTrue("role should exist", roleResp.hasRoles()); - RoleDescriptor rd = roleResp.roles()[0]; - assertNotNull(rd); - assertEquals(rd.getName(), "actual_role"); - assertArrayEquals(rd.getClusterPrivileges(), new String[]{"monitor"}); - assertArrayEquals(rd.getRunAs(), new String[]{"joe"}); - RoleDescriptor.IndicesPrivileges[] ips = rd.getIndicesPrivileges(); - assertEquals(ips.length, 2); - for (RoleDescriptor.IndicesPrivileges ip : ips) { - final FieldPermissions fieldPermissions = new FieldPermissions( - new FieldPermissionsDefinition(ip.getGrantedFields(), ip.getDeniedFields())); - if (Arrays.equals(ip.getIndices(), new String[]{"index1", "index2"})) { - assertArrayEquals(ip.getPrivileges(), new String[]{"read", "write", "create_index", "indices:admin/refresh"}); - assertTrue(fieldPermissions.hasFieldLevelSecurity()); - assertTrue(fieldPermissions.grantsAccessTo("bar")); - assertTrue(fieldPermissions.grantsAccessTo("foo")); - assertNotNull(ip.getQuery()); - assertThat(ip.getQuery().iterator().next().utf8ToString(), - containsString("{\"bool\":{\"must_not\":{\"match\":{\"hidden\":true}}}}")); - } else { - assertArrayEquals(ip.getIndices(), new String[]{"*"}); - assertArrayEquals(ip.getPrivileges(), new String[]{"read"}); - assertFalse(fieldPermissions.hasFieldLevelSecurity()); - assertNull(ip.getQuery()); - } - } - - // Check that bob can access the things the "actual_role" says he can - String token = basicAuthHeaderValue("bob", new SecureString("x-pack-test-password".toCharArray())); - // Create "index1" index and try to search from it as "bob" - client.filterWithHeader(Collections.singletonMap("Authorization", token)).admin().indices().prepareCreate("index1").get(); - // Wait for the index to be ready so it doesn't fail if no 
shards are initialized - client.admin().cluster().health(Requests.clusterHealthRequest("index1") - .timeout(TimeValue.timeValueSeconds(30)) - .waitForYellowStatus() - .waitForEvents(Priority.LANGUID) - .waitForNoRelocatingShards(true)) - .actionGet(); - client.filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("index1").get(); - } -} diff --git a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolTestCase.java b/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolTestCase.java deleted file mode 100644 index 0111aeff4cca2..0000000000000 --- a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolTestCase.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security; - -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.LogManager; -import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.xpack.client.PreBuiltXPackTransportClient; -import org.elasticsearch.xpack.core.security.SecurityField; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; - -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.nio.file.Path; -import java.util.concurrent.atomic.AtomicInteger; - -import static org.hamcrest.Matchers.notNullValue; - -/** - * {@link MigrateToolTestCase} is an abstract base class to run integration - * tests against an external Elasticsearch Cluster. - *
<p>
- * You can define a list of transport addresses from where you can reach your cluster - by setting "tests.cluster" system property. It defaults to "localhost:9300". - * <p>
- * All tests can be run from maven using mvn install as maven will start an external cluster first. - * <p>
- * If you want to debug this module from your IDE, then start an external cluster by yourself - * then run JUnit. If you changed the default port, set "tests.cluster=localhost:PORT" when running - * your test. - */ -@LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose") -public abstract class MigrateToolTestCase extends LuceneTestCase { - - /** - * Key used to eventually switch to using an external cluster and provide its transport addresses - */ - public static final String TESTS_CLUSTER = "tests.cluster"; - - /** - * Key used to eventually switch to using an external cluster and provide its transport addresses - */ - public static final String TESTS_HTTP_CLUSTER = "tests.rest.cluster"; - - /** - * Defaults to localhost:9300 - */ - public static final String TESTS_CLUSTER_DEFAULT = "localhost:9300"; - - protected static final Logger logger = LogManager.getLogger(MigrateToolTestCase.class); - - private static final AtomicInteger counter = new AtomicInteger(); - private static Client client; - private static String clusterAddresses; - private static String clusterHttpAddresses; - - private static Client startClient(Path tempDir, TransportAddress... transportAddresses) { - logger.info("--> Starting Elasticsearch Java TransportClient {}, {}", transportAddresses, tempDir); - - Settings clientSettings = Settings.builder() - .put("cluster.name", "qa_migrate_tests_" + counter.getAndIncrement()) - .put("client.transport.ignore_cluster_name", true) - .put("path.home", tempDir) - .put(SecurityField.USER_SETTING.getKey(), "transport_user:x-pack-test-password") - .build(); - - TransportClient client = new PreBuiltXPackTransportClient(clientSettings).addTransportAddresses(transportAddresses); - Exception clientException = null; - try { - logger.info("--> Elasticsearch Java TransportClient started"); - ClusterHealthResponse health = client.admin().cluster().prepareHealth().get(); - logger.info("--> connected to [{}] cluster which is running [{}] node(s).", - health.getClusterName(), health.getNumberOfNodes()); - } catch (Exception e) { - clientException = e; - } - - assumeNoException("Sounds like your cluster is not running at " + clusterAddresses, clientException); - - return client; - } - - private static Client startClient() throws UnknownHostException { - String[] stringAddresses = clusterAddresses.split(","); - TransportAddress[] transportAddresses = new TransportAddress[stringAddresses.length]; - int i = 0; - for (String stringAddress : stringAddresses) { - int lastColon = stringAddress.lastIndexOf(":"); - if (lastColon == -1) { - throw new IllegalArgumentException("address [" + clusterAddresses + "] not valid"); - } - String ip = stringAddress.substring(0, lastColon); - String port = stringAddress.substring(lastColon + 1); - try { - transportAddresses[i++] = new TransportAddress(InetAddress.getByName(ip), Integer.valueOf(port)); - } catch (NumberFormatException e) { - throw new IllegalArgumentException("port is not valid, expected number but was [" + port + "]"); - } - } - return startClient(createTempDir(), transportAddresses); - } - - public static Client getClient() { - if (client == null) { - try { - client = startClient(); - } catch (UnknownHostException e) { - logger.error("could not start the client", e); - } - assertThat(client, notNullValue()); - } - return client; - } - - public static String getHttpURL() { - return clusterHttpAddresses; - } - - @BeforeClass - public static void initializeSettings() throws UnknownHostException { - clusterAddresses = 
System.getProperty(TESTS_CLUSTER); - clusterHttpAddresses = System.getProperty(TESTS_HTTP_CLUSTER); - if (clusterAddresses == null || clusterAddresses.isEmpty()) { - throw new UnknownHostException("unable to get a cluster address"); - } - } - - @AfterClass - public static void stopTransportClient() { - if (client != null) { - client.close(); - client = null; - } - } - - @Before - public void defineIndexName() { - doClean(); - } - - @After - public void cleanIndex() { - doClean(); - } - - private void doClean() { - if (client != null) { - try { - client.admin().indices().prepareDelete("_all").get(); - } catch (Exception e) { - // We ignore this cleanup exception - } - } - } -} diff --git a/x-pack/qa/security-setup-password-tests/build.gradle b/x-pack/qa/security-setup-password-tests/build.gradle index c0801a38b570c..604fffbe32f76 100644 --- a/x-pack/qa/security-setup-password-tests/build.gradle +++ b/x-pack/qa/security-setup-password-tests/build.gradle @@ -6,10 +6,14 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('security'), configuration: 'runtime') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + if (isEclipse) { + testCompile project(path: xpackModule('core-tests'), configuration: 'testArtifacts') + } } integTestRunner { systemProperty 'tests.security.manager', 'false' + // TODO add tests.config.dir = {cluster.singleNode().getConfigDir()} when converting to testclusters } integTestCluster { diff --git a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java index 902115e82925d..bf53dfa83103e 100644 --- a/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java +++ b/x-pack/qa/smoke-test-watcher-with-security/src/test/java/org/elasticsearch/smoketest/SmokeTestWatcherWithSecurityIT.java @@ -83,7 +83,7 @@ public void startWatcher() throws Exception { }); assertBusy(() -> { - for (String template : XPackRestTestConstants.TEMPLATE_NAMES) { + for (String template : XPackRestTestConstants.TEMPLATE_NAMES_NO_ILM) { assertOK(adminClient().performRequest(new Request("HEAD", "_template/" + template))); } }); diff --git a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java index 1a6a59fbc696b..bfdf051a29235 100644 --- a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java +++ b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java @@ -11,13 +11,10 @@ public final class XPackRestTestConstants { // Watcher constants: public static final String INDEX_TEMPLATE_VERSION = "9"; - public static final String HISTORY_TEMPLATE_NAME = ".watch-history-" + INDEX_TEMPLATE_VERSION; public static final String HISTORY_TEMPLATE_NAME_NO_ILM = ".watch-history-no-ilm-" + INDEX_TEMPLATE_VERSION; public static final String TRIGGERED_TEMPLATE_NAME = ".triggered_watches"; public static final String WATCHES_TEMPLATE_NAME = ".watches"; - public static final String[] TEMPLATE_NAMES = new String[] { - HISTORY_TEMPLATE_NAME, TRIGGERED_TEMPLATE_NAME, WATCHES_TEMPLATE_NAME - }; + public static final String[] TEMPLATE_NAMES_NO_ILM = new String[] { HISTORY_TEMPLATE_NAME_NO_ILM, 
TRIGGERED_TEMPLATE_NAME, WATCHES_TEMPLATE_NAME }; diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash b/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash index c267744194a1c..bafe7d9342f0e 100644 --- a/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash +++ b/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash @@ -17,7 +17,6 @@ verify_xpack_installation() { 'elasticsearch-certgen' 'elasticsearch-certutil' 'elasticsearch-croneval' - 'elasticsearch-migrate' 'elasticsearch-saml-metadata' 'elasticsearch-setup-passwords' 'elasticsearch-sql-cli'