Merge branch 'master' into ccr-docs
* master: (74 commits)
  XContent: Check for bad parsers (elastic#34561)
  Docs: Align prose with snippet (elastic#34839)
  document the search context is freed if the scroll is not extended (elastic#34739)
  Test: Lookup node versions on rest test start (elastic#34657)
  SQL: Return error with ORDER BY on non-grouped. (elastic#34855)
  Reduce channels in AbstractSimpleTransportTestCase (elastic#34863)
  [DOCS] Updates Elasticsearch monitoring tasks (elastic#34339)
  Check self references in metric agg after last doc collection (elastic#33593) (elastic#34001)
  [Docs] Add `indices.query.bool.max_clause_count` setting (elastic#34779)
  Add 6.6.0 version to master (elastic#34847)
  Test: ensure char[] doesn't begin with prefix (elastic#34816)
  Remove static import from HLRC doc snippet (elastic#34834)
  Logging: server: clean up logging (elastic#34593)
  Logging: tests: clean up logging (elastic#34606)
  SQL: Fix edge case: `<field> IN (null)` (elastic#34802)
  [Test] Mute FullClusterRestartIT.testShrink() until test is fixed
  SQL: Introduce ODBC mode, similar to JDBC (elastic#34825)
  SQL: handle X-Pack or X-Pack SQL not being available in a more graceful way (elastic#34736)
  [Docs] Add explanation for code snippets line width (elastic#34796)
  CCR: Rename follow-task parameters and stats (elastic#34836)
  ...
jasontedor committed Oct 25, 2018
2 parents 6fbe1cd + 3cde135 commit 843ae8f
Showing 568 changed files with 8,045 additions and 4,080 deletions.
1 change: 1 addition & 0 deletions CONTRIBUTING.md
@@ -159,6 +159,7 @@ Please follow these formatting guidelines:

* Java indent is 4 spaces
* Line width is 140 characters
* Line width for code snippets that are included in the documentation (the ones surrounded by `// tag` and `// end` comments) is 76 characters
* The rest is left to Java coding standards
* Disable “auto-format on save” to prevent unnecessary format changes; they make reviews much harder. If your IDE supports formatting only modified chunks, that is fine to do.
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. This can be done automatically by your IDE:
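To make the new snippet-width guideline concrete, here is a purely illustrative tagged documentation snippet. The tag name and the code between the markers are hypothetical, not taken from the repository, and the exact marker form should follow whatever convention the docs build already uses; only the lines between the markers are subject to the 76-character limit.

```groovy
// Hypothetical source file containing a tagged documentation snippet.
// Only the lines between the tag/end markers are pulled into the docs,
// so they must stay within the 76-character snippet line width even
// though the surrounding file may use the full 140 characters.

// tag::widget-settings
Map<String, Object> settings = [:]
settings['index.number_of_shards'] = 3   // each line <= 76 characters
settings['index.number_of_replicas'] = 1
// end::widget-settings

println settings
```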
55 changes: 41 additions & 14 deletions buildSrc/build.gradle
@@ -31,22 +31,12 @@ if (GradleVersion.current() < GradleVersion.version(minimumGradleVersion)) {
throw new GradleException("Gradle ${minimumGradleVersion}+ is required to build elasticsearch")
}

if (JavaVersion.current() < JavaVersion.VERSION_1_8) {
throw new GradleException('Java 1.8 is required to build elasticsearch gradle tools')
}

if (project == rootProject) {
// change the build dir used during build init, so that doing a clean
// won't wipe out the buildscript jar
buildDir = 'build-bootstrap'
}

// Make sure :buildSrc: doesn't generate classes incompatible with RUNTIME_JAVA_HOME
// We can't use BuildPlugin here, so read from file
String minimumRuntimeVersion = file('src/main/resources/minimumRuntimeVersion').text.trim()
targetCompatibility = minimumRuntimeVersion
sourceCompatibility = minimumRuntimeVersion

/*****************************************************************************
* Propagating version.properties to the rest of the build *
*****************************************************************************/
@@ -82,6 +72,45 @@ processResources {
from tempPropertiesFile
}


if (JavaVersion.current() < JavaVersion.VERSION_1_10) {
throw new GradleException('At least Java 10 is required to build elasticsearch gradle tools')
}

/*****************************************************************************
* Java version *
*****************************************************************************/

// Gradle 4.10 does not support setting this to 11 yet
targetCompatibility = "10"
sourceCompatibility = "10"

// We have a few classes that need to be compiled for older java versions because these are used to run checks against
// those
sourceSets {
minimumRuntime {
// We only want Java here, but the Groovy plugin doesn't configure javadoc correctly if we don't define this as groovy
groovy {
srcDirs = ['src/main/minimumRuntime']
}
}
}
compileMinimumRuntimeGroovy {
// We can't use BuildPlugin here, so read from file
String minimumRuntimeVersion = file('src/main/resources/minimumRuntimeVersion').text.trim()
targetCompatibility = minimumRuntimeVersion
sourceCompatibility = minimumRuntimeVersion
}
dependencies {
compile sourceSets.minimumRuntime.output
minimumRuntimeCompile "junit:junit:${props.getProperty('junit')}"
minimumRuntimeCompile localGroovy()
}
jar {
from sourceSets.minimumRuntime.output
}


/*****************************************************************************
* Dependencies used by the entire build *
*****************************************************************************/
@@ -94,10 +123,7 @@ dependencies {
compile localGroovy()
compile "com.carrotsearch.randomizedtesting:junit4-ant:${props.getProperty('randomizedrunner')}"
compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"

compile("junit:junit:${props.getProperty('junit')}") {
transitive = false
}

compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3'
compile 'com.netflix.nebula:nebula-publishing-plugin:4.4.4'
compile 'com.netflix.nebula:gradle-info-plugin:3.0.3'
@@ -156,6 +182,7 @@ if (project != rootProject) {
dependenciesInfo.enabled = false
forbiddenApisMain.enabled = false
forbiddenApisTest.enabled = false
forbiddenApisMinimumRuntime.enabled = false
jarHell.enabled = false
thirdPartyAudit.enabled = false

@@ -99,12 +99,14 @@ class BuildPlugin implements Plugin<Project> {
configureSourcesJar(project)
configurePomGeneration(project)

applyCommonTestConfig(project)
configureTest(project)
configurePrecommit(project)
configureDependenciesInfo(project)
}



/** Performs checks on the build environment and prints information about the build environment. */
static void globalBuildInfo(Project project) {
if (project.rootProject.ext.has('buildChecksDone') == false) {
@@ -776,9 +778,8 @@ class BuildPlugin implements Plugin<Project> {
}
}

/** Returns a closure of common configuration shared by unit and integration tests. */
static Closure commonTestConfig(Project project) {
return {
static void applyCommonTestConfig(Project project) {
project.tasks.withType(RandomizedTestingTask) {
jvm "${project.runtimeJavaHome}/bin/java"
parallelism System.getProperty('tests.jvms', 'auto')
ifNoTests System.getProperty('tests.ifNoTests', 'fail')
@@ -873,6 +874,8 @@ class BuildPlugin implements Plugin<Project> {

exclude '**/*$*.class'

dependsOn(project.tasks.testClasses)

project.plugins.withType(ShadowPlugin).whenPluginAdded {
// Test against a shadow jar if we made one
classpath -= project.tasks.compileJava.outputs.files
@@ -884,23 +887,9 @@ class BuildPlugin implements Plugin<Project> {

/** Configures the test task */
static Task configureTest(Project project) {
RandomizedTestingTask test = project.tasks.getByName('test')
test.configure(commonTestConfig(project))
test.configure {
project.tasks.getByName('test') {
include '**/*Tests.class'
}

// Add a method to create additional unit tests for a project, which will share the same
// randomized testing setup, but by default run no tests.
project.extensions.add('additionalTest', { String name, Closure config ->
RandomizedTestingTask additionalTest = project.tasks.create(name, RandomizedTestingTask.class)
additionalTest.classpath = test.classpath
additionalTest.testClassesDirs = test.testClassesDirs
additionalTest.configure(commonTestConfig(project))
additionalTest.configure(config)
additionalTest.dependsOn(project.tasks.testClasses)
project.check.dependsOn(additionalTest)
});
}

private static configurePrecommit(Project project) {
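The refactor above replaces the `commonTestConfig` closure, which every consumer had to apply explicitly, with `applyCommonTestConfig`, which configures all `RandomizedTestingTask` instances through Gradle's `tasks.withType`. A minimal, hypothetical build-script fragment (the task class and task names are invented) sketches why a single `withType` block also covers tasks registered later, which is what makes the removed `additionalTest` plumbing unnecessary:

```groovy
// Hypothetical build.gradle fragment; EchoTask and the task names are
// invented for illustration only.
class EchoTask extends DefaultTask {
    @TaskAction
    void echo() {
        println "${name} runs in group '${group}'"
    }
}

// Applies to every EchoTask in the project, including tasks that are
// created after this block is evaluated.
tasks.withType(EchoTask) {
    group = 'verification'
}

task earlyEcho(type: EchoTask)
task lateEcho(type: EchoTask)   // still picks up the common settings
```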
@@ -68,7 +68,9 @@ class ClusterConfiguration {
* In case of more than one node, this defaults to the number of nodes
*/
@Input
Closure<Integer> minimumMasterNodes = { getNumNodes() > 1 ? getNumNodes() : -1 }
Closure<Integer> minimumMasterNodes = {
return getNumNodes() > 1 ? getNumNodes() : -1
}

@Input
String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') +
@@ -122,8 +122,31 @@ class ClusterFormationTasks {
}
NodeInfo node = new NodeInfo(config, i, project, prefix, elasticsearchVersion, sharedDir)
nodes.add(node)
Object dependsOn = startTasks.empty ? startDependencies : startTasks.get(0)
startTasks.add(configureNode(project, prefix, runner, dependsOn, node, config, distro, nodes.get(0)))
Closure<Map> writeConfigSetup
Object dependsOn
if (node.nodeVersion.onOrAfter("6.5.0-SNAPSHOT")) {
writeConfigSetup = { Map esConfig ->
// Don't force discovery provider if one is set by the test cluster specs already
if (esConfig.containsKey('discovery.zen.hosts_provider') == false) {
esConfig['discovery.zen.hosts_provider'] = 'file'
}
esConfig['discovery.zen.ping.unicast.hosts'] = []
esConfig
}
dependsOn = startDependencies
} else {
dependsOn = startTasks.empty ? startDependencies : startTasks.get(0)
writeConfigSetup = { Map esConfig ->
String unicastTransportUri = node.config.unicastTransportUri(nodes.get(0), node, project.ant)
if (unicastTransportUri == null) {
esConfig['discovery.zen.ping.unicast.hosts'] = []
} else {
esConfig['discovery.zen.ping.unicast.hosts'] = "\"${unicastTransportUri}\""
}
esConfig
}
}
startTasks.add(configureNode(project, prefix, runner, dependsOn, node, config, distro, writeConfigSetup))
}

Task wait = configureWaitTask("${prefix}#wait", project, nodes, startTasks, config.nodeStartupWaitSeconds)
@@ -182,7 +205,7 @@
* @return a task which starts the node.
*/
static Task configureNode(Project project, String prefix, Task runner, Object dependsOn, NodeInfo node, ClusterConfiguration config,
Configuration distribution, NodeInfo seedNode) {
Configuration distribution, Closure<Map> writeConfig) {

// tasks are chained so their execution order is maintained
Task setup = project.tasks.create(name: taskName(prefix, node, 'clean'), type: Delete, dependsOn: dependsOn) {
Expand All @@ -198,7 +221,7 @@ class ClusterFormationTasks {
setup = configureCheckPreviousTask(taskName(prefix, node, 'checkPrevious'), project, setup, node)
setup = configureStopTask(taskName(prefix, node, 'stopPrevious'), project, setup, node)
setup = configureExtractTask(taskName(prefix, node, 'extract'), project, setup, node, distribution)
setup = configureWriteConfigTask(taskName(prefix, node, 'configure'), project, setup, node, seedNode)
setup = configureWriteConfigTask(taskName(prefix, node, 'configure'), project, setup, node, writeConfig)
setup = configureCreateKeystoreTask(taskName(prefix, node, 'createKeystore'), project, setup, node)
setup = configureAddKeystoreSettingTasks(prefix, project, setup, node)
setup = configureAddKeystoreFileTasks(prefix, project, setup, node)
@@ -301,7 +324,7 @@
}

/** Adds a task to write elasticsearch.yml for the given node configuration */
static Task configureWriteConfigTask(String name, Project project, Task setup, NodeInfo node, NodeInfo seedNode) {
static Task configureWriteConfigTask(String name, Project project, Task setup, NodeInfo node, Closure<Map> configFilter) {
Map esConfig = [
'cluster.name' : node.clusterName,
'node.name' : "node-" + node.nodeNum,
@@ -347,10 +370,7 @@

Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)
writeConfig.doFirst {
String unicastTransportUri = node.config.unicastTransportUri(seedNode, node, project.ant)
if (unicastTransportUri != null) {
esConfig['discovery.zen.ping.unicast.hosts'] = "\"${unicastTransportUri}\""
}
esConfig = configFilter.call(esConfig)
File configFile = new File(node.pathConf, 'elasticsearch.yml')
logger.info("Configuring ${configFile}")
configFile.setText(esConfig.collect { key, value -> "${key}: ${value}" }.join('\n'), 'UTF-8')
@@ -681,6 +701,19 @@
static Task configureWaitTask(String name, Project project, List<NodeInfo> nodes, List<Task> startTasks, int waitSeconds) {
Task wait = project.tasks.create(name: name, dependsOn: startTasks)
wait.doLast {

Collection<String> unicastHosts = new HashSet<>()
nodes.forEach { otherNode ->
String unicastHost = otherNode.config.unicastTransportUri(otherNode, null, project.ant)
if (unicastHost != null) {
unicastHosts.addAll(Arrays.asList(unicastHost.split(",")))
}
}
String unicastHostsTxt = String.join("\n", unicastHosts)
nodes.forEach { node ->
node.pathConf.toPath().resolve("unicast_hosts.txt").setText(unicastHostsTxt)
}

ant.waitfor(maxwait: "${waitSeconds}", maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: "failed${name}") {
or {
for (NodeInfo node : nodes) {
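For nodes on 6.5.0 and later, the changes above switch cluster formation from seeding each node with the first node's transport URI to the file-based hosts provider: elasticsearch.yml gets an empty static unicast host list plus `discovery.zen.hosts_provider: file`, and the wait task writes every node's transport address into `unicast_hosts.txt` once the nodes are up. The standalone Groovy sketch below, with invented transport addresses, only illustrates the text those two steps produce; it is not part of the build logic:

```groovy
// Standalone sketch of what the 6.5.0+ code path above generates for a
// hypothetical two-node cluster; the transport addresses are made up.
List<String> transportUris = ['127.0.0.1:9300', '127.0.0.1:9301']

// Per-node elasticsearch.yml additions: rely on the file-based hosts
// provider instead of a static seed node.
Map<String, Object> esConfig = [
    'discovery.zen.hosts_provider'    : 'file',
    'discovery.zen.ping.unicast.hosts': []
]
println esConfig.collect { key, value -> "${key}: ${value}" }.join('\n')

// unicast_hosts.txt, written by the wait task after all nodes have
// started, so every node can discover all the others.
println transportUris.join('\n')
```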
@@ -64,8 +64,6 @@ public class RestIntegTestTask extends DefaultTask {
runner.testClassesDirs = project.sourceSets.test.output.classesDirs
clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project)

// start with the common test configuration
runner.configure(BuildPlugin.commonTestConfig(project))
// override/add more for rest tests
runner.parallelism = '1'
runner.include('**/*IT.class')
@@ -50,6 +50,7 @@ public class StandaloneRestTestPlugin implements Plugin<Project> {
project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask)
BuildPlugin.globalBuildInfo(project)
BuildPlugin.configureRepositories(project)
BuildPlugin.applyCommonTestConfig(project)

// only setup tests to build
project.sourceSets.create('test')
@@ -24,7 +24,6 @@ import org.elasticsearch.gradle.BuildPlugin
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.tasks.compile.JavaCompile

/**
* Configures the build to compile against Elasticsearch's test framework and
@@ -44,7 +43,6 @@ public class StandaloneTestPlugin implements Plugin<Project> {
description: 'Runs unit tests that are separate'
]
RandomizedTestingTask test = project.tasks.create(testOptions)
test.configure(BuildPlugin.commonTestConfig(project))
BuildPlugin.configureCompile(project)
test.classpath = project.sourceSets.test.runtimeClasspath
test.testClassesDirs = project.sourceSets.test.output.classesDirs
@@ -39,8 +39,8 @@

public class TestClustersPlugin implements Plugin<Project> {

public static final String LIST_TASK_NAME = "listElasticSearchClusters";
public static final String EXTENSION_NAME = "elasticSearchClusters";
private static final String LIST_TASK_NAME = "listTestClusters";
private static final String NODE_EXTENSION_NAME = "testClusters";

private final Logger logger = Logging.getLogger(TestClustersPlugin.class);

@@ -50,7 +50,7 @@ public void apply(Project project) {
ElasticsearchNode.class,
(name) -> new ElasticsearchNode(name, GradleServicesAdapter.getInstance(project))
);
project.getExtensions().add(EXTENSION_NAME, container);
project.getExtensions().add(NODE_EXTENSION_NAME, container);

Task listTask = project.getTasks().create(LIST_TASK_NAME);
listTask.setGroup("ES cluster formation");
3 changes: 0 additions & 3 deletions buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -614,9 +614,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]TermsDocCountErrorIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]TermsShardMinDocCountIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]nested[/\\]NestedAggregatorTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]pipeline[/\\]ExtendedStatsBucketIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]pipeline[/\\]moving[/\\]avg[/\\]MovAvgIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]pipeline[/\\]serialdiff[/\\]SerialDiffIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]SearchWhileCreatingIndexIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]SearchWhileRelocatingIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]SearchWithRandomExceptionsIT.java" checks="LineLength" />
@@ -31,11 +31,11 @@ public class TestClustersPluginIT extends GradleIntegrationTestCase {
public void testListClusters() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("testclusters"))
.withArguments("listElasticSearchClusters", "-s")
.withArguments("listTestClusters", "-s")
.withPluginClasspath()
.build();

assertEquals(TaskOutcome.SUCCESS, result.task(":listElasticSearchClusters").getOutcome());
assertEquals(TaskOutcome.SUCCESS, result.task(":listTestClusters").getOutcome());
assertOutputContains(
result.getOutput(),
" * myTestCluster:"
14 changes: 7 additions & 7 deletions buildSrc/src/testKit/testclusters/build.gradle
@@ -2,40 +2,40 @@ plugins {
id 'elasticsearch.testclusters'
}

elasticSearchClusters {
testClusters {
myTestCluster {
distribution = 'ZIP'
}
}

task user1 {
useCluster elasticSearchClusters.myTestCluster
useCluster testClusters.myTestCluster
doLast {
println "user1 executing"
}
}

task user2 {
useCluster elasticSearchClusters.myTestCluster
useCluster testClusters.myTestCluster
doLast {
println "user2 executing"
}
}

task upToDate1 {
useCluster elasticSearchClusters.myTestCluster
useCluster testClusters.myTestCluster
}

task upToDate2 {
useCluster elasticSearchClusters.myTestCluster
useCluster testClusters.myTestCluster
}

task skipped1 {
enabled = false
useCluster elasticSearchClusters.myTestCluster
useCluster testClusters.myTestCluster
}

task skipped2 {
enabled = false
useCluster elasticSearchClusters.myTestCluster
useCluster testClusters.myTestCluster
}
@@ -31,6 +31,8 @@

final class ClusterRequestConverters {

private ClusterRequestConverters() {}

static Request clusterPutSettings(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest) throws IOException {
Request request = new Request(HttpPut.METHOD_NAME, "/_cluster/settings");

