Merge branch 'master' into build-flavor-type-leniency
* master: (63 commits)
  Suppress lease background sync failures if stopping (elastic#40902)
  [DOCS] Added settings page for ILM. (elastic#40880)
  [Docs] Remove extraneous text (elastic#40914)
  Move test classes to test root in Painless (elastic#40873)
  Fix date index name processor default date_formats (elastic#40915)
  Source additional files correctly in elasticsearch-cli (elastic#40890)
  Allow AVX-512 on JDK 11+ (elastic#40828)
  [Docs] Change example to show col headers (elastic#40822)
  Update apache httpclient to version 4.5.8 (elastic#40875)
  Update monitoring-kibana.json (elastic#40899)
  Introduce Delegating ActionListener Wrappers (elastic#40129)
  Deprecate old transport settings (elastic#40821)
  Add Kibana application privileges for monitoring and ml reserved roles (elastic#40651)
  Use Writeable for TransportReplAction derivatives (elastic#40894)
  Add test for HTTP and Transport TLS on basic license (elastic#40714)
  Remove unneeded cluster config from test (elastic#40856)
  Make Fuzziness reject illegal values earlier (elastic#33511)
  Remove test-only customisation from TransReplAct (elastic#40863)
  Fix dense/sparse vector limit documentation (elastic#40852)
  Make -try xlint warning disabled by default. (elastic#40833)
  ...
jasontedor committed Apr 6, 2019
2 parents 34daff9 + f92ebb2 commit cad967f
Showing 432 changed files with 135,016 additions and 4,842 deletions.
2 changes: 1 addition & 1 deletion benchmarks/build.gradle
@@ -39,7 +39,7 @@ dependencies {
runtime 'org.apache.commons:commons-math3:3.2'
}

compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked,-processing"
compileJava.options.compilerArgs << "-Xlint:-cast,-rawtypes,-unchecked,-processing"
// enable the JMH's BenchmarkProcessor to generate the final benchmark classes
// needs to be added separately otherwise Gradle will quote it and javac will fail
compileJava.options.compilerArgs.addAll(["-processor", "org.openjdk.jmh.generators.BenchmarkProcessor"])
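
The benchmarks project no longer needs to suppress -deprecation and -try itself: -deprecation was already excluded by the shared compiler settings, and the next hunk adds -try to that global exclusion list (see "Make -try xlint warning disabled by default", elastic#40833, in the merged commits). To confirm which -Xlint categories actually reach javac after all plugins have configured compileJava, a throwaway task such as the sketch below works; the task name is hypothetical and not part of this commit.

// Hypothetical helper task, not part of this commit: print the effective javac
// arguments once the build plugins have finished configuring compileJava.
task showCompilerArgs {
    doLast {
        println "compileJava args: ${compileJava.options.compilerArgs}"
    }
}
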
@@ -752,7 +752,7 @@ class BuildPlugin implements Plugin<Project> {
*/
// don't even think about passing args with -J-xxx, oracle will ask you to submit a bug report :)
// fail on all javac warnings
options.compilerArgs << '-Werror' << '-Xlint:all,-path,-serial,-options,-deprecation' << '-Xdoclint:all' << '-Xdoclint:-missing'
options.compilerArgs << '-Werror' << '-Xlint:all,-path,-serial,-options,-deprecation,-try' << '-Xdoclint:all' << '-Xdoclint:-missing'

// either disable annotation processor completely (default) or allow to enable them if an annotation processor is explicitly defined
if (options.compilerArgs.contains("-processor") == false) {
@@ -22,13 +22,18 @@ import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
import nebula.plugin.publishing.maven.MavenScmPlugin
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.NoticeTask
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.test.RunTask
import org.elasticsearch.gradle.testclusters.TestClustersPlugin
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.publish.maven.MavenPublication
import org.gradle.api.publish.maven.plugins.MavenPublishPlugin
import org.gradle.api.publish.maven.tasks.GenerateMavenPom
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.SourceSet
import org.gradle.api.tasks.bundling.Zip
import org.gradle.jvm.tasks.Jar
@@ -38,25 +43,29 @@ import java.util.regex.Pattern
/**
* Encapsulates build configuration for an Elasticsearch plugin.
*/
public class PluginBuildPlugin extends BuildPlugin {
class PluginBuildPlugin extends BuildPlugin {

public static final String PLUGIN_EXTENSION_NAME = 'esplugin'

@Override
public void apply(Project project) {
void apply(Project project) {
super.apply(project)

PluginPropertiesExtension extension = project.extensions.create(PLUGIN_EXTENSION_NAME, PluginPropertiesExtension, project)
configureDependencies(project)

// this afterEvaluate must happen before the afterEvaluate added by integTest creation,
// so that the file name resolution for installing the plugin will be setup
project.afterEvaluate {
boolean isXPackModule = project.path.startsWith(':x-pack:plugin')
boolean isModule = project.path.startsWith(':modules:') || isXPackModule
String name = project.pluginProperties.extension.name
String name = extension.name
project.archivesBaseName = name

// set the project description so it will be picked up by publishing
project.description = project.pluginProperties.extension.description
project.description = extension.description

configurePublishing(project)
configurePublishing(project, extension)

if (project.plugins.hasPlugin(TestClustersPlugin.class) == false) {
project.integTestCluster.dependsOn(project.tasks.bundlePlugin)
@@ -68,12 +77,23 @@ public class PluginBuildPlugin extends BuildPlugin {
} else {
project.tasks.integTest.dependsOn(project.tasks.bundlePlugin)
if (isModule) {
throw new RuntimeException("Testclusters does not support modules yet");
project.testClusters.integTest.module(
project.file(project.tasks.bundlePlugin.archiveFile)
)
} else {
project.testClusters.integTest.plugin(
project.file(project.tasks.bundlePlugin.archiveFile)
)
}

project.extensions.getByType(PluginPropertiesExtension).extendedPlugins.each { pluginName ->
// Auto add dependent modules to the test cluster
if (project.findProject(":modules:${pluginName}") != null) {
project.testClusters.integTest.module(
project.file(project.project(":modules:${pluginName}").tasks.bundlePlugin.archiveFile)
)
}
}
}

project.tasks.run.dependsOn(project.tasks.bundlePlugin)
@@ -87,7 +107,7 @@ public class PluginBuildPlugin extends BuildPlugin {
}

if (isModule == false || isXPackModule) {
addNoticeGeneration(project)
addNoticeGeneration(project, extension)
}
}
project.testingConventions {
@@ -104,32 +124,28 @@ public class PluginBuildPlugin extends BuildPlugin {
}
}
createIntegTestTask(project)
createBundleTask(project)
createBundleTasks(project, extension)
project.configurations.getByName('default').extendsFrom(project.configurations.getByName('runtime'))
project.tasks.create('run', RunTask) // allow running ES with this plugin in the foreground of a build
}

private void configurePublishing(Project project) {
private void configurePublishing(Project project, PluginPropertiesExtension extension) {
// Only configure publishing if applied externally
if (project.pluginProperties.extension.hasClientJar) {
if (extension.hasClientJar) {
project.plugins.apply(MavenScmPlugin.class)
// Only change Jar tasks, we don't want a -client zip so we can't change archivesBaseName
project.tasks.withType(Jar) {
baseName = baseName + "-client"
}
// always configure publishing for client jars
project.plugins.apply(MavenScmPlugin.class)
project.publishing.publications.nebula(MavenPublication).artifactId(
project.pluginProperties.extension.name + "-client"
)
project.publishing.publications.nebula(MavenPublication).artifactId(extension.name + "-client")
project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask ->
generatePOMTask.ext.pomFileName = "${project.archivesBaseName}-client-${project.versions.elasticsearch}.pom"
}
} else {
if (project.plugins.hasPlugin(MavenPublishPlugin)) {
project.publishing.publications.nebula(MavenPublication).artifactId(
project.pluginProperties.extension.name
)
project.publishing.publications.nebula(MavenPublication).artifactId(extension.name)
}

}
@@ -164,24 +180,64 @@ public class PluginBuildPlugin extends BuildPlugin {
* Adds a bundlePlugin task which builds the zip containing the plugin jars,
* metadata, properties, and packaging files
*/
private static void createBundleTask(Project project) {
private static void createBundleTasks(Project project, PluginPropertiesExtension extension) {
File pluginMetadata = project.file('src/main/plugin-metadata')
File templateFile = new File(project.buildDir, "templates/plugin-descriptor.properties")

// create tasks to build the properties file for this plugin
Task copyPluginPropertiesTemplate = project.tasks.create('copyPluginPropertiesTemplate') {
outputs.file(templateFile)
doLast {
InputStream resourceTemplate = PluginBuildPlugin.getResourceAsStream("/${templateFile.name}")
templateFile.setText(resourceTemplate.getText('UTF-8'), 'UTF-8')
}
}

// create a task to build the properties file for this plugin
PluginPropertiesTask buildProperties = project.tasks.create('pluginProperties', PluginPropertiesTask.class)
Copy buildProperties = project.tasks.create('pluginProperties', Copy) {
dependsOn(copyPluginPropertiesTemplate)
from(templateFile)
into("${project.buildDir}/generated-resources")
}

project.afterEvaluate {
// check required properties are set
if (extension.name == null) {
throw new InvalidUserDataException('name is a required setting for esplugin')
}
if (extension.description == null) {
throw new InvalidUserDataException('description is a required setting for esplugin')
}
if (extension.classname == null) {
throw new InvalidUserDataException('classname is a required setting for esplugin')
}

Map<String, String> properties = [
'name': extension.name,
'description': extension.description,
'version': extension.version,
'elasticsearchVersion': Version.fromString(VersionProperties.elasticsearch).toString(),
'javaVersion': project.targetCompatibility as String,
'classname': extension.classname,
'extendedPlugins': extension.extendedPlugins.join(','),
'hasNativeController': extension.hasNativeController,
'requiresKeystore': extension.requiresKeystore
]

buildProperties.configure {
expand(properties)
inputs.properties(properties)
}
}

// add the plugin properties and metadata to test resources, so unit tests can
// know about the plugin (used by test security code to statically initialize the plugin in unit tests)
SourceSet testSourceSet = project.sourceSets.test
testSourceSet.output.dir(buildProperties.descriptorOutput.parentFile, builtBy: 'pluginProperties')
testSourceSet.output.dir(buildProperties.destinationDir, builtBy: buildProperties)
testSourceSet.resources.srcDir(pluginMetadata)

// create the actual bundle task, which zips up all the files for the plugin
Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties]) {
from(buildProperties.descriptorOutput.parentFile) {
// plugin properties file
include(buildProperties.descriptorOutput.name)
}
Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip) {
from buildProperties
from pluginMetadata // metadata (eg custom security policy)
/*
* If the plugin is using the shadow plugin then we need to bundle
@@ -223,23 +279,17 @@ public class PluginBuildPlugin extends BuildPlugin {
}
}

/** Adds nebula publishing task to generate a pom file for the plugin. */
protected static void addClientJarPomGeneration(Project project) {
project.plugins.apply(MavenScmPlugin.class)
project.description = project.pluginProperties.extension.description
}

/** Configure the pom for the main jar of this plugin */

protected void addNoticeGeneration(Project project) {
File licenseFile = project.pluginProperties.extension.licenseFile
protected void addNoticeGeneration(Project project, PluginPropertiesExtension extension) {
File licenseFile = extension.licenseFile
if (licenseFile != null) {
project.tasks.bundlePlugin.from(licenseFile.parentFile) {
include(licenseFile.name)
rename { 'LICENSE.txt' }
}
}
File noticeFile = project.pluginProperties.extension.noticeFile
File noticeFile = extension.noticeFile
if (noticeFile != null) {
NoticeTask generateNotice = project.tasks.create('generateNotice', NoticeTask.class)
generateNotice.inputFile = noticeFile
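
The refactored createBundleTasks now reads everything from the PluginPropertiesExtension passed in and, in afterEvaluate, rejects builds that leave name, description, or classname unset. As a point of reference, a minimal consumer-side build script matching that contract might look like the sketch below; the plugin id, property syntax, and values are illustrative rather than copied from any build file in this commit.

// Hypothetical plugin build script showing the esplugin settings that the new
// validation in createBundleTasks requires (name, description, classname).
apply plugin: 'elasticsearch.esplugin'

esplugin {
    name 'example-plugin'                         // required
    description 'An illustrative example plugin'  // required
    classname 'org.example.ExamplePlugin'         // required
    extendedPlugins = ['lang-painless']           // optional; joined with ',' in the descriptor
}
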

This file was deleted.

@@ -384,7 +384,11 @@ class ClusterFormationTasks {
]
esConfig['node.max_local_storage_nodes'] = node.config.numNodes
esConfig['http.port'] = node.config.httpPort
esConfig['transport.tcp.port'] = node.config.transportPort
if (node.nodeVersion.onOrAfter('6.7.0')) {
esConfig['transport.port'] = node.config.transportPort
} else {
esConfig['transport.tcp.port'] = node.config.transportPort
}
// Default the watermarks to absurdly low to prevent the tests from failing on nodes without enough disk space
esConfig['cluster.routing.allocation.disk.watermark.low'] = '1b'
esConfig['cluster.routing.allocation.disk.watermark.high'] = '1b'
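
This hunk lines up with "Deprecate old transport settings" (elastic#40821) in the merged commits: nodes on 6.7.0 or later are configured with the new transport.port key, while older nodes keep the deprecated transport.tcp.port. The same gate, pulled out into a standalone Groovy sketch built on the Version.onOrAfter check the hunk already uses (illustrative only, not part of the change):

// Illustrative only: pick the transport port setting key by node version,
// mirroring the branch added to ClusterFormationTasks above.
String transportPortKey(org.elasticsearch.gradle.Version nodeVersion) {
    return nodeVersion.onOrAfter('6.7.0') ? 'transport.port' : 'transport.tcp.port'
}
// e.g. esConfig[transportPortKey(node.nodeVersion)] = node.config.transportPort
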
@@ -20,7 +20,7 @@ package org.elasticsearch.gradle.test

import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.testclusters.ElasticsearchNode
import org.elasticsearch.gradle.testclusters.ElasticsearchCluster
import org.elasticsearch.gradle.testclusters.TestClustersPlugin
import org.gradle.api.DefaultTask
import org.gradle.api.Task
@@ -81,10 +81,10 @@ public class RestIntegTestTask extends DefaultTask {
throw new IllegalArgumentException("tests.rest.cluster and tests.cluster must both be null or non-null")
}
if (usesTestclusters == true) {
ElasticsearchNode node = project.testClusters."${name}"
runner.systemProperty('tests.rest.cluster', {node.allHttpSocketURI.join(",") })
runner.systemProperty('tests.config.dir', {node.getConfigDir()})
runner.systemProperty('tests.cluster', {node.transportPortURI})
ElasticsearchCluster cluster = project.testClusters."${name}"
runner.systemProperty('tests.rest.cluster', {cluster.allHttpSocketURI.join(",") })
runner.systemProperty('tests.config.dir', {cluster.singleNode().getConfigDir()})
runner.systemProperty('tests.cluster', {cluster.transportPortURI})
} else {
// we pass all nodes to the rest cluster to allow the clients to round-robin between them
// this is more realistic than just talking to a single node
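
With testclusters enabled, the runner is now handed an ElasticsearchCluster rather than a single ElasticsearchNode, and the config dir is looked up through cluster.singleNode(). The cluster-derived values are passed as closures, presumably so they are resolved only once the cluster is actually running; the same pattern would apply to any extra property a build wants to expose. The property name below is hypothetical, not something this commit defines.

// Hypothetical example of the same lazy-closure pattern: the closure is only
// evaluated when the runner reads the property, after the cluster has started.
runner.systemProperty('tests.cluster.http.hosts', { cluster.allHttpSocketURI.join(",") })
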
@@ -111,13 +111,13 @@ public void checkInvalidPatterns() throws IOException {
.collect(Collectors.toList());

String path = getProject().getRootProject().getProjectDir().toURI().relativize(f.toURI()).toString();
failures = invalidLines.stream()
failures.addAll(invalidLines.stream()
.map(l -> new AbstractMap.SimpleEntry<>(l+1, lines.get(l)))
.flatMap(kv -> patterns.entrySet().stream()
.filter(p -> Pattern.compile(p.getValue()).matcher(kv.getValue()).find())
.map(p -> "- " + p.getKey() + " on line " + kv.getKey() + " of " + path)
)
.collect(Collectors.toList());
.collect(Collectors.toList()));
}
if (failures.isEmpty() == false) {
throw new GradleException("Found invalid patterns:\n" + String.join("\n", failures));
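
The switch from reassigning failures to failures.addAll(...) matters because this block runs once per checked file: reassignment threw away the violations collected from earlier files, so only the last file's findings were reported. A self-contained Groovy illustration of the difference (not taken from the Elasticsearch sources):

// Overwriting a result list on each iteration loses earlier findings;
// accumulating with addAll keeps them all.
List<List<String>> perFileFindings = [['a'], ['b', 'c']]
List<String> overwritten = []
List<String> accumulated = []
for (List<String> findings : perFileFindings) {
    overwritten = findings.collect { '- ' + it }        // earlier files' findings are lost
    accumulated.addAll(findings.collect { '- ' + it })  // findings from every file are kept
}
assert overwritten == ['- b', '- c']
assert accumulated == ['- a', '- b', '- c']
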
