[Backport] Consolidate docker availability logic (#52656)
mark-vieira committed Feb 23, 2020
1 parent 8563bec commit 21eea3b
Showing 8 changed files with 90 additions and 35 deletions.
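This backport replaces per-build-script Docker checks with a single shared DockerSupportService registered by DockerSupportPlugin. As a minimal sketch of the consolidated pattern, here is how a Gradle plugin can obtain and query that service using only calls visible in this diff (Boilerplate.getBuildService, DOCKER_SUPPORT_SERVICE_NAME, getDockerAvailability().isAvailable); the plugin class name and log messages are illustrative and not part of the commit:

import org.elasticsearch.gradle.docker.DockerSupportPlugin;
import org.elasticsearch.gradle.docker.DockerSupportService;
import org.elasticsearch.gradle.tool.Boilerplate;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.provider.Provider;

// Illustrative consumer of the shared Docker support service; not part of this commit.
public class ExampleDockerAwarePlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        // The support plugin is applied to the root project so every project shares one service instance.
        project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class);

        Provider<DockerSupportService> dockerSupport = Boilerplate.getBuildService(
            project.getGradle().getSharedServices(),
            DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME
        );

        // For illustration we query availability eagerly here; the committed code defers
        // the check, e.g. inside ElasticsearchDistribution.getBuildDependencies() below.
        if (dockerSupport.get().getDockerAvailability().isAvailable) {
            project.getLogger().lifecycle("Docker is available; Docker-dependent tasks will run.");
        } else {
            project.getLogger().lifecycle("Docker is unavailable; Docker-dependent tasks will be skipped.");
        }
    }
}
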
1 change: 1 addition & 0 deletions build.gradle
@@ -34,6 +34,7 @@ import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure

plugins {
id 'lifecycle-base'
id 'elasticsearch.docker-support'
id 'elasticsearch.global-build-info'
id "com.diffplug.gradle.spotless" version "3.24.2" apply false
}
DistributionDownloadPlugin.java
@@ -22,8 +22,11 @@
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor;
import org.elasticsearch.gradle.ElasticsearchDistribution.Platform;
import org.elasticsearch.gradle.ElasticsearchDistribution.Type;
import org.elasticsearch.gradle.docker.DockerSupportPlugin;
import org.elasticsearch.gradle.docker.DockerSupportService;
import org.elasticsearch.gradle.info.BuildParams;
import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin;
import org.elasticsearch.gradle.tool.Boilerplate;
import org.gradle.api.GradleException;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
@@ -38,6 +41,7 @@
import org.gradle.api.file.FileTree;
import org.gradle.api.file.RelativePath;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.Sync;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.authentication.http.HttpHeaderAuthentication;
@@ -72,11 +76,17 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
public void apply(Project project) {
// this is needed for isInternal
project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class);
project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class);

Provider<DockerSupportService> dockerSupport = Boilerplate.getBuildService(
project.getGradle().getSharedServices(),
DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME
);

distributionsContainer = project.container(ElasticsearchDistribution.class, name -> {
Configuration fileConfiguration = project.getConfigurations().create("es_distro_file_" + name);
Configuration extractedConfiguration = project.getConfigurations().create("es_distro_extracted_" + name);
return new ElasticsearchDistribution(name, project.getObjects(), fileConfiguration, extractedConfiguration);
return new ElasticsearchDistribution(name, project.getObjects(), dockerSupport, fileConfiguration, extractedConfiguration);
});
project.getExtensions().add(CONTAINER_NAME, distributionsContainer);

ElasticsearchDistribution.java
@@ -19,13 +19,16 @@

package org.elasticsearch.gradle;

import org.elasticsearch.gradle.docker.DockerSupportService;
import org.gradle.api.Buildable;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.Property;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.TaskDependency;

import java.io.File;
import java.util.Collections;
import java.util.Iterator;
import java.util.Locale;

@@ -110,6 +113,7 @@ public String toString() {
}

private final String name;
private final Provider<DockerSupportService> dockerSupport;
// pkg private so plugin can configure
final Configuration configuration;
private final Extracted extracted;
@@ -119,21 +123,25 @@ public String toString() {
private final Property<Platform> platform;
private final Property<Flavor> flavor;
private final Property<Boolean> bundledJdk;
private final Property<Boolean> failIfUnavailable;

ElasticsearchDistribution(
String name,
ObjectFactory objectFactory,
Provider<DockerSupportService> dockerSupport,
Configuration fileConfiguration,
Configuration extractedConfiguration
) {
this.name = name;
this.dockerSupport = dockerSupport;
this.configuration = fileConfiguration;
this.version = objectFactory.property(String.class).convention(VersionProperties.getElasticsearch());
this.type = objectFactory.property(Type.class);
this.type.convention(Type.ARCHIVE);
this.platform = objectFactory.property(Platform.class);
this.flavor = objectFactory.property(Flavor.class);
this.bundledJdk = objectFactory.property(Boolean.class);
this.failIfUnavailable = objectFactory.property(Boolean.class).convention(true);
this.extracted = new Extracted(extractedConfiguration);
}

@@ -182,6 +190,14 @@ public void setBundledJdk(Boolean bundledJdk) {
this.bundledJdk.set(bundledJdk);
}

public boolean getFailIfUnavailable() {
return this.failIfUnavailable.get();
}

public void setFailIfUnavailable(boolean failIfUnavailable) {
this.failIfUnavailable.set(failIfUnavailable);
}

@Override
public String toString() {
return configuration.getSingleFile().toString();
@@ -203,6 +219,13 @@ public Extracted getExtracted() {

@Override
public TaskDependency getBuildDependencies() {
// For non-required Docker distributions, skip building the distribution if Docker is unavailable
if (getType() == Type.DOCKER
&& getFailIfUnavailable() == false
&& dockerSupport.get().getDockerAvailability().isAvailable == false) {
return task -> Collections.emptySet();
}

return configuration.getBuildDependencies();
}

@@ -222,7 +245,7 @@ void finalizeValues() {
if (getType() == Type.INTEG_TEST_ZIP) {
if (platform.getOrNull() != null) {
throw new IllegalArgumentException(
"platform not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]"
"platform cannot be set on elasticsearch distribution [" + name + "] of type [integ_test_zip]"
);
}
if (flavor.getOrNull() != null) {
@@ -232,12 +255,18 @@
}
if (bundledJdk.getOrNull() != null) {
throw new IllegalArgumentException(
"bundledJdk not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]"
"bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type [integ_test_zip]"
);
}
return;
}

if (getType() != Type.DOCKER && failIfUnavailable.get() == false) {
throw new IllegalArgumentException(
"failIfUnavailable cannot be 'false' on elasticsearch distribution [" + name + "] of type [" + getType() + "]"
);
}

if (getType() == Type.ARCHIVE) {
// defaults for archive, set here instead of via convention so integ-test-zip can verify they are not set
if (platform.isPresent() == false) {
@@ -246,7 +275,12 @@
} else { // rpm, deb or docker
if (platform.isPresent()) {
throw new IllegalArgumentException(
"platform not allowed for elasticsearch distribution [" + name + "] of type [" + getType() + "]"
"platform cannot be set on elasticsearch distribution [" + name + "] of type [" + getType() + "]"
);
}
if (getType() == Type.DOCKER && bundledJdk.isPresent()) {
throw new IllegalArgumentException(
"bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type [docker]"
);
}
}
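Build scripts set the new failIfUnavailable flag through the elasticsearch_distributions DSL, as the two build.gradle changes further down show. For completeness, a minimal plugin-side sketch of the same configuration follows; the distribution name is illustrative, and the setType/setFlavor/setVersion setters are assumed from the DSL usage (type = 'docker', flavor, version) rather than shown in this diff:

import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor;
import org.elasticsearch.gradle.ElasticsearchDistribution.Type;
import org.elasticsearch.gradle.VersionProperties;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;

// Illustrative only: registers a Docker distribution that is skipped, rather than failing the build,
// when Docker is missing.
public class DockerDistroExample {
    @SuppressWarnings("unchecked")
    public static void registerDockerDistro(Project project) {
        // Container added by DistributionDownloadPlugin under the name used in build scripts.
        NamedDomainObjectContainer<ElasticsearchDistribution> distributions =
            (NamedDomainObjectContainer<ElasticsearchDistribution>) project.getExtensions()
                .getByName("elasticsearch_distributions");

        ElasticsearchDistribution distro = distributions.create("docker_example"); // hypothetical name
        distro.setType(Type.DOCKER);          // assumed setter, mirrored by `type = 'docker'` in the DSL
        distro.setFlavor(Flavor.DEFAULT);     // assumed setter
        distro.setVersion(VersionProperties.getElasticsearch());
        distro.setFailIfUnavailable(false);   // the new flag: skip Docker work instead of failing the build
    }
}
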
TestClustersPlugin.java
@@ -39,6 +39,8 @@

import java.io.File;

import static org.elasticsearch.gradle.tool.Boilerplate.noop;

public class TestClustersPlugin implements Plugin<Project> {

public static final String EXTENSION_NAME = "testClusters";
@@ -72,7 +74,7 @@ public void apply(Project project) {
createListClustersTask(project, container);

// register cluster registry as a global build service
project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, spec -> {});
project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop());

// register throttle so we only run at most max-workers/2 nodes concurrently
project.getGradle()
Boilerplate.java
@@ -38,6 +38,10 @@

public abstract class Boilerplate {

public static <T> Action<T> noop() {
return t -> {};
}

public static SourceSetContainer getJavaSourceSets(Project project) {
return project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets();
}
24 changes: 15 additions & 9 deletions distribution/docker/build.gradle
@@ -1,11 +1,12 @@
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin

apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.test.fixtures'
apply plugin: 'elasticsearch.distribution-download'

testFixtures.useFixture()

@@ -105,10 +106,19 @@ task copyKeystore(type: Sync) {
}
}

preProcessFixture {
if (TestFixturesPlugin.dockerComposeSupported()) {
dependsOn assemble
elasticsearch_distributions {
Flavor.values().each { distroFlavor ->
"docker_$distroFlavor" {
flavor = distroFlavor
type = 'docker'
version = VersionProperties.getElasticsearch()
failIfUnavailable = false // This ensures we don't attempt to build images if docker is unavailable
}
}
}

preProcessFixture {
dependsOn elasticsearch_distributions.docker_default, elasticsearch_distributions.docker_oss
dependsOn copyKeystore
doLast {
// tests expect to have an empty repo
@@ -140,16 +150,13 @@ task integTest(type: Test) {
outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true }
maxParallelForks = '1'
include '**/*IT.class'
// don't add the tasks to build the docker images if we have no way of testing them
if (TestFixturesPlugin.dockerComposeSupported()) {
dependsOn assemble
}
}

check.dependsOn integTest

void addBuildDockerImage(final boolean oss) {
final Task buildDockerImageTask = task(taskName("build", oss, "DockerImage"), type: LoggedExec) {
ext.requiresDocker = true // mark this task as requiring docker to execute
inputs.files(tasks.named(taskName("copy", oss, "DockerContext")))
List<String> tags
if (oss) {
@@ -179,7 +186,6 @@ void addBuildDockerImage(final boolean oss) {
}
}
assemble.dependsOn(buildDockerImageTask)
BuildPlugin.requireDocker(buildDockerImageTask)
}

for (final boolean oss : [false, true]) {
2 changes: 1 addition & 1 deletion plugins/repository-hdfs/build.gradle
@@ -97,7 +97,7 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture',
executable = "${BuildParams.runtimeJavaHome}/bin/java"
env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
maxWaitInSeconds 60
onlyIf { project(':test:fixtures:krb5kdc-fixture').buildFixture.enabled && BuildParams.inFipsJvm == false }
onlyIf { BuildParams.inFipsJvm == false }
waitCondition = { fixture, ant ->
// the hdfs.MiniHDFS fixture writes the ports file when
// it's ready, so we can just wait for the file to exist
38 changes: 18 additions & 20 deletions qa/remote-clusters/build.gradle
@@ -16,10 +16,12 @@
* specific language governing permissions and limitations
* under the License.
*/
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin

apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.test.fixtures'
apply plugin: 'elasticsearch.distribution-download'

testFixtures.useFixture()

@@ -42,15 +44,17 @@ task copyKeystore(type: Sync) {
}
}

preProcessFixture {
if (TestFixturesPlugin.dockerComposeSupported()) {
if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) {
dependsOn ":distribution:docker:buildDockerImage"
} else {
dependsOn ":distribution:docker:buildOssDockerImage"
}
elasticsearch_distributions {
docker {
type = 'docker'
flavor = System.getProperty('tests.distribution', 'default')
version = VersionProperties.getElasticsearch()
failIfUnavailable = false // This ensures we skip these tests if Docker is unavailable
}
dependsOn copyKeystore
}

preProcessFixture {
dependsOn copyKeystore, elasticsearch_distributions.docker
doLast {
// tests expect to have an empty repo
project.delete(
@@ -68,14 +72,12 @@ preProcessFixture {
}
}

if (TestFixturesPlugin.dockerComposeSupported()) {
dockerCompose {
tcpPortsToIgnoreWhenWaiting = [9600, 9601]
if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) {
useComposeFiles = ['docker-compose.yml']
} else {
useComposeFiles = ['docker-compose-oss.yml']
}
dockerCompose {
tcpPortsToIgnoreWhenWaiting = [9600, 9601]
if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) {
useComposeFiles = ['docker-compose.yml']
} else {
useComposeFiles = ['docker-compose-oss.yml']
}
}

@@ -100,10 +102,6 @@ task integTest(type: Test) {
outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true }
maxParallelForks = '1'
include '**/*IT.class'
// don't add the tasks to build the docker images if we have no way of testing them
if (TestFixturesPlugin.dockerComposeSupported()) {
dependsOn ":distribution:docker:buildDockerImage"
}
}

check.dependsOn integTest
