diff --git a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy index 2268715fc..d1adf27c1 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy @@ -729,10 +729,13 @@ class BuildPlugin implements Plugin { } private static void updateVariantPomLocationAndArtifactId(Project project, MavenPublication publication, SparkVariant variant) { + // Add variant classifier to the pom file name if required + String classifier = variant.shouldClassifySparkVersion() && variant.isDefaultVariant() == false ? "-${variant.getName()}" : '' + String filename = "${project.archivesBaseName}_${variant.scalaMajorVersion}-${project.getVersion()}${classifier}" // Fix the pom name project.tasks.withType(GenerateMavenPom).all { GenerateMavenPom pom -> if (pom.name == "generatePomFileFor${publication.name.capitalize()}Publication") { - pom.destination = project.provider({"${project.buildDir}/distributions/${project.archivesBaseName}_${variant.scalaMajorVersion}-${project.getVersion()}.pom"}) + pom.destination = project.provider({"${project.buildDir}/distributions/${filename}.pom"}) } } // Fix the artifactId. Note: The publishing task does not like this happening. Hence it is disabled. 
diff --git a/buildSrc/src/main/java/org/elasticsearch/hadoop/gradle/scala/SparkVariantPlugin.java b/buildSrc/src/main/java/org/elasticsearch/hadoop/gradle/scala/SparkVariantPlugin.java index e95375e4c..cb83ad06e 100644 --- a/buildSrc/src/main/java/org/elasticsearch/hadoop/gradle/scala/SparkVariantPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/hadoop/gradle/scala/SparkVariantPlugin.java @@ -81,19 +81,21 @@ public static class SparkVariant { private final String scalaVersion; private final String scalaMajorVersion; private final String capability; + private final boolean classifySparkVersion; public SparkVariant(String name) { throw new GradleException("Cannot create variant named [" + name + "]. Do not instantiate objects directly. " + "You must configure this via the SparkVariantPluginExtension."); } - public SparkVariant(CharSequence name, boolean isDefaultVariant, String sparkVersion, String scalaVersion, String capability) { + public SparkVariant(CharSequence name, boolean isDefaultVariant, String sparkVersion, String scalaVersion, String capability, boolean classifySparkVersion) { this.name = name; this.isDefaultVariant = isDefaultVariant; this.sparkVersion = sparkVersion; this.scalaVersion = scalaVersion; this.scalaMajorVersion = scalaVersion.substring(0, scalaVersion.lastIndexOf('.')); this.capability = capability; + this.classifySparkVersion = classifySparkVersion; } public String getName() { @@ -124,6 +126,10 @@ public String getCapability() { return capability; } + public boolean shouldClassifySparkVersion() { + return classifySparkVersion; + } + public String getSourceSetName(String sourceSetName) { String result; if (isDefaultVariant) { @@ -184,22 +190,38 @@ public void capabilityGroup(String capability) { } public SparkVariant setDefaultVariant(String variantName, String sparkVersion, String scalaVersion) { + return setDefaultVariant(variantName, sparkVersion, scalaVersion, false); + } + + public SparkVariant setCoreDefaultVariant(String 
variantName, String sparkVersion, String scalaVersion) { + return setDefaultVariant(variantName, sparkVersion, scalaVersion, true); + } + + public SparkVariant setDefaultVariant(String variantName, String sparkVersion, String scalaVersion, boolean classifySparkVersion) { if (defaultVariant != null) { throw new GradleException("Cannot set default variant multiple times"); } if (capability == null) { throw new GradleException("Must set capability group before adding variant definitions"); } - defaultVariant = new SparkVariant(variantName, true, sparkVersion, scalaVersion, capability); + defaultVariant = new SparkVariant(variantName, true, sparkVersion, scalaVersion, capability, classifySparkVersion); variants.add(defaultVariant); return defaultVariant; } public SparkVariant addFeatureVariant(String variantName, String sparkVersion, String scalaVersion) { + return addFeatureVariant(variantName, sparkVersion, scalaVersion, false); + } + + public SparkVariant addCoreFeatureVariant(String variantName, String sparkVersion, String scalaVersion) { + return addFeatureVariant(variantName, sparkVersion, scalaVersion, true); + } + + public SparkVariant addFeatureVariant(String variantName, String sparkVersion, String scalaVersion, boolean classifySparkVersion) { if (capability == null) { throw new GradleException("Must set capability group before adding variant definitions"); } - SparkVariant variant = new SparkVariant(variantName, false, sparkVersion, scalaVersion, capability); + SparkVariant variant = new SparkVariant(variantName, false, sparkVersion, scalaVersion, capability, classifySparkVersion); variants.add(variant); return variant; } @@ -437,7 +459,8 @@ private static void registerVariantScaladoc(Project project, TaskContainer tasks }); } - private static void correctScalaJarClassifiers(Jar jar, SparkVariant sparkVariant) { + private static void removeVariantNameFromClassifier(Jar jar, SparkVariant sparkVariant) { + // the default variant doesn't have classifiers on it 
to remove if (sparkVariant.isDefaultVariant() == false) { String classifier = jar.getArchiveClassifier().get(); classifier = classifier.replace(sparkVariant.name, ""); @@ -446,6 +469,12 @@ private static void correctScalaJarClassifiers(Jar jar, SparkVariant sparkVarian } jar.getArchiveClassifier().set(classifier); } + } + + private static void correctScalaJarClassifiers(Jar jar, SparkVariant sparkVariant) { + if (sparkVariant.shouldClassifySparkVersion() == false) { + removeVariantNameFromClassifier(jar, sparkVariant); + } String baseName = jar.getArchiveBaseName().get(); baseName = baseName + "_" + sparkVariant.scalaMajorVersion; jar.getArchiveBaseName().set(baseName); @@ -453,8 +482,8 @@ private static void correctScalaJarClassifiers(Jar jar, SparkVariant sparkVarian private static void configureScalaJarClassifiers(Project project, final SparkVariant sparkVariant) { TaskCollection jars = project.getTasks().withType(Jar.class); - correctScalaJarClassifiers(jars.getByName(sparkVariant.taskName("jar")), sparkVariant); - correctScalaJarClassifiers(jars.getByName(sparkVariant.taskName("javadocJar")), sparkVariant); - correctScalaJarClassifiers(jars.getByName(sparkVariant.taskName("sourcesJar")), sparkVariant); + jars.named(sparkVariant.taskName("jar"), (Jar jar) -> correctScalaJarClassifiers(jar, sparkVariant)); + jars.named(sparkVariant.taskName("javadocJar"), (Jar jar) -> correctScalaJarClassifiers(jar, sparkVariant)); + jars.named(sparkVariant.taskName("sourcesJar"), (Jar jar) -> correctScalaJarClassifiers(jar, sparkVariant)); } } diff --git a/spark/core/build.gradle b/spark/core/build.gradle index 24799c45a..361af40d1 100644 --- a/spark/core/build.gradle +++ b/spark/core/build.gradle @@ -9,10 +9,10 @@ apply plugin: 'spark.variants' sparkVariants { capabilityGroup 'org.elasticsearch.spark.variant' - setDefaultVariant "spark20scala211", spark24Version, scala211Version - addFeatureVariant "spark20scala210", spark22Version, scala210Version - addFeatureVariant 
"spark13scala211", spark13Version, scala211Version - addFeatureVariant "spark13scala210", spark13Version, scala210Version + setCoreDefaultVariant "spark20scala211", spark24Version, scala211Version + addCoreFeatureVariant "spark20scala210", spark22Version, scala210Version + addCoreFeatureVariant "spark13scala211", spark13Version, scala211Version + addCoreFeatureVariant "spark13scala210", spark13Version, scala210Version all { SparkVariantPlugin.SparkVariant variant ->