From 38c25a1b9f2ddc4e3329440d5765daeff9087c3f Mon Sep 17 00:00:00 2001 From: James Baiera Date: Wed, 20 Jan 2021 14:58:07 -0500 Subject: [PATCH 1/2] The spark core project was producing core artifacts with overlapping names (since the spark variant was removed from the name). --- spark/core/build.gradle | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/spark/core/build.gradle b/spark/core/build.gradle index 24799c45a..69b2a720a 100644 --- a/spark/core/build.gradle +++ b/spark/core/build.gradle @@ -122,6 +122,23 @@ sparkVariants { scaladoc { title = "${rootProject.description} ${version} API" } + + // The core project is strange since there are multiple variants with the same scala version present. They + // should be differentiated so they don't over write each other + def correctScalaJarClassifiers = { Jar jar -> + String classifier = jar.getArchiveClassifier().get() + if (classifier == null || classifier.isEmpty()) { + classifier = variant.name + } else { + classifier = "${variant.name}-${classifier}" + } + jar.getArchiveClassifier().set(classifier) + } + + TaskCollection jars = project.getTasks().withType(Jar.class) + correctScalaJarClassifiers(jars.getByName(variant.taskName("jar"))) + correctScalaJarClassifiers(jars.getByName(variant.taskName("javadocJar"))) + correctScalaJarClassifiers(jars.getByName(variant.taskName("sourcesJar"))) } } From 06ed0c1fdfd3951130943b3d8de15f07e4b6922a Mon Sep 17 00:00:00 2001 From: James Baiera Date: Wed, 20 Jan 2021 16:47:05 -0500 Subject: [PATCH 2/2] Push the classifier logic down into the plugin to fix the poms as well --- .../hadoop/gradle/BuildPlugin.groovy | 5 ++- .../gradle/scala/SparkVariantPlugin.java | 43 ++++++++++++++++--- spark/core/build.gradle | 25 ++--------- 3 files changed, 44 insertions(+), 29 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy index 
2268715fc..d1adf27c1 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy @@ -729,10 +729,13 @@ class BuildPlugin implements Plugin { } private static void updateVariantPomLocationAndArtifactId(Project project, MavenPublication publication, SparkVariant variant) { + // Add variant classifier to the pom file name if required + String classifier = variant.shouldClassifySparkVersion() && variant.isDefaultVariant() == false ? "-${variant.getName()}" : '' + String filename = "${project.archivesBaseName}_${variant.scalaMajorVersion}-${project.getVersion()}${classifier}" // Fix the pom name project.tasks.withType(GenerateMavenPom).all { GenerateMavenPom pom -> if (pom.name == "generatePomFileFor${publication.name.capitalize()}Publication") { - pom.destination = project.provider({"${project.buildDir}/distributions/${project.archivesBaseName}_${variant.scalaMajorVersion}-${project.getVersion()}.pom"}) + pom.destination = project.provider({"${project.buildDir}/distributions/${filename}.pom"}) } } // Fix the artifactId. Note: The publishing task does not like this happening. Hence it is disabled. diff --git a/buildSrc/src/main/java/org/elasticsearch/hadoop/gradle/scala/SparkVariantPlugin.java b/buildSrc/src/main/java/org/elasticsearch/hadoop/gradle/scala/SparkVariantPlugin.java index e95375e4c..cb83ad06e 100644 --- a/buildSrc/src/main/java/org/elasticsearch/hadoop/gradle/scala/SparkVariantPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/hadoop/gradle/scala/SparkVariantPlugin.java @@ -81,19 +81,21 @@ public static class SparkVariant { private final String scalaVersion; private final String scalaMajorVersion; private final String capability; + private final boolean classifySparkVersion; public SparkVariant(String name) { throw new GradleException("Cannot create variant named [" + name + "]. Do not instantiate objects directly. 
" + "You must configure this via the SparkVariantPluginExtension."); } - public SparkVariant(CharSequence name, boolean isDefaultVariant, String sparkVersion, String scalaVersion, String capability) { + public SparkVariant(CharSequence name, boolean isDefaultVariant, String sparkVersion, String scalaVersion, String capability, boolean classifySparkVersion) { this.name = name; this.isDefaultVariant = isDefaultVariant; this.sparkVersion = sparkVersion; this.scalaVersion = scalaVersion; this.scalaMajorVersion = scalaVersion.substring(0, scalaVersion.lastIndexOf('.')); this.capability = capability; + this.classifySparkVersion = classifySparkVersion; } public String getName() { @@ -124,6 +126,10 @@ public String getCapability() { return capability; } + public boolean shouldClassifySparkVersion() { + return classifySparkVersion; + } + public String getSourceSetName(String sourceSetName) { String result; if (isDefaultVariant) { @@ -184,22 +190,38 @@ public void capabilityGroup(String capability) { } public SparkVariant setDefaultVariant(String variantName, String sparkVersion, String scalaVersion) { + return setDefaultVariant(variantName, sparkVersion, scalaVersion, false); + } + + public SparkVariant setCoreDefaultVariant(String variantName, String sparkVersion, String scalaVersion) { + return setDefaultVariant(variantName, sparkVersion, scalaVersion, true); + } + + public SparkVariant setDefaultVariant(String variantName, String sparkVersion, String scalaVersion, boolean classifySparkVersion) { if (defaultVariant != null) { throw new GradleException("Cannot set default variant multiple times"); } if (capability == null) { throw new GradleException("Must set capability group before adding variant definitions"); } - defaultVariant = new SparkVariant(variantName, true, sparkVersion, scalaVersion, capability); + defaultVariant = new SparkVariant(variantName, true, sparkVersion, scalaVersion, capability, classifySparkVersion); variants.add(defaultVariant); return 
defaultVariant; } public SparkVariant addFeatureVariant(String variantName, String sparkVersion, String scalaVersion) { + return addFeatureVariant(variantName, sparkVersion, scalaVersion, false); + } + + public SparkVariant addCoreFeatureVariant(String variantName, String sparkVersion, String scalaVersion) { + return addFeatureVariant(variantName, sparkVersion, scalaVersion, true); + } + + public SparkVariant addFeatureVariant(String variantName, String sparkVersion, String scalaVersion, boolean classifySparkVersion) { if (capability == null) { throw new GradleException("Must set capability group before adding variant definitions"); } - SparkVariant variant = new SparkVariant(variantName, false, sparkVersion, scalaVersion, capability); + SparkVariant variant = new SparkVariant(variantName, false, sparkVersion, scalaVersion, capability, classifySparkVersion); variants.add(variant); return variant; } @@ -437,7 +459,8 @@ private static void registerVariantScaladoc(Project project, TaskContainer tasks }); } - private static void correctScalaJarClassifiers(Jar jar, SparkVariant sparkVariant) { + private static void removeVariantNameFromClassifier(Jar jar, SparkVariant sparkVariant) { + // the default variant doesn't have classifiers on it to remove if (sparkVariant.isDefaultVariant() == false) { String classifier = jar.getArchiveClassifier().get(); classifier = classifier.replace(sparkVariant.name, ""); @@ -446,6 +469,12 @@ private static void correctScalaJarClassifiers(Jar jar, SparkVariant sparkVarian } jar.getArchiveClassifier().set(classifier); } + } + + private static void correctScalaJarClassifiers(Jar jar, SparkVariant sparkVariant) { + if (sparkVariant.shouldClassifySparkVersion() == false) { + removeVariantNameFromClassifier(jar, sparkVariant); + } String baseName = jar.getArchiveBaseName().get(); baseName = baseName + "_" + sparkVariant.scalaMajorVersion; jar.getArchiveBaseName().set(baseName); @@ -453,8 +482,8 @@ private static void 
correctScalaJarClassifiers(Jar jar, SparkVariant sparkVarian private static void configureScalaJarClassifiers(Project project, final SparkVariant sparkVariant) { TaskCollection jars = project.getTasks().withType(Jar.class); - correctScalaJarClassifiers(jars.getByName(sparkVariant.taskName("jar")), sparkVariant); - correctScalaJarClassifiers(jars.getByName(sparkVariant.taskName("javadocJar")), sparkVariant); - correctScalaJarClassifiers(jars.getByName(sparkVariant.taskName("sourcesJar")), sparkVariant); + jars.named(sparkVariant.taskName("jar"), (Jar jar) -> correctScalaJarClassifiers(jar, sparkVariant)); + jars.named(sparkVariant.taskName("javadocJar"), (Jar jar) -> correctScalaJarClassifiers(jar, sparkVariant)); + jars.named(sparkVariant.taskName("sourcesJar"), (Jar jar) -> correctScalaJarClassifiers(jar, sparkVariant)); } } diff --git a/spark/core/build.gradle b/spark/core/build.gradle index 69b2a720a..361af40d1 100644 --- a/spark/core/build.gradle +++ b/spark/core/build.gradle @@ -9,10 +9,10 @@ apply plugin: 'spark.variants' sparkVariants { capabilityGroup 'org.elasticsearch.spark.variant' - setDefaultVariant "spark20scala211", spark24Version, scala211Version - addFeatureVariant "spark20scala210", spark22Version, scala210Version - addFeatureVariant "spark13scala211", spark13Version, scala211Version - addFeatureVariant "spark13scala210", spark13Version, scala210Version + setCoreDefaultVariant "spark20scala211", spark24Version, scala211Version + addCoreFeatureVariant "spark20scala210", spark22Version, scala210Version + addCoreFeatureVariant "spark13scala211", spark13Version, scala211Version + addCoreFeatureVariant "spark13scala210", spark13Version, scala210Version all { SparkVariantPlugin.SparkVariant variant -> @@ -122,23 +122,6 @@ sparkVariants { scaladoc { title = "${rootProject.description} ${version} API" } - - // The core project is strange since there are multiple variants with the same scala version present. 
They - // should be differentiated so they don't over write each other - def correctScalaJarClassifiers = { Jar jar -> - String classifier = jar.getArchiveClassifier().get() - if (classifier == null || classifier.isEmpty()) { - classifier = variant.name - } else { - classifier = "${variant.name}-${classifier}" - } - jar.getArchiveClassifier().set(classifier) - } - - TaskCollection jars = project.getTasks().withType(Jar.class) - correctScalaJarClassifiers(jars.getByName(variant.taskName("jar"))) - correctScalaJarClassifiers(jars.getByName(variant.taskName("javadocJar"))) - correctScalaJarClassifiers(jars.getByName(variant.taskName("sourcesJar"))) } }