[7.17] Backport maven central aggregation zip #2418

Merged

2 changes: 1 addition & 1 deletion .buildkite/dra.sh
@@ -38,7 +38,7 @@ mkdir localRepo
wget --quiet "https://artifacts-$DRA_WORKFLOW.elastic.co/elasticsearch/${ES_BUILD_ID}/maven/org/elasticsearch/gradle/build-tools/${HADOOP_VERSION}${VERSION_SUFFIX}/build-tools-${HADOOP_VERSION}${VERSION_SUFFIX}.jar" \
-O "localRepo/build-tools-${HADOOP_VERSION}${VERSION_SUFFIX}.jar"

./gradlew -S -PlocalRepo=true "${BUILD_ARGS}" -Dorg.gradle.warning.mode=summary -Dcsv="$WORKSPACE/build/distributions/dependencies-${HADOOP_VERSION}${VERSION_SUFFIX}.csv" :dist:generateDependenciesReport distribution
./gradlew -S -PlocalRepo=true "${BUILD_ARGS}" -Dorg.gradle.warning.mode=summary -Dcsv="$WORKSPACE/build/distributions/dependencies-${HADOOP_VERSION}${VERSION_SUFFIX}.csv" :dist:generateDependenciesReport distribution zipAggregation

# Allow other users access to read the artifacts so they are readable in the container
find "$WORKSPACE" -type f -path "*/build/distributions/*" -exec chmod a+r {} \;
25 changes: 25 additions & 0 deletions build.gradle
@@ -1,11 +1,36 @@
import org.elasticsearch.hadoop.gradle.buildtools.ConcatFilesTask
import java.lang.management.ManagementFactory;
import java.time.LocalDateTime;
import org.elasticsearch.gradle.VersionProperties

description = 'Elasticsearch for Apache Hadoop'

apply plugin: 'es.hadoop.build.root'
apply plugin: 'com.gradleup.nmcp.aggregation'

defaultTasks 'build'

repositories {
mavenCentral()
}
dependencies {
nmcpAggregation(project(":dist"))
nmcpAggregation(project(":elasticsearch-hadoop-mr"))
nmcpAggregation(project(":elasticsearch-hadoop-pig"))
nmcpAggregation(project(":elasticsearch-hadoop-hive"))
nmcpAggregation(project(":elasticsearch-spark-20"))
nmcpAggregation(project(":elasticsearch-spark-13"))
nmcpAggregation(project(":elasticsearch-spark-30"))
nmcpAggregation(project(":elasticsearch-storm"))
}

tasks.named('zipAggregation').configure {
archiveFileName.unset();
archiveBaseName.set("elasticsearch-hadoop-maven-aggregation")
destinationDirectory.set(layout.buildDirectory.dir("distributions"));
archiveVersion.set(VersionProperties.elasticsearch)
}

allprojects {
group = "org.elasticsearch"
tasks.withType(AbstractCopyTask) {
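The root build.gradle changes above apply `com.gradleup.nmcp.aggregation` and point its `zipAggregation` task at the listed subprojects, so their Maven publications are bundled into a single `elasticsearch-hadoop-maven-aggregation-<version>.zip` under `build/distributions`. As a rough, non-authoritative sketch of how the resulting archive path could be checked locally (the helper task name and the assumption that `zipAggregation` is a standard `Zip` archive task are mine, not from this PR):

// Sketch only (not part of this PR): print where the aggregation zip lands,
// assuming 'zipAggregation' is a standard Zip archive task as configured above.
tasks.register('printAggregationZip') {
    def zipFile = tasks.named('zipAggregation', Zip).flatMap { it.archiveFile }
    doLast {
        // Expected: build/distributions/elasticsearch-hadoop-maven-aggregation-<es version>.zip
        println "Aggregation zip: ${zipFile.get().asFile}"
    }
}

Running `./gradlew zipAggregation printAggregationZip` would then build and report the archive, matching the `zipAggregation` target added to .buildkite/dra.sh above.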
1 change: 1 addition & 0 deletions buildSrc/build.gradle
@@ -72,6 +72,7 @@ dependencies {
// Required for dependency licenses task
implementation 'org.apache.rat:apache-rat:0.11'
implementation 'commons-codec:commons-codec:1.12'
implementation 'com.gradleup.nmcp:nmcp:0.1.5'

if (localRepo) {
implementation name: "build-tools-${buildToolsVersion}"
@@ -581,6 +581,7 @@ class BuildPlugin implements Plugin<Project> {

private void configureMaven(Project project) {
project.getPluginManager().apply("maven-publish")
project.getPluginManager().apply("com.gradleup.nmcp")

// Configure Maven publication
project.publishing {
@@ -601,13 +602,6 @@
// Configure Maven Pom
configurePom(project, project.publishing.publications.main)

// Disable the publishing tasks since we only need the pom generation tasks.
// If we are working with a project that has a scala variant (see below), we need to modify the pom's
// artifact id which the publish task does not like (it fails validation when run).
project.getTasks().withType(PublishToMavenRepository) { PublishToMavenRepository m ->
m.enabled = false
}

// Configure Scala Variants if present
project.getPlugins().withType(SparkVariantPlugin).whenPluginAdded {
// Publishing gets weird when you introduce variants into the project.
@@ -622,7 +616,7 @@

// Main variant needs the least configuration on its own, since it is the default publication created above.
sparkVariants.defaultVariant { SparkVariant variant ->
updateVariantPomLocationAndArtifactId(project, project.publishing.publications.main, variant)
updateVariantArtifactId(project, project.publishing.publications.main, variant)
}

// For each spark variant added, we need to do a few things:
@@ -672,8 +666,9 @@ class BuildPlugin implements Plugin<Project> {
from variantComponent
suppressAllPomMetadataWarnings() // We get it. Gradle metadata is better than Maven Poms
}
variantPublication.setAlias(true)
configurePom(project, variantPublication)
updateVariantPomLocationAndArtifactId(project, variantPublication, variant)
updateVariantArtifactId(project, variantPublication, variant)
}
}
}
@@ -686,14 +681,6 @@ class BuildPlugin implements Plugin<Project> {
}

private static void configurePom(Project project, MavenPublication publication) {
// Set the pom's destination to the distribution directory
project.tasks.withType(GenerateMavenPom).all { GenerateMavenPom pom ->
if (pom.name == "generatePomFileFor${publication.name.capitalize()}Publication") {
BasePluginExtension baseExtension = project.getExtensions().getByType(BasePluginExtension.class);
pom.destination = project.provider({"${project.buildDir}/distributions/${baseExtension.archivesName.get()}-${project.getVersion()}.pom"})
}
}

// add all items necessary for publication
Provider<String> descriptionProvider = project.provider({ project.getDescription() })
MavenPom pom = publication.getPom()
@@ -746,23 +733,12 @@
}
}

private static void updateVariantPomLocationAndArtifactId(Project project, MavenPublication publication, SparkVariant variant) {
private static void updateVariantArtifactId(Project project, MavenPublication publication, SparkVariant variant) {
// Add variant classifier to the pom file name if required
String classifier = variant.shouldClassifySparkVersion() && variant.isDefaultVariant() == false ? "-${variant.getName()}" : ''
BasePluginExtension baseExtension = project.getExtensions().getByType(BasePluginExtension.class);
String filename = "${baseExtension.archivesName.get()}_${variant.scalaMajorVersion}-${project.getVersion()}${classifier}"
// Fix the pom name
project.tasks.withType(GenerateMavenPom).all { GenerateMavenPom pom ->
if (pom.name == "generatePomFileFor${publication.name.capitalize()}Publication") {
pom.destination = project.provider({"${project.buildDir}/distributions/${filename}.pom"})
}
}
// Fix the artifactId. Note: The publishing task does not like this happening. Hence it is disabled.
publication.getPom().withXml { XmlProvider xml ->
Node root = xml.asNode()
Node artifactId = (root.get('artifactId') as NodeList).get(0) as Node
artifactId.setValue("${baseExtension.archivesName.get()}_${variant.scalaMajorVersion}")
}
// Fix the artifact id
publication.setArtifactId("${baseExtension.archivesName.get()}_${variant.scalaMajorVersion}")
}

/**
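In the BuildPlugin changes above, the plugin no longer disables the `PublishToMavenRepository` tasks or rewrites `GenerateMavenPom` destinations; instead each Scala-variant publication sets its `artifactId` directly (and non-default variants are flagged with `setAlias(true)`), which the real publish tasks accept. A minimal standalone sketch of that pattern, with made-up project and variant names rather than anything from this build:

// Sketch: set the variant artifactId on the publication itself instead of
// patching the generated POM XML afterwards.
plugins {
    id 'java'
    id 'maven-publish'
}

publishing {
    publications {
        spark13scala210(MavenPublication) {   // hypothetical variant publication
            from components.java
            artifactId = 'elasticsearch-spark-13_2.10'
        }
    }
}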
5 changes: 1 addition & 4 deletions dist/build.gradle
@@ -140,6 +140,7 @@ javadoc {
publishing {
publications {
main {
artifactId = "elasticsearch-hadoop"
getPom().withXml { XmlProvider xml ->
Node root = xml.asNode()

@@ -148,10 +149,6 @@ publishing {
Node repository = repositories.appendNode('repository')
repository.appendNode('id', 'clojars.org')
repository.appendNode('url', 'https://clojars.org/repo')
BasePluginExtension baseExtension = project.getExtensions().getByType(BasePluginExtension.class)

// Correct the artifact Id, otherwise it is listed as 'dist'
root.get('artifactId').get(0).setValue(baseExtension.archivesName.get())
}
}
}
2 changes: 1 addition & 1 deletion mr/build.gradle
@@ -56,7 +56,7 @@ dependencies {
exclude group: 'com.fasterxml.jackson.core'
}

embedded(project(path: ":thirdparty", configuration: "shadow"))
embedded(project(path: ":thirdparty", configuration: "nmcp"))
implementation("commons-logging:commons-logging:1.1.1")
implementation("commons-codec:commons-codec:1.4")
implementation("javax.xml.bind:jaxb-api:2.3.1")
17 changes: 17 additions & 0 deletions spark/sql-13/build.gradle
@@ -197,3 +197,20 @@ sparkVariants {
}
}
}

tasks.register('copyPoms', Copy) {
// from(tasks.named('generatePomFileForMainPublication')) {
// rename 'pom-default.xml', "elasticsearch-spark-20_2.12-${project.getVersion()}.pom"
// }
// from(tasks.named('generatePomFileForSpark13scala211Publication')) {
// rename 'pom-default.xml', "elasticsearch-spark-13_2.11-${project.getVersion()}.pom"
// }
from(tasks.named('generatePomFileForSpark13scala210Publication')) {
rename 'pom-default.xml', "elasticsearch-spark-13_2.10-${project.getVersion()}.pom"
}
into(new File(project.buildDir, 'distributions'))
}

tasks.named('distribution').configure {
dependsOn 'copyPoms'
}
17 changes: 17 additions & 0 deletions spark/sql-20/build.gradle
@@ -218,3 +218,20 @@ sparkVariants {
}
}
}

tasks.register('copyPoms', Copy) {
// from(tasks.named('generatePomFileForMainPublication')) {
// rename 'pom-default.xml', "elasticsearch-spark-20_2.12-${project.getVersion()}.pom"
// }
from(tasks.named('generatePomFileForSpark20scala211Publication')) {
rename 'pom-default.xml', "elasticsearch-spark-20_2.11-${project.getVersion()}.pom"
}
from(tasks.named('generatePomFileForSpark20scala210Publication')) {
rename 'pom-default.xml', "elasticsearch-spark-20_2.10-${project.getVersion()}.pom"
}
into(new File(project.buildDir, 'distributions'))
}

tasks.named('distribution').configure {
dependsOn 'copyPoms'
}
12 changes: 12 additions & 0 deletions spark/sql-30/build.gradle
@@ -202,3 +202,15 @@ sparkVariants {
}
}
}


tasks.register('copyPoms', Copy) {
from(tasks.named('generatePomFileForMainPublication')) {
rename 'pom-default.xml', "elasticsearch-spark-30_2.12-${project.getVersion()}.pom"
}
into(new File(project.buildDir, 'distributions'))
}

tasks.named('distribution').configure {
dependsOn 'copyPoms'
}
9 changes: 9 additions & 0 deletions thirdparty/build.gradle
@@ -28,6 +28,13 @@ configurations {
transitive = false
canBeResolved = true
}
nmcp {
canBeConsumed = true
canBeResolved = false
// attributes {
// attribute(Usage.USAGE_ATTRIBUTE, project.getObjects().named(Usage.class, "nmcp"))
// }
}
implementation {
extendsFrom shaded
}
@@ -46,3 +53,5 @@ shadowJar {
relocate 'org.apache.commons.httpclient', 'org.elasticsearch.hadoop.thirdparty.apache.commons.httpclient'
relocate 'org.codehaus.jackson', 'org.elasticsearch.hadoop.thirdparty.codehaus.jackson'
}

artifacts.add("nmcp", shadowJar)
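The thirdparty changes above add a consumable (non-resolvable) `nmcp` configuration and attach the shaded jar to it with `artifacts.add("nmcp", shadowJar)`; this is what mr/build.gradle now consumes via `project(path: ":thirdparty", configuration: "nmcp")`. A generic sketch of that producer/consumer shape, using illustrative names that are not from this build:

// producer/build.gradle: expose an archive on a named outgoing configuration
configurations {
    sharedJar {
        canBeConsumed = true
        canBeResolved = false
    }
}
artifacts.add('sharedJar', tasks.named('jar'))

// consumer/build.gradle: depend on that specific configuration of the producer
dependencies {
    implementation project(path: ':producer', configuration: 'sharedJar')
}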