Commit ec78bf6

Migrate CI pipelines to Buildkite (#2114)
1 parent 998ddd7 commit ec78bf6

11 files changed (+411, −3 lines)

.buildkite/branches.sh (+4)

@@ -0,0 +1,4 @@
#!/bin/bash

# This determines which branches will have pipelines triggered periodically, for the tests and DRA workflows.
BRANCHES=(main 8.9 7.17)

.buildkite/dra-workflow.trigger.sh (+27)

@@ -0,0 +1,27 @@
#!/bin/bash

set -euo pipefail

echo "steps:"

source .buildkite/branches.sh

for BRANCH in "${BRANCHES[@]}"; do
  cat <<EOF
  - trigger: elasticsearch-hadoop-dra-workflow
    label: Trigger DRA snapshot workflow for $BRANCH
    async: true
    build:
      branch: $BRANCH
      env:
        DRA_WORKFLOW: snapshot
  - trigger: elasticsearch-hadoop-dra-workflow
    label: Trigger DRA staging workflow for $BRANCH
    async: true
    build:
      branch: $BRANCH
      env:
        DRA_WORKFLOW: staging
EOF
done
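The script emits pipeline YAML on stdout rather than uploading anything itself. A minimal sketch of how such a generator is typically consumed, assuming it runs in a Buildkite step where the buildkite-agent CLI is on PATH (the scheduled pipeline that invokes it is not part of this commit):

# Hypothetical wiring: pipe the generated trigger steps into the agent's uploader.
.buildkite/dra-workflow.trigger.sh | buildkite-agent pipeline upload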

.buildkite/dra-workflow.yml (+6)

@@ -0,0 +1,6 @@
steps:
  - label: DRA Workflow
    command: .buildkite/dra.sh
    timeout_in_minutes: 60
    env:
      USE_DRA_CREDENTIALS: true

.buildkite/dra.sh (+64)

@@ -0,0 +1,64 @@
#!/bin/bash

set -euo pipefail

DRA_WORKFLOW=${DRA_WORKFLOW:-snapshot}

# Staged (release) artifacts are not produced from main; only snapshots.
if [[ "$BUILDKITE_BRANCH" == "main" && "$DRA_WORKFLOW" == "staging" ]]; then
  exit 0
fi

echo --- Creating distribution

rm -Rfv ~/.gradle/init.d
HADOOP_VERSION=$(grep eshadoop buildSrc/esh-version.properties | sed "s/eshadoop *= *//g")

VERSION_SUFFIX=""
BUILD_ARGS="-Dbuild.snapshot=false"
if [[ "$DRA_WORKFLOW" == "snapshot" ]]; then
  VERSION_SUFFIX="-SNAPSHOT"
  BUILD_ARGS="-Dbuild.snapshot=true"
fi

RM_BRANCH="$BUILDKITE_BRANCH"
if [[ "$BUILDKITE_BRANCH" == "main" ]]; then
  RM_BRANCH=master
fi

echo "DRA_WORKFLOW=$DRA_WORKFLOW"
echo "HADOOP_VERSION=$HADOOP_VERSION"
echo "RM_BRANCH=$RM_BRANCH"
echo "VERSION_SUFFIX=$VERSION_SUFFIX"
echo "BUILD_ARGS=$BUILD_ARGS"

ES_BUILD_ID=$(curl -sS "https://artifacts-$DRA_WORKFLOW.elastic.co/elasticsearch/latest/${RM_BRANCH}.json" | jq -r '.build_id')
echo "ES_BUILD_ID=$ES_BUILD_ID"

mkdir localRepo
wget --quiet "https://artifacts-$DRA_WORKFLOW.elastic.co/elasticsearch/${ES_BUILD_ID}/maven/org/elasticsearch/gradle/build-tools/${HADOOP_VERSION}${VERSION_SUFFIX}/build-tools-${HADOOP_VERSION}${VERSION_SUFFIX}.jar" \
  -O "localRepo/build-tools-${HADOOP_VERSION}${VERSION_SUFFIX}.jar"

./gradlew -S -PlocalRepo=true "${BUILD_ARGS}" -Dorg.gradle.warning.mode=summary -Dcsv="$WORKSPACE/build/distributions/dependencies-${HADOOP_VERSION}${VERSION_SUFFIX}.csv" :dist:generateDependenciesReport distribution

# Allow other users access to read the artifacts so they are readable in the container
find "$WORKSPACE" -type f -path "*/build/distributions/*" -exec chmod a+r {} \;

# Allow other users write access to create checksum files
find "$WORKSPACE" -type d -path "*/build/distributions" -exec chmod a+w {} \;

echo --- Running release-manager

docker run --rm \
  --name release-manager \
  -e VAULT_ADDR="$DRA_VAULT_ADDR" \
  -e VAULT_ROLE_ID="$DRA_VAULT_ROLE_ID_SECRET" \
  -e VAULT_SECRET_ID="$DRA_VAULT_SECRET_ID_SECRET" \
  --mount type=bind,readonly=false,src="$PWD",target=/artifacts \
  docker.elastic.co/infra/release-manager:latest \
    cli collect \
    --project elasticsearch-hadoop \
    --branch "$RM_BRANCH" \
    --commit "$BUILDKITE_COMMIT" \
    --workflow "$DRA_WORKFLOW" \
    --version "$HADOOP_VERSION" \
    --artifact-set main
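The ES_BUILD_ID lookup above relies on the artifacts host publishing a latest/<branch>.json manifest with a build_id field. An illustrative query for the snapshot workflow and the master branch alias used in the script (the output shown is hypothetical):

# Illustrative only; the build id printed below is made up.
curl -sS "https://artifacts-snapshot.elastic.co/elasticsearch/latest/master.json" | jq -r '.build_id'
# => 8.10.0-abcd1234   (hypothetical)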

.buildkite/hooks/pre-command (+48)

@@ -0,0 +1,48 @@
#!/bin/bash

WORKSPACE="$(pwd)"
export WORKSPACE

# Source and export the variables defined in .ci/java-versions.properties
export $(grep '=' .ci/java-versions.properties | xargs)

JAVA_HOME="$HOME/.java/$ESH_BUILD_JAVA"
export JAVA_HOME

RUNTIME_JAVA_HOME="$HOME/.java/$ESH_RUNTIME_JAVA"
export RUNTIME_JAVA_HOME

JAVA7_HOME="$HOME/.java/java7"
export JAVA7_HOME

JAVA8_HOME="$HOME/.java/java8"
export JAVA8_HOME

JAVA9_HOME="$HOME/.java/java9"
export JAVA9_HOME

JAVA10_HOME="$HOME/.java/java10"
export JAVA10_HOME

JAVA11_HOME="$HOME/.java/java11"
export JAVA11_HOME

JAVA12_HOME="$HOME/.java/openjdk12"
export JAVA12_HOME

JAVA13_HOME="$HOME/.java/openjdk13"
export JAVA13_HOME

JAVA14_HOME="$HOME/.java/openjdk14"
export JAVA14_HOME

if [[ "${USE_DRA_CREDENTIALS:-}" == "true" ]]; then
  DRA_VAULT_ROLE_ID_SECRET=$(vault read -field=role-id secret/ci/elastic-elasticsearch-hadoop/legacy-vault-credentials)
  export DRA_VAULT_ROLE_ID_SECRET

  DRA_VAULT_SECRET_ID_SECRET=$(vault read -field=secret-id secret/ci/elastic-elasticsearch-hadoop/legacy-vault-credentials)
  export DRA_VAULT_SECRET_ID_SECRET

  DRA_VAULT_ADDR=https://secrets.elastic.co:8200
  export DRA_VAULT_ADDR
fi
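The export line expects .ci/java-versions.properties to hold plain KEY=value pairs with no values that need quoting. A minimal sketch of the idiom; ESH_BUILD_JAVA and ESH_RUNTIME_JAVA are the keys the hook actually reads, while the openjdk17 values are assumed for illustration:

# Sketch only: write a throwaway properties file and export its pairs.
cat > /tmp/java-versions.properties <<'EOF'
ESH_BUILD_JAVA=openjdk17
ESH_RUNTIME_JAVA=openjdk17
EOF
export $(grep '=' /tmp/java-versions.properties | xargs)
echo "$ESH_BUILD_JAVA"   # prints: openjdk17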

.buildkite/pipeline.py (+91)

@@ -0,0 +1,91 @@
import json
import os
import re
from typing import Dict, List

# Note: If you'd like to add any debug info here, make sure to do it on stderr;
# stdout will be fed into `buildkite-agent pipeline upload`

with open("spark/core/build.gradle", "r") as coreFile:
    core = coreFile.read()

# `Variant "spark20scala212"` => ("20", "212")
groupings = re.findall(r'Variant +"spark([0-9]+)scala([0-9]+)"', core)

groupingsBySparkVersion: Dict[str, List[str]] = {}
for grouping in groupings:
    if grouping[0] not in groupingsBySparkVersion:
        groupingsBySparkVersion[grouping[0]] = []
    groupingsBySparkVersion[grouping[0]].append(grouping[1])

with open("gradle.properties", "r") as gradlePropertiesFile:
    gradleProperties = gradlePropertiesFile.read()

# `scala210Version = 2.10.7` => ("210", "2.10.7")
matches = re.findall(
    r"scala([0-9]+)Version *= *([0-9]+\.[0-9]+\.[0-9]+)", gradleProperties
)

scalaVersions = {}
for match in matches:
    scalaVersions[match[0]] = match[1]


pipeline = {
    "agents": {
        "provider": "gcp",
        "image": "family/elasticsearch-ubuntu-2004",
        "machineType": "n2-standard-8",
        "diskType": "pd-ssd",
        "diskSizeGb": "100",
        "useVault": "false",
    },
    "steps": [],
}

# Skip the integration tests during intake; they run in the per-variant steps below.
intakeTasks = map(
    lambda sparkVersion: f"-x :elasticsearch-spark-{sparkVersion}:integrationTest",
    groupingsBySparkVersion.keys(),
)

pipeline["steps"].append(
    {
        "label": "intake",
        "timeout_in_minutes": 240,
        "command": "./gradlew check " + " ".join(intakeTasks),
    }
)

for sparkVersion in groupingsBySparkVersion.keys():
    for scalaVersion in groupingsBySparkVersion[sparkVersion]:
        scalaFullVersion = scalaVersions[scalaVersion]
        pipeline["steps"].append(
            {
                "label": f"spark-{sparkVersion} / scala-{scalaFullVersion}",
                "timeout_in_minutes": 180,
                "command": f"./gradlew :elasticsearch-spark-{sparkVersion}:integrationTest -Pscala.variant={scalaFullVersion}",
            }
        )

if os.environ.get("ENABLE_DRA_WORKFLOW") == "true":
    pipeline["steps"].append(
        {
            "wait": None,
        }
    )

    pipeline["steps"].append(
        {
            "label": "DRA Snapshot Workflow",
            "command": ".buildkite/dra.sh",
            "timeout_in_minutes": 60,
            "agents": {"useVault": "true"},
            "env": {
                "USE_DRA_CREDENTIALS": "true",
            },
        },
    )

print(json.dumps(pipeline, indent=2))
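As the note at the top of the script says, stdout is consumed by buildkite-agent pipeline upload. A plausible entry-step invocation, assuming python3 is available on the agent (the exact wiring is not part of this commit):

# Hypothetical entry step: generate the pipeline JSON and upload it.
python3 .buildkite/pipeline.py | buildkite-agent pipeline upload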

.buildkite/pull-requests.json (+27)

@@ -0,0 +1,27 @@
{
  "jobs": [
    {
      "enabled": true,
      "pipeline_slug": "elasticsearch-hadoop-tests",
      "allow_org_users": true,
      "allowed_repo_permissions": [
        "admin",
        "write"
      ],
      "set_commit_status": false,
      "build_on_commit": true,
      "build_on_comment": true,
      "trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))",
      "always_trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))",
      "skip_ci_labels": [
        "skip-ci"
      ],
      "skip_ci_on_only_changed": [
        "^docs/",
        "\\.md$",
        "\\.mdx$",
        "^\\.buildkite/pull-requests\\.json$"
      ]
    }
  ]
}
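The trigger regex accepts PR comments such as "buildkite test this", "build this", or "test it". A quick way to check a candidate comment against it, assuming GNU grep with PCRE support:

# Prints "would trigger" when the comment matches the pattern.
echo "buildkite test this" | grep -qP '^(?:(?:buildkite\W+)?(?:build|test)\W+(?:this|it))' && echo "would trigger"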

.buildkite/tests.trigger.sh (+20)

@@ -0,0 +1,20 @@
#!/bin/bash

set -euo pipefail

echo "steps:"

source .buildkite/branches.sh

for BRANCH in "${BRANCHES[@]}"; do
  cat <<EOF
  - trigger: elasticsearch-hadoop-tests
    label: Trigger tests pipeline for $BRANCH
    async: true
    build:
      branch: $BRANCH
EOF
done

.gitignore (+1)

@@ -17,3 +17,4 @@ metastore_db
 /spark/keyvaluerdd.parquet
 out/
 localRepo/
+.vscode
