import json
import os
import re
from typing import Dict, List

# Note: If you'd like to add any debug info here, make sure to do it on stderr;
# stdout will be fed into `buildkite-agent pipeline upload`.

with open("spark/core/build.gradle", "r") as coreFile:
    core = coreFile.read()

# Each `Variant "spark20scala212"` declaration yields a ("20", "212") tuple
groupings = re.findall(r'Variant +"spark([0-9]+)scala([0-9]+)"', core)

# Group the Scala variants by Spark version
groupingsBySparkVersion: Dict[str, List[str]] = {}
for sparkVersion, scalaVersion in groupings:
    groupingsBySparkVersion.setdefault(sparkVersion, []).append(scalaVersion)
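# A sketch of the resulting shape, assuming hypothetical variants in
# build.gradle (the real keys come from the regex matches above):
#   groupingsBySparkVersion == {"20": ["210", "211"], "30": ["212", "213"]}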

with open("gradle.properties", "r") as gradlePropertiesFile:
    gradleProperties = gradlePropertiesFile.read()

# Each `scala210Version = 2.10.7` property yields a ("210", "2.10.7") tuple
matches = re.findall(
    r"scala([0-9]+)Version *= *([0-9]+\.[0-9]+\.[0-9]+)", gradleProperties
)

# Map each short Scala version (e.g. "210") to its full version (e.g. "2.10.7")
scalaVersions = dict(matches)
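# Illustrative result, assuming hypothetical gradle.properties entries (only
# `scala210Version = 2.10.7` is confirmed by the comment above):
#   scalaVersions == {"210": "2.10.7", "212": "2.12.7"}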


# Agent defaults applied to every step; the DRA step below sets its own useVault
pipeline = {
    "agents": {
        "provider": "gcp",
        "image": "family/elasticsearch-ubuntu-2004",
        "machineType": "n2-standard-8",
        "diskType": "pd-ssd",
        "diskSizeGb": "100",
        "useVault": "false",
    },
    "steps": [],
}

# Exclude every variant's integration tests from the intake build
# (Gradle's -x flag skips the named task)
intakeTasks = [
    f"-x :elasticsearch-spark-{sparkVersion}:integrationTest"
    for sparkVersion in groupingsBySparkVersion
]


pipeline["steps"].append(
    {
        "label": "intake",
        "timeout_in_minutes": 240,
        "command": "./gradlew check " + " ".join(intakeTasks),
    }
)
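# With the hypothetical groupings sketched earlier, the intake command would
# expand to something like:
#   ./gradlew check -x :elasticsearch-spark-20:integrationTest -x :elasticsearch-spark-30:integrationTest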

# One integration-test step per (Spark, Scala) combination
for sparkVersion, variantScalaVersions in groupingsBySparkVersion.items():
    for scalaVersion in variantScalaVersions:
        scalaFullVersion = scalaVersions[scalaVersion]
        pipeline["steps"].append(
            {
                "label": f"spark-{sparkVersion} / scala-{scalaFullVersion}",
                "timeout_in_minutes": 180,
                "command": f"./gradlew :elasticsearch-spark-{sparkVersion}:integrationTest -Pscala.variant={scalaFullVersion}",
            }
        )
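# One generated step, using the same hypothetical versions as above:
#   {
#       "label": "spark-30 / scala-2.12.7",
#       "timeout_in_minutes": 180,
#       "command": "./gradlew :elasticsearch-spark-30:integrationTest -Pscala.variant=2.12.7",
#   }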

if os.environ.get("ENABLE_DRA_WORKFLOW") == "true":
    # A `wait` step holds the DRA step until every step above it has passed
    pipeline["steps"].append(
        {
            "wait": None,
        }
    )

    pipeline["steps"].append(
        {
            "label": "DRA Snapshot Workflow",
            "command": ".buildkite/dra.sh",
            "timeout_in_minutes": 60,
            "agents": {"useVault": "true"},
            "env": {
                "USE_DRA_CREDENTIALS": "true",
            },
        },
    )

print(json.dumps(pipeline, indent=2))
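# Typical wiring, per the stdout note at the top of this file (the script
# path used here is an assumption, not confirmed by this diff):
#   python .buildkite/pipeline.py | buildkite-agent pipeline upload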