Commit f5d7159
Support Spark 3.0.2 (#833)
1 parent: fe03c74

File tree

4 files changed: +30 additions, -16 deletions


azure-pipelines.yml

Lines changed: 26 additions & 12 deletions
@@ -33,12 +33,16 @@ variables:
 
   # Filter DataFrameTests.TestDataFrameGroupedMapUdf and DataFrameTests.TestGroupedMapUdf backwardCompatible
   # tests due to https://github.com/dotnet/spark/pull/711
-  backwardCompatibleTestOptions_Windows_3: "--filter \
+  backwardCompatibleTestOptions_Windows_3_0: "--filter \
     (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestDataFrameGroupedMapUdf)&\
     (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestGroupedMapUdf)"
-  forwardCompatibleTestOptions_Windows_3: ""
-  backwardCompatibleTestOptions_Linux_3: $(backwardCompatibleTestOptions_Windows_3)
-  forwardCompatibleTestOptions_Linux_3: $(forwardCompatibleTestOptions_Linux_2_4)
+  forwardCompatibleTestOptions_Windows_3_0: ""
+  backwardCompatibleTestOptions_Linux_3_0: $(backwardCompatibleTestOptions_Windows_3_0)
+  forwardCompatibleTestOptions_Linux_3_0: $(forwardCompatibleTestOptions_Linux_2_4)
+
+  # Skip all forward compatible tests since Spark 3.0.2 is not supported in microsoft-spark-3-0_2.12-1.0.0.jar
+  forwardCompatibleTestOptions_Windows_3_0_2: "--filter FullyQualifiedName=NONE"
+  forwardCompatibleTestOptions_Linux_3_0_2: $(forwardCompatibleTestOptions_Windows_3_0_2)
 
   # Azure DevOps variables are transformed into environment variables, with these variables we
   # avoid the first time experience and telemetry to speed up the build.
@@ -331,19 +335,29 @@ stages:
       jobOptions:
       - pool: 'Hosted VS2017'
         testOptions: ""
-        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3)
-        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3)
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3_0)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3_0)
       - pool: 'Hosted Ubuntu 1604'
         testOptions: ""
-        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3)
-        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3)
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_0)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_0)
     - version: '3.0.1'
       jobOptions:
       - pool: 'Hosted VS2017'
         testOptions: ""
-        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3)
-        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3)
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3_0)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3_0)
+      - pool: 'Hosted Ubuntu 1604'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_0)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_0)
+    - version: '3.0.2'
+      jobOptions:
+      - pool: 'Hosted VS2017'
+        testOptions: ""
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3_0)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3_0_2)
       - pool: 'Hosted Ubuntu 1604'
         testOptions: ""
-        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3)
-        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3)
+        backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_0)
+        forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_0_2)
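
A note on the new 3.0.2 variables: as far as the dotnet test filter syntax goes, "--filter FullyQualifiedName=NONE" matches no test (nothing is fully qualified as NONE), so the forward-compatible suite becomes a no-op for Spark 3.0.2 while the backward-compatible tests still run with the existing 3_0 options. This matches the comment in the diff: the released microsoft-spark-3-0_2.12-1.0.0.jar does not list 3.0.2 as a supported version.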

src/scala/microsoft-spark-2-3/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala

Lines changed: 1 addition & 1 deletion
@@ -166,7 +166,7 @@ object DotnetRunner extends Logging {
 
   private def validateSparkVersions: Unit = {
     if (!supportedSparkVersions(SPARK_VERSION)) {
-      val supportedVersions = supportedSparkVersions.mkString(", ")
+      val supportedVersions = supportedSparkVersions.toSeq.sorted.mkString(", ")
       throw new IllegalArgumentException(
         s"Unsupported spark version used: ${spark.SPARK_VERSION}. Normalized spark version used: ${SPARK_VERSION}." +
           s" Supported versions: ${supportedVersions}")

src/scala/microsoft-spark-2-4/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala

Lines changed: 1 addition & 1 deletion
@@ -167,7 +167,7 @@ object DotnetRunner extends Logging {
 
   private def validateSparkVersions: Unit = {
     if (!supportedSparkVersions(SPARK_VERSION)) {
-      val supportedVersions = supportedSparkVersions.mkString(", ")
+      val supportedVersions = supportedSparkVersions.toSeq.sorted.mkString(", ")
       throw new IllegalArgumentException(
         s"Unsupported spark version used: ${spark.SPARK_VERSION}. Normalized spark version used: ${SPARK_VERSION}." +
           s" Supported versions: ${supportedVersions}")

src/scala/microsoft-spark-3-0/src/main/scala/org/apache/spark/deploy/dotnet/DotnetRunner.scala

Lines changed: 2 additions & 2 deletions
@@ -34,7 +34,7 @@ import scala.util.Try
  */
 object DotnetRunner extends Logging {
   private val DEBUG_PORT = 5567
-  private val supportedSparkVersions = Set[String]("3.0.0", "3.0.1")
+  private val supportedSparkVersions = Set[String]("3.0.0", "3.0.1", "3.0.2")
 
   val SPARK_VERSION = DotnetUtils.normalizeSparkVersion(spark.SPARK_VERSION)
 
@@ -166,7 +166,7 @@ object DotnetRunner extends Logging {
 
   private def validateSparkVersions: Unit = {
     if (!supportedSparkVersions(SPARK_VERSION)) {
-      val supportedVersions = supportedSparkVersions.mkString(", ")
+      val supportedVersions = supportedSparkVersions.toSeq.sorted.mkString(", ")
       throw new IllegalArgumentException(
         s"Unsupported spark version used: ${spark.SPARK_VERSION}. Normalized spark version used: ${SPARK_VERSION}." +
           s" Supported versions: ${supportedVersions}")
