@@ -44,6 +44,54 @@ variables:
forwardCompatibleTestOptions_Windows_3_0_2 : " --filter FullyQualifiedName=NONE"
forwardCompatibleTestOptions_Linux_3_0_2 : $(forwardCompatibleTestOptions_Windows_3_0_2)
+ # Skip backwardCompatible tests because Microsoft.Spark.Worker requires Spark 3.1 support in
+ # CommandProcessor.cs and TaskContextProcessor.cs. Support added in https://github.com/dotnet/spark/pull/836
+ backwardCompatibleTestOptions_Windows_3_1 : " --filter \
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestDataFrameGroupedMapUdf)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestGroupedMapUdf)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfRegistrationWithReturnAsRowType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithArrayChain)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithSimpleArrayType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithMapType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithRowArrayType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithReturnAsMapType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithReturnAsArrayOfArrayType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithArrayOfArrayType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithMapOfMapType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithReturnAsSimpleArrayType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithRowType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfComplexTypesTests.TestUdfWithReturnAsRowType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfSerDeTests.TestExternalStaticMethodCall)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfSerDeTests.TestInitExternalClassInUdf)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfSerDeTests.TestUdfClosure)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfSimpleTypesTests.TestUdfWithReturnAsDateType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfSimpleTypesTests.TestUdfWithReturnAsTimestampType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfSimpleTypesTests.TestUdfWithDateType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.UdfTests.UdfSimpleTypesTests.TestUdfWithTimestampType)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.BroadcastTests.TestMultipleBroadcast)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.BroadcastTests.TestUnpersist)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.BroadcastTests.TestDestroy)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.PairRDDFunctionsTests.TestCollect)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.RDDTests.TestPipelinedRDD)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.RDDTests.TestMap)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.RDDTests.TestFlatMap)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.RDDTests.TestMapPartitions)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.RDDTests.TestMapPartitionsWithIndex)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.RDDTests.TestTextFile)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.RDDTests.TestFilter)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestDataFrameVectorUdf)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestVectorUdf)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestWithColumn)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataFrameTests.TestUDF)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.SparkSessionExtensionsTests.TestVersion)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataStreamWriterTests.TestForeachBatch)&\
+ (FullyQualifiedName!=Microsoft.Spark.E2ETest.IpcTests.DataStreamWriterTests.TestForeach)"
+ # Skip all forwardCompatible tests since microsoft-spark-3-1 jar does not get built when
+ # building forwardCompatible repo.
+ forwardCompatibleTestOptions_Windows_3_1 : " --filter FullyQualifiedName=NONE"
+ backwardCompatibleTestOptions_Linux_3_1 : $(backwardCompatibleTestOptions_Windows_3_1)
+ forwardCompatibleTestOptions_Linux_3_1 : $(forwardCompatibleTestOptions_Windows_3_1)
+
# Azure DevOps variables are transformed into environment variables, with these variables we
# avoid the first time experience and telemetry to speed up the build.
DOTNET_CLI_TELEMETRY_OPTOUT : 1
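Note: the test-options variables above are `dotnet test` (VSTest) filter expressions. Each `FullyQualifiedName!=<test>` clause excludes a single test, the clauses are AND-ed together with `&`, and the trailing `\` continues the quoted YAML string onto the next line. A minimal sketch of how such a variable might be consumed by a test step (the variable name, project glob, and test names below are illustrative assumptions, not taken from this diff):

  variables:
    # Hypothetical filter: run everything except two named tests.
    exampleBackwardCompatibleTestOptions: "--filter (FullyQualifiedName!=My.E2ETests.TestA)&(FullyQualifiedName!=My.E2ETests.TestB)"

  steps:
  - task: DotNetCoreCLI@2
    displayName: 'Run E2E tests (example)'
    inputs:
      command: test
      projects: '**/My.E2ETests.csproj'   # hypothetical test project
      arguments: '--configuration Release $(exampleBackwardCompatibleTestOptions)'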
@@ -361,3 +409,13 @@ stages:
testOptions : " "
backwardCompatibleTestOptions : $(backwardCompatibleTestOptions_Linux_3_0)
forwardCompatibleTestOptions : $(forwardCompatibleTestOptions_Linux_3_0_2)
+ - version : ' 3.1.1'
+ jobOptions :
+ - pool : ' Hosted VS2017'
+ testOptions : " "
+ backwardCompatibleTestOptions : $(backwardCompatibleTestOptions_Windows_3_1)
+ forwardCompatibleTestOptions : $(forwardCompatibleTestOptions_Windows_3_1)
+ - pool : ' Hosted Ubuntu 1604'
+ testOptions : " "
+ backwardCompatibleTestOptions : $(backwardCompatibleTestOptions_Linux_3_1)
+ forwardCompatibleTestOptions : $(forwardCompatibleTestOptions_Linux_3_1)
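For context, each added `version`/`jobOptions` entry pairs a Spark version with per-pool test options, and the `*_Linux_3_1` variables simply alias their Windows counterparts via macro syntax (e.g. `backwardCompatibleTestOptions_Linux_3_1 : $(backwardCompatibleTestOptions_Windows_3_1)`). Downstream, these options are presumably forwarded to an E2E test job; a hypothetical sketch of that hand-off (the template file name and parameter names are assumptions, not part of this diff):

  - template: e2e-tests-template.yml   # assumed template file
    parameters:
      version: '3.1.1'
      pool: 'Hosted VS2017'
      testOptions: ' '
      backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_3_1)
      forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_3_1)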