@@ -120,6 +120,11 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
     finalState should be (SparkAppHandle.State.FAILED)
   }
 
+  test("run Spark in yarn-cluster mode failure after sc initialized") {
+    val finalState = runSpark(false, mainClassName(YarnClusterDriverWithFailure.getClass))
+    finalState should be (SparkAppHandle.State.FAILED)
+  }
+
   test("run Python application in yarn-client mode") {
     testPySpark(true)
   }
@@ -259,6 +264,16 @@ private[spark] class SaveExecutorInfo extends SparkListener {
   }
 }
 
+private object YarnClusterDriverWithFailure extends Logging with Matchers {
+  def main(args: Array[String]): Unit = {
+    val sc = new SparkContext(new SparkConf()
+      .set("spark.extraListeners", classOf[SaveExecutorInfo].getName)
+      .setAppName("yarn test with failure"))
+
+    throw new Exception("exception after sc initialized")
+  }
+}
+
 private object YarnClusterDriver extends Logging with Matchers {
 
   val WAIT_TIMEOUT_MILLIS = 10000
@@ -287,19 +302,19 @@ private object YarnClusterDriver extends Logging with Matchers {
       sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
       data should be (Set(1, 2, 3, 4))
       result = "success"
+
+      // Verify that the config archive is correctly placed in the classpath of all containers.
+      val confFile = "/" + Client.SPARK_CONF_FILE
+      assert(getClass().getResource(confFile) != null)
+      val configFromExecutors = sc.parallelize(1 to 4, 4)
+        .map { _ => Option(getClass().getResource(confFile)).map(_.toString).orNull }
+        .collect()
+      assert(configFromExecutors.find(_ == null) === None)
     } finally {
       Files.write(result, status, StandardCharsets.UTF_8)
       sc.stop()
     }
 
-    // Verify that the config archive is correctly placed in the classpath of all containers.
-    val confFile = "/" + Client.SPARK_CONF_FILE
-    assert(getClass().getResource(confFile) != null)
-    val configFromExecutors = sc.parallelize(1 to 4, 4)
-      .map { _ => Option(getClass().getResource(confFile)).map(_.toString).orNull }
-      .collect()
-    assert(configFromExecutors.find(_ == null) === None)
-
     // verify log urls are present
     val listeners = sc.listenerBus.findListenersByClass[SaveExecutorInfo]
     assert(listeners.size === 1)
@@ -330,9 +345,6 @@ private object YarnClusterDriver extends Logging with Matchers {
 }
 
 private object YarnClasspathTest extends Logging {
-
-  var exitCode = 0
-
   def error(m: String, ex: Throwable = null): Unit = {
     logError(m, ex)
     // scalastyle:off println
@@ -361,7 +373,6 @@ private object YarnClasspathTest extends Logging {
     } finally {
       sc.stop()
     }
-    System.exit(exitCode)
   }
 
   private def readResource(resultPath: String): Unit = {
@@ -374,8 +385,6 @@ private object YarnClasspathTest extends Logging {
     } catch {
       case t: Throwable =>
         error(s"loading test.resource to $resultPath", t)
-        // set the exit code if not yet set
-        exitCode = 2
     } finally {
       Files.write(result, new File(resultPath), StandardCharsets.UTF_8)
     }