@@ -296,73 +296,6 @@ def gpu_pool(self, tmpdir, monkeypatch):
     @pytest.mark.parametrize(
         'cluster_manager,user_spark_opts,expected_output', [
-            # dynamic resource allocation enabled
-            (
-                'kubernetes',
-                {
-                    'spark.dynamicAllocation.enabled': 'true',
-                    'spark.executor.cores': '4',
-                    'spark.cores.max': '128',
-                },
-                {
-                    'spark.executor.memory': '4g',
-                    'spark.executor.cores': '4',
-                    'spark.executor.instances': '2',
-                    'spark.kubernetes.executor.limit.cores': '4',
-                    'spark.kubernetes.allocation.batch.size': '512',
-                    'spark.scheduler.maxRegisteredResourcesWaitingTime': '15min',
-                },
-            ),
-            (
-                'kubernetes',
-                {
-                    'spark.dynamicAllocation.enabled': 'true',
-                    'spark.dynamicAllocation.maxExecutors': '512',
-                    'spark.dynamicAllocation.minExecutors': '128',
-                    'spark.dynamicAllocation.initialExecutors': '128',
-                    'spark.executor.cores': '4',
-                },
-                {
-                    'spark.executor.memory': '4g',
-                    'spark.executor.cores': '4',
-                    'spark.executor.instances': '2',
-                    'spark.kubernetes.executor.limit.cores': '4',
-                    'spark.kubernetes.allocation.batch.size': '512',
-                    'spark.scheduler.maxRegisteredResourcesWaitingTime': '15min',
-                },
-            ),
-            # dynamic resource allocation disabled with instances specified
-            (
-                'kubernetes',
-                {
-                    'spark.dynamicAllocation.enabled': 'false',
-                    'spark.executor.instances': '600',
-                },
-                {
-                    'spark.executor.memory': '4g',
-                    'spark.executor.cores': '2',
-                    'spark.executor.instances': '600',
-                    'spark.kubernetes.executor.limit.cores': '2',
-                    'spark.kubernetes.allocation.batch.size': '512',
-                    'spark.scheduler.maxRegisteredResourcesWaitingTime': '35min',
-                },
-            ),
-            # dynamic resource allocation disabled with instances not specified
-            (
-                'kubernetes',
-                {
-                    'spark.executor.cores': '4',
-                    'spark.cores.max': '128',
-                },
-                {
-                    'spark.executor.memory': '4g',
-                    'spark.executor.cores': '4',
-                    'spark.executor.instances': '32',
-                    'spark.kubernetes.executor.limit.cores': '4',
-                    'spark.kubernetes.allocation.batch.size': '512',
-                    'spark.scheduler.maxRegisteredResourcesWaitingTime': '16min',
-                },
-            ),
             # k8s allocation batch size not specified
             (
                 'kubernetes',
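
(The four cases removed above exercised dynamic resource allocation through
`_adjust_spark_requested_resources`, where the expectations are entangled with
other resource math, for example `spark.executor.instances` of `'32'` derived
from `spark.cores.max` / `spark.executor.cores` = 128 / 4. The hunk below
appears to reintroduce that coverage as a dedicated test for
`_get_dra_configs`, so the DRA defaults are checked in isolation.)
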
@@ -531,6 +464,87 @@ def test_adjust_spark_requested_resources_error(
         with pytest.raises(ValueError):
             spark_config._adjust_spark_requested_resources(spark_opts, cluster_manager, pool)
 
+    @pytest.mark.parametrize(
+        'user_spark_opts,expected_output', [
+            # dynamic resource allocation enabled
+            (
+                {
+                    'spark.dynamicAllocation.enabled': 'true',
+                },
+                {
+                    'spark.dynamicAllocation.enabled': 'true',
+                    'spark.dynamicAllocation.shuffleTracking.enabled': 'true',
+                    'spark.dynamicAllocation.executorAllocationRatio': '0.8',
+                    'spark.dynamicAllocation.cachedExecutorIdleTimeout': '420s',
+                    'spark.dynamicAllocation.minExecutors': '0',
+                    'spark.dynamicAllocation.maxExecutors': '2',
+                    'spark.executor.instances': '0',
+                },
+            ),
+            (
+                {
+                    'spark.dynamicAllocation.enabled': 'true',
+                    'spark.dynamicAllocation.maxExecutors': '512',
+                    'spark.dynamicAllocation.minExecutors': '128',
+                    'spark.dynamicAllocation.initialExecutors': '128',
+                },
+                {
+                    'spark.dynamicAllocation.enabled': 'true',
+                    'spark.dynamicAllocation.maxExecutors': '512',
+                    'spark.dynamicAllocation.minExecutors': '128',
+                    'spark.dynamicAllocation.initialExecutors': '128',
+                    'spark.dynamicAllocation.shuffleTracking.enabled': 'true',
+                    'spark.dynamicAllocation.executorAllocationRatio': '0.8',
+                    'spark.dynamicAllocation.cachedExecutorIdleTimeout': '420s',
+                    'spark.executor.instances': '128',
+                },
+            ),
+            (
+                {
+                    'spark.dynamicAllocation.enabled': 'true',
+                    'spark.executor.instances': '821',
+                },
+                {
+                    'spark.dynamicAllocation.enabled': 'true',
+                    'spark.dynamicAllocation.maxExecutors': '821',
+                    'spark.dynamicAllocation.minExecutors': '205',
+                    'spark.dynamicAllocation.shuffleTracking.enabled': 'true',
+                    'spark.dynamicAllocation.executorAllocationRatio': '0.8',
+                    'spark.dynamicAllocation.cachedExecutorIdleTimeout': '420s',
+                    'spark.executor.instances': '205',
+                },
+            ),
+            # dynamic resource allocation disabled explicitly
+            (
+                {
+                    'spark.dynamicAllocation.enabled': 'false',
+                    'spark.executor.instances': '600',
+                },
+                {
+                    'spark.dynamicAllocation.enabled': 'false',
+                    'spark.executor.instances': '600',
+                },
+            ),
+            # dynamic resource allocation not specified
+            (
+                {
+                    'spark.executor.instances': '606',
+                },
+                {
+                    'spark.executor.instances': '606',
+                },
+            ),
+        ],
+    )
+    def test_get_dra_configs(
+        self,
+        user_spark_opts,
+        expected_output,
+    ):
+        output = spark_config._get_dra_configs(user_spark_opts)
+        for key in expected_output.keys():
+            assert output[key] == expected_output[key], f'wrong value for {key}'
+
     @pytest.mark.parametrize(
         'user_spark_opts,aws_creds,expected_output', [
             # user specified to disable
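
To make the new expectations concrete, here is a minimal sketch of a
`_get_dra_configs` helper that satisfies the table above. It is a hypothetical
reconstruction inferred only from the expected outputs: the default instance
count of 2, the `// 4` floor for `minExecutors`, and the
`initialExecutors`-then-`minExecutors` fallback are all assumptions, not the
actual implementation in `spark_config`.

# Hypothetical sketch, inferred from the parametrized expectations above;
# not the real spark_config implementation.
DEFAULT_EXECUTOR_INSTANCES = 2  # assumed default when the user sets no count


def _get_dra_configs(spark_opts: dict) -> dict:
    # Last two cases: DRA disabled or unset, options pass through untouched.
    if spark_opts.get('spark.dynamicAllocation.enabled') != 'true':
        return spark_opts

    # Fill in the defaults the test expects without clobbering explicit
    # user settings (the second case keeps its own min/max/initial values).
    spark_opts.setdefault('spark.dynamicAllocation.shuffleTracking.enabled', 'true')
    spark_opts.setdefault('spark.dynamicAllocation.executorAllocationRatio', '0.8')
    spark_opts.setdefault('spark.dynamicAllocation.cachedExecutorIdleTimeout', '420s')

    # Derive max/min from the requested instance count: 821 gives max 821
    # and min 205 (821 // 4); with nothing set, 2 gives max 2 and min 0.
    instances = int(spark_opts.get('spark.executor.instances', DEFAULT_EXECUTOR_INSTANCES))
    spark_opts.setdefault('spark.dynamicAllocation.maxExecutors', str(instances))
    spark_opts.setdefault('spark.dynamicAllocation.minExecutors', str(instances // 4))

    # Start the job at the floor: initialExecutors when given ('128' in the
    # second case), otherwise minExecutors ('0' and '205' in the others).
    spark_opts['spark.executor.instances'] = spark_opts.get(
        'spark.dynamicAllocation.initialExecutors',
        spark_opts['spark.dynamicAllocation.minExecutors'],
    )
    return spark_opts

Note that `test_get_dra_configs` iterates only over the keys in
`expected_output`, so the real helper may set or pass through additional
options without affecting these assertions.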