62
62
'spark.kubernetes.executor.label.paasta.yelp.com/instance' ,
63
63
'spark.kubernetes.executor.label.paasta.yelp.com/cluster' ,
64
64
}
65
# Directory (same path on host and in the container) holding the
# Kubernetes auth secrets used by Spark executors.
K8S_AUTH_FOLDER = '/etc/pki/spark'

# Default read-only hostPath volume that exposes the K8s auth secrets
# to every executor pod.
DEFAULT_SPARK_K8S_SECRET_VOLUME = {
    'hostPath': K8S_AUTH_FOLDER,
    'containerPath': K8S_AUTH_FOLDER,
    'mode': 'RO',
}

66
72
# Module-level logger. Use getLogger(), not Logger(): getLogger registers the
# logger with the logging manager so it participates in the logger hierarchy
# and inherits handler/level configuration; a directly-instantiated Logger
# does not.
log = logging.getLogger(__name__)
67
73
68
74
@@ -163,6 +169,23 @@ def _get_mesos_docker_volumes_conf(
163
169
return {'spark.mesos.executor.docker.volumes' : volume_str }
164
170
165
171
172
def _get_k8s_docker_volumes_conf(
    volumes: Optional[List[Mapping[str, str]]] = None,
) -> Dict[str, str]:
    """Build `spark.kubernetes.executor.volumes.*` entries for hostPath mounts.

    Each volume mapping must provide 'containerPath', 'hostPath', and 'mode'
    keys. On top of the caller-supplied volumes, always mounts the default
    K8s secret volume plus read-only /etc/passwd and /etc/group.

    :param volumes: optional list of volume mappings; not modified.
    :returns: dict of Spark conf keys (one mount.path / mount.readOnly /
        options.path triple per volume, indexed by position) to values.
    """
    env = {}
    # Copy before appending: `volumes or []` aliases the caller's list when it
    # is non-empty, so appending to it directly would mutate the argument.
    k8s_volumes = list(volumes or [])
    k8s_volumes.append(DEFAULT_SPARK_K8S_SECRET_VOLUME)
    k8s_volumes.append({'containerPath': '/etc/passwd', 'hostPath': '/etc/passwd', 'mode': 'RO'})
    k8s_volumes.append({'containerPath': '/etc/group', 'hostPath': '/etc/group', 'mode': 'RO'})
    # Spark identifies each hostPath volume by an arbitrary name; the list
    # index serves as that name.
    for volume_name, volume in enumerate(k8s_volumes):
        env[f'spark.kubernetes.executor.volumes.hostPath.{volume_name}.mount.path'] = volume['containerPath']
        env[f'spark.kubernetes.executor.volumes.hostPath.{volume_name}.mount.readOnly'] = (
            'true' if volume['mode'].lower() == 'ro' else 'false'
        )
        env[f'spark.kubernetes.executor.volumes.hostPath.{volume_name}.options.path'] = volume['hostPath']
    return env
187
+
188
+
166
189
def _append_sql_shuffle_partitions_conf (spark_opts : Dict [str , str ]) -> Dict [str , str ]:
167
190
if 'spark.sql.shuffle.partitions' in spark_opts :
168
191
return spark_opts
@@ -409,7 +432,7 @@ def _get_k8s_spark_env(
409
432
paasta_pool : str ,
410
433
) -> Dict [str , str ]:
411
434
spark_env = {
412
- 'spark.master' : f'k8s://https://k8s.paasta- { paasta_cluster } .yelp:16443 ' ,
435
+ 'spark.master' : f'k8s://https://k8s.{ paasta_cluster } .paasta:6443 ' ,
413
436
'spark.executorEnv.PAASTA_SERVICE' : paasta_service ,
414
437
'spark.executorEnv.PAASTA_INSTANCE' : paasta_instance ,
415
438
'spark.executorEnv.PAASTA_CLUSTER' : paasta_cluster ,
@@ -430,14 +453,8 @@ def _get_k8s_spark_env(
430
453
'spark.kubernetes.executor.label.paasta.yelp.com/cluster' : paasta_cluster ,
431
454
'spark.kubernetes.node.selector.yelp.com/pool' : paasta_pool ,
432
455
'spark.kubernetes.executor.label.yelp.com/pool' : paasta_pool ,
456
+ ** _get_k8s_docker_volumes_conf (volumes ),
433
457
}
434
- for i , volume in enumerate (volumes or []):
435
- volume_name = i
436
- spark_env [f'spark.kubernetes.executor.volumes.hostPath.{ volume_name } .mount.path' ] = volume ['containerPath' ]
437
- spark_env [f'spark.kubernetes.executor.volumes.hostPath.{ volume_name } .mount.readOnly' ] = (
438
- 'true' if volume ['mode' ].lower () == 'ro' else 'false'
439
- )
440
- spark_env [f'spark.kubernetes.executor.volumes.hostPath.{ volume_name } .options.path' ] = volume ['hostPath' ]
441
458
return spark_env
442
459
443
460
0 commit comments