Merged
11 changes: 0 additions & 11 deletions service_configuration_lib/spark_config.py
@@ -65,9 +65,7 @@
     'spark.kubernetes.executor.label.spark.yelp.com/user',
 }

-K8S_AUTH_FOLDER = '/etc/pki/spark'
 K8S_BASE_VOLUMES: List[Dict[str, str]] = [
-    {'containerPath': K8S_AUTH_FOLDER, 'hostPath': K8S_AUTH_FOLDER, 'mode': 'RO'},
     {'containerPath': '/etc/passwd', 'hostPath': '/etc/passwd', 'mode': 'RO'},
     {'containerPath': '/etc/group', 'hostPath': '/etc/group', 'mode': 'RO'},
 ]
@@ -348,14 +346,6 @@ def _get_k8s_spark_env(
         spark_env.update({
             'spark.master': f'k8s://{k8s_server_address}',
         })
-    elif include_self_managed_configs:
-        spark_env.update(
-            {
-                'spark.kubernetes.authenticate.caCertFile': f'{K8S_AUTH_FOLDER}/{paasta_cluster}-ca.crt',
-                'spark.kubernetes.authenticate.clientKeyFile': f'{K8S_AUTH_FOLDER}/{paasta_cluster}-client.key',
-                'spark.kubernetes.authenticate.clientCertFile': f'{K8S_AUTH_FOLDER}/{paasta_cluster}-client.crt',
-            },
-        )

     return spark_env

@@ -1083,7 +1073,6 @@ def get_spark_conf(
         spark session.
     :param aws_region: The default aws region to use
     :param service_account_name: The k8s service account to use for spark k8s authentication.
-        If not provided, it uses cert files at {K8S_AUTH_FOLDER} to authenticate.
    :param force_spark_resource_configs: skip the resource/instances recalculation.
         This is strongly not recommended.
     :returns: spark opts in a dict.
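For reference, the cert-file settings removed above are superseded by Spark's standard service-account authentication. A minimal sketch of the substitution, assuming the caller passes service_account_name through to Spark's documented property (illustrative only, not this repo's exact wiring):

from typing import Dict


def _service_account_auth_conf(service_account_name: str) -> Dict[str, str]:
    # Spark's documented property for authenticating the driver pod via a
    # k8s service account, replacing caCertFile/clientKeyFile/clientCertFile.
    return {
        'spark.kubernetes.authenticate.driver.serviceAccountName': service_account_name,
    }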
10 changes: 1 addition & 9 deletions tests/spark_config_test.py
@@ -267,9 +267,7 @@ def mock_paasta_volumes(self, monkeypatch, tmpdir):

     @pytest.fixture
     def mock_existed_files(self, mock_paasta_volumes):
-        existed_files = [v.split(':')[0] for v in mock_paasta_volumes] + [
-            '/host/file1', '/host/file2', '/host/file3', '/etc/pki/spark', '/etc/group', '/etc/passwd',
-        ]
+        existed_files = [v.split(':')[0] for v in mock_paasta_volumes]
         with mock.patch('os.path.exists', side_effect=lambda f: f in existed_files):
             yield existed_files

@@ -321,7 +319,6 @@ def test_get_k8s_docker_volumes_conf(self, volumes):
         )

         expected_volumes.update({
-            **_get_k8s_volume('/etc/pki/spark', '/etc/pki/spark', 'ro'),
             **_get_k8s_volume('/etc/passwd', '/etc/passwd', 'ro'),
             **_get_k8s_volume('/etc/group', '/etc/group', 'ro'),
         })
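The _get_k8s_volume helper in this hunk builds the expected Spark hostPath volume properties for one mount. A rough sketch of what such a helper plausibly returns, based on Spark's documented spark.kubernetes.executor.volumes.* keys (hypothetical reconstruction; the real test helper may name volumes and order arguments differently):

def _get_k8s_volume(host_path: str, container_path: str, mode: str) -> dict:
    # Derive a volume name from the container path; the real helper's
    # naming scheme is an assumption here.
    name = container_path.strip('/').replace('/', '-').replace('.', '-')
    prefix = f'spark.kubernetes.executor.volumes.hostPath.{name}'
    return {
        f'{prefix}.mount.path': container_path,   # path inside the executor pod
        f'{prefix}.options.path': host_path,      # path on the host node
        f'{prefix}.mount.readOnly': str(mode == 'ro').lower(),
    }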
@@ -1232,11 +1229,6 @@ def assert_kubernetes_conf(self, base_volumes, ui_port, mock_ephemeral_port_rese
             'spark.kubernetes.pyspark.pythonVersion': '3',
             'spark.kubernetes.container.image': self.docker_image,
             'spark.kubernetes.namespace': 'paasta-spark',
-            'spark.kubernetes.authenticate.caCertFile': f'{spark_config.K8S_AUTH_FOLDER}/{self.cluster}-ca.crt',
-            'spark.kubernetes.authenticate.clientKeyFile': f'{spark_config.K8S_AUTH_FOLDER}/{self.cluster}-client.key',
-            'spark.kubernetes.authenticate.clientCertFile': (
-                f'{spark_config.K8S_AUTH_FOLDER}/{self.cluster}-client.crt'
-            ),
             'spark.kubernetes.executor.label.yelp.com/paasta_service': self.service,
             'spark.kubernetes.executor.label.yelp.com/paasta_instance': self.instance,
             'spark.kubernetes.executor.label.yelp.com/paasta_cluster': self.cluster,