Commit 2c091b1

Merge pull request #165 from Yelp/u/timma/MLCOMPUTE-4789_add_the_capability_to_simplify_the_manually_testing_of_service_config_changes
Added code to read the service config path from the environment, if present.
2 parents ff171ef + 5432b4a

2 files changed: 5 additions & 3 deletions

service_configuration_lib/spark_config.py

Lines changed: 0 additions & 1 deletion
@@ -76,7 +76,6 @@
 ]
 
 SUPPORTED_CLUSTER_MANAGERS = ['kubernetes', 'local']
-DEFAULT_SPARK_RUN_CONFIG = '/nail/srv/configs/spark.yaml'
 TICKET_NOT_REQUIRED_USERS = {
     'batch',  # non-human spark-run from batch boxes
     'TRON',  # tronjobs that run commands like paasta mark-for-deployment

service_configuration_lib/utils.py

Lines changed: 5 additions & 2 deletions
@@ -3,6 +3,7 @@
 import errno
 import hashlib
 import logging
+import os
 import random
 import string
 import uuid
@@ -45,8 +46,10 @@ def load_spark_srv_conf(preset_values=None) -> Tuple[
 ]:
     if preset_values is None:
         preset_values = dict()
+
+    config_path = os.environ.get('SPARK_RUN_CONFIG_PATH', DEFAULT_SPARK_RUN_CONFIG)
     try:
-        with open(DEFAULT_SPARK_RUN_CONFIG, 'r') as fp:
+        with open(config_path, 'r') as fp:
             loaded_values = yaml.safe_load(fp.read())
             spark_srv_conf = {**preset_values, **loaded_values}
             spark_constants = spark_srv_conf['spark_constants']
@@ -58,7 +61,7 @@ def load_spark_srv_conf(preset_values=None) -> Tuple[
             mandatory_default_spark_srv_conf, spark_costs,
         )
     except Exception as e:
-        log.warning(f'Failed to load {DEFAULT_SPARK_RUN_CONFIG}: {e}')
+        log.warning(f'Failed to load {config_path}: {e}')
         raise e
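For reference, a minimal sketch of how the new SPARK_RUN_CONFIG_PATH override could be exercised when manually testing a service config change. The temporary path below is a hypothetical example, not part of this commit, and the file is assumed to contain the same spark_constants structure that load_spark_srv_conf() expects; otherwise the loader logs a warning and re-raises.

# Sketch: load a locally edited Spark config instead of /nail/srv/configs/spark.yaml.
import os

from service_configuration_lib import utils

# Hypothetical local copy of spark.yaml holding the edits under test. The override
# must be set before load_spark_srv_conf() is called, because the path is read
# from the environment at call time.
os.environ['SPARK_RUN_CONFIG_PATH'] = '/tmp/spark-test.yaml'

# Returns the merged service config along with the derived Spark constants.
spark_conf = utils.load_spark_srv_conf()
print(spark_conf)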