Skip to content

Commit 0c5dfc9

Browse files
authored
Remove emitting metrics to clusterman since those tables don't exist anymore. (#150)
1 parent c0e312f commit 0c5dfc9

File tree

2 files changed: +0 additions, -50 deletions (lines changed)

2 files changed: +0 additions, -50 deletions (lines changed)

service_configuration_lib/spark_config.py

Lines changed: 0 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -1317,35 +1317,6 @@ def generate_clusterman_metrics_entries(
13171317
}
13181318

13191319

1320-
def _emit_resource_requirements(
1321-
clusterman_metrics,
1322-
resources: Mapping[str, int],
1323-
app_name: str,
1324-
spark_web_url: str,
1325-
cluster: str,
1326-
pool: str,
1327-
) -> None:
1328-
1329-
with open(CLUSTERMAN_YAML_FILE_PATH, 'r') as clusterman_yaml_file:
1330-
clusterman_yaml = yaml.safe_load(clusterman_yaml_file.read())
1331-
aws_region = clusterman_yaml['clusters'][cluster]['aws_region']
1332-
1333-
client = clusterman_metrics.ClustermanMetricsBotoClient(
1334-
region_name=aws_region, app_identifier=pool,
1335-
)
1336-
metrics_entries = generate_clusterman_metrics_entries(
1337-
clusterman_metrics,
1338-
resources,
1339-
app_name,
1340-
spark_web_url,
1341-
)
1342-
with client.get_writer(
1343-
clusterman_metrics.APP_METRICS, aggregate_meteorite_dims=True,
1344-
) as writer:
1345-
for metric_key, required_quantity in metrics_entries.items():
1346-
writer.send((metric_key, int(time.time()), required_quantity))
1347-
1348-
13491320
def get_spark_hourly_cost(
13501321
clusterman_metrics,
13511322
resources: Mapping[str, int],
@@ -1387,10 +1358,6 @@ def send_and_calculate_resources_cost(
13871358
is the requested resources.
13881359
"""
13891360
cluster = spark_conf['spark.executorEnv.PAASTA_CLUSTER']
1390-
app_name = spark_conf['spark.app.name']
13911361
resources = get_resources_requested(spark_conf)
13921362
hourly_cost = get_spark_hourly_cost(clusterman_metrics, resources, cluster, pool)
1393-
_emit_resource_requirements(
1394-
clusterman_metrics, resources, app_name, spark_web_url, cluster, pool,
1395-
)
13961363
return hourly_cost, resources

tests/spark_config_test.py

Lines changed: 0 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -1703,7 +1703,6 @@ def test_send_and_calculate_resources_cost(
17031703
mock_get_resources_requested,
17041704
mock_time,
17051705
):
1706-
mock_clusterman_metrics.generate_key_with_dimensions.side_effect = lambda x, _: x
17071706
app_name = 'test-app'
17081707
spark_opts = {
17091708
'spark.executorEnv.PAASTA_CLUSTER': 'test-cluster',
@@ -1714,22 +1713,6 @@ def test_send_and_calculate_resources_cost(
17141713
mock_clusterman_metrics, spark_opts, web_url, 'test-pool',
17151714
)
17161715

1717-
expected_dimension = {'framework_name': app_name, 'webui_url': web_url}
1718-
1719-
mock_clusterman_metrics.generate_key_with_dimensions.assert_has_calls([
1720-
mock.call('requested_cpus', expected_dimension),
1721-
mock.call('requested_mem', expected_dimension),
1722-
])
1723-
1724-
mock_writer = (
1725-
mock_clusterman_metrics.ClustermanMetricsBotoClient.return_value
1726-
.get_writer.return_value.__enter__.return_value
1727-
)
1728-
mock_writer.send.assert_has_calls([
1729-
mock.call(('requested_cpus', int(mock_time), 10)),
1730-
mock.call(('requested_mem', int(mock_time), 2048)),
1731-
])
1732-
17331716
mock_clusterman_metrics.util.costs.estimate_cost_per_hour.assert_called_once_with(
17341717
cluster='test-cluster', pool='test-pool', cpus=10, mem=2048,
17351718
)

0 commit comments

Comments (0)