|
2 | 2 | from socket import SO_REUSEADDR
|
3 | 3 | from socket import socket as Socket
|
4 | 4 | from socket import SOL_SOCKET
|
| 5 | +from typing import cast |
5 | 6 | from unittest import mock
|
6 | 7 | from unittest.mock import mock_open
|
7 | 8 | from unittest.mock import patch
|
8 | 9 |
|
9 | 10 | import pytest
|
| 11 | +from typing_extensions import Literal |
10 | 12 |
|
11 | 13 | from service_configuration_lib import utils
|
12 | 14 | from service_configuration_lib.utils import ephemeral_port_reserve_range
|
@@ -74,6 +76,82 @@ def test_generate_pod_template_path(hex_value):
|
74 | 76 | assert utils.generate_pod_template_path() == f'/nail/tmp/spark-pt-{hex_value}.yaml'
|
75 | 77 |
|
76 | 78 |
|
@pytest.mark.parametrize(
    'mem_str,unit_str,expected_mem',
    [
        ('13425m', 'm', 13425),  # same unit, no conversion
        ('138412032', 'm', 132),  # raw bytes -> MB
        ('65536k', 'g', 0.0625),  # KB -> GB
        ('1t', 'g', 1024),  # TB -> GB
        ('1.5g', 'm', 1536),  # fractional GB -> MB
        ('2048k', 'm', 2),  # KB -> MB
        ('0.5g', 'k', 524288),  # GB -> KB
        ('32768m', 't', 0.03125),  # MB -> TB
        ('1.5t', 'm', 1572864),  # fractional TB -> MB
    ],
)
def test_get_spark_memory_in_unit(mem_str, unit_str, expected_mem):
    """Memory strings in any supported suffix convert to the requested unit."""
    target_unit = cast(Literal['k', 'm', 'g', 't'], unit_str)
    result = utils.get_spark_memory_in_unit(mem_str, target_unit)
    assert result == expected_mem
| 95 | + |
| 96 | + |
@pytest.mark.parametrize(
    'mem_str,unit_str',
    (
        ('invalid', 'm'),
        ('1024mb', 'g'),
    ),
)
def test_get_spark_memory_in_unit_exceptions(mem_str, unit_str):
    """Malformed memory strings raise ValueError or IndexError."""
    target_unit = cast(Literal['k', 'm', 'g', 't'], unit_str)
    with pytest.raises((ValueError, IndexError)):
        utils.get_spark_memory_in_unit(mem_str, target_unit)
| 107 | + |
| 108 | + |
@pytest.mark.parametrize(
    'spark_conf,expected_mem',
    (
        ({'spark.driver.memory': '13425m'}, 13425),  # already in MB
        ({'spark.driver.memory': '138412032'}, 132),  # raw bytes -> MB
        ({'spark.driver.memory': '65536k'}, 64),  # KB -> MB
        ({'spark.driver.memory': '1g'}, 1024),  # GB -> MB
        # Unparseable value falls back to the library default.
        ({'spark.driver.memory': 'invalid'}, utils.SPARK_DRIVER_MEM_DEFAULT_MB),
        ({'spark.driver.memory': '1.5g'}, 1536),  # fractional GB -> MB
        ({'spark.driver.memory': '2048k'}, 2),  # KB -> MB
        ({'spark.driver.memory': '0.5t'}, 524288),  # TB -> MB
        ({'spark.driver.memory': '1024m'}, 1024),  # MB unchanged
        ({'spark.driver.memory': '1.5t'}, 1572864),  # fractional TB -> MB
    ),
)
def test_get_spark_driver_memory_mb(spark_conf, expected_mem):
    """Driver memory is read from the Spark conf and normalised to MB."""
    actual = utils.get_spark_driver_memory_mb(spark_conf)
    assert actual == expected_mem
| 126 | + |
| 127 | + |
@pytest.mark.parametrize(
    'spark_conf,expected_mem_overhead',
    (
        # Explicit overhead wins, normalised to MB.
        ({'spark.driver.memoryOverhead': '1024'}, 1024),
        ({'spark.driver.memoryOverhead': '1g'}, 1024),
        ({'spark.driver.memoryOverhead': '1.5g'}, 1536),
        # No explicit overhead: derived from driver memory x factor.
        ({'spark.driver.memory': '10240m', 'spark.driver.memoryOverheadFactor': '0.2'}, 2048),
        ({'spark.driver.memory': '10240m'}, 1024),  # default factor applied
        ({'spark.driver.memory': '2048k', 'spark.driver.memoryOverheadFactor': '0.05'}, 0.1),
        ({'spark.driver.memory': '0.5t', 'spark.driver.memoryOverheadFactor': '0.15'}, 78643.2),
        ({'spark.driver.memory': '1024m', 'spark.driver.memoryOverheadFactor': '0.25'}, 256),
        ({'spark.driver.memory': '1.5t', 'spark.driver.memoryOverheadFactor': '0.05'}, 78643.2),
        # Unparseable driver memory: default memory x default factor.
        (
            {'spark.driver.memory': 'invalid'},
            utils.SPARK_DRIVER_MEM_DEFAULT_MB * utils.SPARK_DRIVER_MEM_OVERHEAD_FACTOR_DEFAULT,
        ),
    ),
)
def test_get_spark_driver_memory_overhead_mb(spark_conf, expected_mem_overhead):
    """Driver memory overhead comes from the explicit setting or memory x factor."""
    actual = utils.get_spark_driver_memory_overhead_mb(spark_conf)
    assert actual == expected_mem_overhead
| 153 | + |
| 154 | + |
77 | 155 | @pytest.fixture
|
78 | 156 | def mock_runtimeenv():
|
79 | 157 | with patch('builtins.open', mock_open(read_data=MOCK_ENV_NAME)) as m:
|
|
0 commit comments