Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 23 additions & 1 deletion sentry_sdk/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -453,6 +453,26 @@ def iter_stacks(tb):
tb_ = tb_.tb_next


# Compiled once at import time: this function runs for every captured stack
# frame, so the pattern lookup is hoisted out of the call.
# Matches e.g.:
#   from multiprocessing.spawn import spawn_main; spawn_main(tracker_fd=12, pipe_handle=28)
_MULTIPROCESSING_SPAWN_RE = re.compile(
    r"(from multiprocessing\.spawn import spawn_main; spawn_main\(tracker_fd=)"
    r"\d+(,\s*pipe_handle=)\d+(\).*)"
)


def normalize_context_line(raw_context_line: str) -> str:
    """
    Replace dynamic values in a context line with stable placeholders so that
    otherwise-identical errors group into a single issue.

    Multiprocessing spawn command lines embed per-process file descriptors
    (``tracker_fd``/``pipe_handle``) that differ between runs; substituting
    ``<tracker_fd>`` and ``<pipe_handle>`` keeps issue grouping stable.

    :param raw_context_line: Source line as read from the file; may be empty.
    :return: The normalized line, or the input unchanged when it is empty or
        does not match a known dynamic pattern.
    """
    if not raw_context_line:
        return raw_context_line

    return _MULTIPROCESSING_SPAWN_RE.sub(
        r"\1<tracker_fd>\2<pipe_handle>\3", raw_context_line
    )


def get_lines_from_file(
filename, # type: str
lineno, # type: int
Expand Down Expand Up @@ -488,7 +508,9 @@ def get_lines_from_file(
strip_string(line.strip("\r\n"), max_length=max_length)
for line in source[lower_bound:lineno]
]
context_line = strip_string(source[lineno].strip("\r\n"), max_length=max_length)
raw_context_line = source[lineno].strip("\r\n")
normalized_context_line = normalize_context_line(raw_context_line)
context_line = strip_string(normalized_context_line, max_length=max_length)
post_context = [
strip_string(line.strip("\r\n"), max_length=max_length)
for line in source[(lineno + 1) : upper_bound]
Expand Down
29 changes: 29 additions & 0 deletions tests/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
is_valid_sample_rate,
logger,
match_regex_list,
normalize_context_line,
parse_url,
parse_version,
safe_str,
Expand Down Expand Up @@ -1033,5 +1034,33 @@ def fake_getlines(filename):
assert result == expected_result


def test_normalize_context_line():
    """normalize_context_line replaces dynamic multiprocessing spawn
    parameters with placeholders and leaves all other lines untouched."""
    test_cases = [
        # (Input, Expected Output)
        (
            "from multiprocessing.spawn import spawn_main; spawn_main(tracker_fd=12, pipe_handle=28)",
            "from multiprocessing.spawn import spawn_main; spawn_main(tracker_fd=<tracker_fd>, pipe_handle=<pipe_handle>)",
        ),
        # Multi-digit descriptor values are normalized too.
        (
            "from multiprocessing.spawn import spawn_main; spawn_main(tracker_fd=123, pipe_handle=456)",
            "from multiprocessing.spawn import spawn_main; spawn_main(tracker_fd=<tracker_fd>, pipe_handle=<pipe_handle>)",
        ),
        # Lines without the multiprocessing spawn pattern pass through unchanged,
        # even when they happen to use the same keyword argument names.
        ("some_function()", "some_function()"),
        (
            "some_function(tracker_fd=12, pipe_handle=28)",
            "some_function(tracker_fd=12, pipe_handle=28)",
        ),
        # Empty input is returned as-is.
        ("", ""),
    ]

    for input_line, expected_output in test_cases:
        result = normalize_context_line(input_line)
        assert result == expected_output, f"Failed for input: {input_line}"


def test_package_version_is_none():
    """Looking up the version of a package that is not installed yields None."""
    version = package_version("non_existent_package")
    assert version is None