Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGES.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@ In line with pydicom, support for Python 3.9 will be removed in that version.
### Fixes
* tags not allowed in multi-frame functional groups have been listed
as errors twice (see [#196](../../issues/196))
* fixes to correctly evaluate SR documents (see [#206](../../issues/206)):
* sequences defined recursively in the standard are now supported
* conditions for including macros inside a sequence are now evaluated on the correct level

### Infrastructure
* added Python 3.14 to CI (currently needs development version of `pydicom`)
Expand Down
12 changes: 6 additions & 6 deletions dicom_validator/spec_reader/part3_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,9 +223,7 @@ def _handle_included_attributes(self, text, columns, current_descriptions):
)
return
include_ref = include_node.attrib["linkend"]
if self._current_refs and include_ref == self._current_refs[-1]:
self.logger.debug("Self reference in %s - ignoring.", include_ref)
return
recursive = bool(self._current_refs) and (include_ref == self._current_refs[-1])
self._current_refs.append(include_ref)
element, label = self._get_ref_element_and_label(include_ref)
if label not in self._module_descriptions:
Expand All @@ -234,9 +232,11 @@ def _handle_included_attributes(self, text, columns, current_descriptions):
raise SpecReaderLookupError(
"Failed to lookup include reference " + include_ref
)
# it is allowed to have no attributes (example: Raw Data)
ref_description = self._parse_module_description(ref_node) or {}
self._module_descriptions[label] = ref_description
# recursive reference - prevent recursion
if not recursive:
# it is allowed to have no attributes (example: Raw Data)
ref_description = self._parse_module_description(ref_node) or {}
self._module_descriptions[label] = ref_description
include_attr = {"ref": label}
# this is currently the only occurring condition for includes
cond_prefix = "if and only if "
Expand Down
45 changes: 42 additions & 3 deletions dicom_validator/tests/validator/test_iod_validator.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,10 @@
pytestmark = pytest.mark.usefixtures("disable_logging")


def new_data_set(tags, ds: Dataset | None = None):
def new_data_set(tags, *, top_level: bool = True):
"""Create a DICOM data set with the given attributes"""
tags = tags or {}
data_set = ds or Dataset()
data_set = Dataset()
if not tags:
return data_set
for tag, value in tags.items():
Expand All @@ -30,9 +30,13 @@ def new_data_set(tags, ds: Dataset | None = None):
if vr == "SQ":
items = []
for item_tags in value:
items.append(new_data_set(item_tags, data_set))
items.append(new_data_set(item_tags, top_level=False))
value = Sequence(items)
data_set[tag] = DataElement(tag, vr, value)
if not top_level:
# this is a sequence item
return data_set

# write the dataset into a file and read it back to ensure the real behavior
if "SOPInstanceUID" not in data_set:
data_set.SOPInstanceUID = "1.2.3"
Expand Down Expand Up @@ -759,3 +763,38 @@ def test_incorrect_parsed_type(self, validator):
"values": ["closed"],
}
assert validator.validate()

@pytest.mark.tag_set(
    {
        "SOPClassUID": uid.ComprehensiveSRStorage,
        "ValueType": "CONTAINER",
        "ContinuityOfContent": "SEPARATE",
        "ContentSequence": [
            {
                "RelationshipType": "CONTAINS",
                "ValueType": "CONTAINER",
                "ContinuityOfContent": "SEPARATE",
                "ContentSequence": [
                    {
                        "RelationshipType": "HAS OBS CONTEXT",
                        "ValueType": "UIDREF",
                    }
                ],
            }
        ],
    }
)
def test_recursive_reference(self, validator):
    """Validate an SR dataset whose Content Sequence contains another
    Content Sequence item, i.e. a sequence that the standard defines
    recursively.

    Regression test for #206: the recursive include must neither flag the
    nested Content Sequence as unexpected nor demand Continuity Of Content
    for the inner UIDREF item.
    """
    # Content Sequence (0040,A730) is defined recursively in SR Document Content
    # and is allowed inside another Content Sequence item
    result = validator.validate()
    assert not has_tag_error(
        result, "SR Document Content", 0x0040_A730, ErrorCode.TagUnexpected
    )
    # Continuity Of Content (0040,A050) is present with ValueType "CONTAINER",
    # but not with ValueType "UIDREF";
    # this is correct, so no error shall be shown for Continuity Of Content
    assert not has_tag_error(
        result, "SR Document Content", 0x0040_A050, ErrorCode.TagMissing
    )
11 changes: 11 additions & 0 deletions dicom_validator/tests/validator/test_iod_validator_func_groups.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,12 @@ def new_data_set(shared_macros, per_frame_macros):
data_set.ContentDate = "20000101"
data_set.ContentTime = "120000"
data_set.NumberOfFrames = "3"
mask_subtraction_seq = Sequence()
item = Dataset()
item.MaskOperation = "AVG_SUB"
mask_subtraction_seq.append(item)
data_set.MaskSubtractionSequence = mask_subtraction_seq

shared_groups = Sequence()
if shared_macros:
item = Dataset()
Expand Down Expand Up @@ -180,6 +186,11 @@ def test_missing_sequences(self, validator, caplog):
result, "Referenced Image", 0x0008_1140, ErrorCode.TagMissing
)

# Subtraction Item ID, required because of the SOP Class UID value (in dataset root)
assert has_tag_error(result, "Mask", 0x0028_9416, ErrorCode.TagMissing)
# Mask Frame Numbers, required because Mask Operation (inside the sequence item) is AVG_SUB
assert has_tag_error(result, "Mask", 0x0028_6110, ErrorCode.TagMissing)

messages = [rec.message for rec in caplog.records]
assert '\nModule "Irradiation Event Identification":' in messages
assert "(5200,9229) (Shared Functional Groups Sequence):" in messages
Expand Down
43 changes: 30 additions & 13 deletions dicom_validator/validator/iod_validator.py
Original file line number Diff line number Diff line change
Expand Up @@ -341,7 +341,6 @@ def _validate_attributes(
The dictionary of found errors.
"""
errors = TagErrors()

for tag_id_string, attribute in attributes.items():
if tag_id_string == "modules":
self._validate_func_group_modules(attribute)
Expand All @@ -367,9 +366,21 @@ def _validate_attributes(
self._dataset_stack[-1].stack,
)
)
errors.update(
self._validate_attributes(attribute["items"], True)
# the item attributes are only created at this point,
# where we have descended into the related sequence item level
item_attribute = attribute["items"]
if "group_macros" in item_attribute:
group_macros = item_attribute["group_macros"]
items = item_attribute["items"]
else:
group_macros = None
items = item_attribute
item_attributes = self._expanded_module_info(
items,
group_macros,
expand_items=True,
)
errors.update(self._validate_attributes(item_attributes, True))
self._dataset_stack.pop()

if report_unexpected_tags:
Expand Down Expand Up @@ -601,7 +612,7 @@ def _does_module_strongly_exist(
return True

def _get_existing_tags_of_module(
self, module_info: dict[str, dict]
self, module_info: dict[str, dict | str]
) -> set[DicomTag]:
existing_tag_ids = set()
for tag_id_string in module_info:
Expand All @@ -626,7 +637,7 @@ def _tag_id(self, tag_id_string: str) -> DicomTag:
group = group[:2] + "00"

parents = [(d.tag or 0) for d in self._dataset_stack[1:]] or None
return DicomTag(BaseTag((int(group, 16) << 16) + int(element, 16)), parents)
return DicomTag((int(group, 16) << 16) + int(element, 16), parents)

def _tag_matches(
self, tag_value: Any, operator: ConditionOperator, values: list
Expand All @@ -650,18 +661,20 @@ def _tag_matches(
return False

def _get_module_info(
self, module_ref: str, group_macros: dict[str, dict[str, dict]] | None = None
) -> dict[str, dict]:
self,
module_ref: str,
group_macros: dict[str, dict[str, dict]] | None = None,
) -> dict[str, dict | str]:
return self._expanded_module_info(
module_ref, self._dicom_info.modules[module_ref], group_macros
self._dicom_info.modules[module_ref], group_macros
)

def _expanded_module_info(
self,
module_ref: str,
module_info: dict[str, dict],
group_macros: dict[str, dict[str, dict]] | None,
):
expand_items: bool = False,
) -> dict[str, dict | str]:
expanded_mod_info: dict[str, dict | str] = {}
for k, v in module_info.items():
if k == "include":
Expand All @@ -678,10 +691,14 @@ def _expanded_module_info(
expanded_mod_info.update(
self._get_module_info(ref, group_macros)
)
elif not expand_items and k == "items":
# we shall not create the item attributes at this point, because
# they may have conditions that refer to item inside the sequence,
# and we are currently at a higher dataset level
# instead, we save the information needed to create them lazily
expanded_mod_info[k] = {"items": v, "group_macros": group_macros}
elif isinstance(v, dict):
expanded_mod_info[k] = self._expanded_module_info(
module_ref, v, group_macros
)
expanded_mod_info[k] = self._expanded_module_info(v, group_macros)
else:
expanded_mod_info[k] = v
return expanded_mod_info
Expand Down
4 changes: 4 additions & 0 deletions dicom_validator/validator/validation_result.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,10 @@ class DicomTag:
tag: BaseTag
parents: list[BaseTag] | None = None

def __init__(self, tag: int, parents: list[int] | None = None):
    """Initialize the tag, coercing plain ints to pydicom ``BaseTag``.

    :param tag: the tag value as an int (a ``BaseTag`` is also accepted,
        since ``BaseTag`` is an ``int`` subclass)
    :param parents: tag values of the enclosing sequence tags, or ``None``
        for a top-level tag
        # NOTE(review): presumably ordered outermost-first — confirm
        # against the callers that build the parent stack
    """
    self.tag = BaseTag(tag)
    # an empty parents list is deliberately normalized to None
    self.parents = [BaseTag(p) for p in parents] if parents else None

def __hash__(self):
    # Fold the parent tag values into the hash via their sum; this makes
    # the hash independent of parent ordering, so differently-nested tags
    # with the same parent multiset collide.
    # NOTE(review): acceptable for a hash as long as __eq__ (not visible
    # here) distinguishes such cases — verify.
    return hash(self.tag + (sum(self.parents) if self.parents else 0))

Expand Down