Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 25 additions & 4 deletions src/integrationtest/data_file_check_utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,36 @@
from daqdataformats import FragmentErrorBits
from hdf5libs import HDF5RawDataFile

def get_TC_types(h5_file, record_id) -> list:
    """Return the TriggerCandidate type strings (e.g. "kTiming") found in a record.

    Walks every Trigger_Candidate fragment in the record and decodes each
    TriggerCandidate packed into the fragment payload.  A single fragment may
    hold several TCs (e.g. when multiple triggers fire at the same time), so
    the result is a list, one entry per TC, in payload order.

    Parameters:
        h5_file:   an hdf5libs.HDF5RawDataFile (or duck-typed equivalent)
        record_id: a record identifier accepted by h5_file accessors

    Returns:
        list[str] of trigger-candidate type names; empty if the record has
        no Trigger_Candidate fragments.
    """
    type_list = []
    src_ids = h5_file.get_source_ids_for_fragment_type(record_id, 'Trigger_Candidate')
    for src_id in src_ids:
        frag = h5_file.get_frag(record_id, src_id)
        # TCs are packed back-to-back in the fragment payload; step through
        # them using each TC's own serialized size until the payload is spent.
        tc_byte_offset = 0
        while tc_byte_offset < frag.get_data_size():
            tc = trgdataformats.TriggerCandidate(frag.get_data(tc_byte_offset))
            type_list.append(trgdataformats.trigger_candidate_type_to_string(tc.data.type))
            tc_byte_offset += tc.sizeof()
    return type_list

# 17-Nov-2025, KAB: added a function to get the trigger type string (e.g. kTiming)
# based on the trigger_type field in the TriggerRecordHeader. (I also modified the
# data_file_checks that make use of the trigger type to use this new function now.)
# Previously, the TC_type was used. I kept the function that fetched the TC_type above,
# but there may no longer be a need for it. TC_type is not reliable when there is more
# than one TC in the TriggerCandidate fragment in the TriggerRecord. Multiple TCs in
# a single TC fragment can happen when there are multiple triggers configured for a run,
# and two or more of them occur at the same time.
def get_trigger_type_string(h5_file, record_id):
    """Return the trigger type string (e.g. "kTiming") for the given record.

    The string is derived from the trigger_type field of the record's
    TriggerRecordHeader, converted through the TriggerCandidateData.Type
    enum.  Files that do not contain trigger records yield "kUnknown".
    """
    # Guard clause: non-trigger-record files have no TriggerRecordHeader.
    if not h5_file.is_trigger_record_type():
        return "kUnknown"
    header = h5_file.get_trigger_record(record_id).get_header_data()
    # trigger_type is a power-of-two bitfield; the exponent is the enum value.
    enum_value = trgdataformats.TriggerCandidateData.Type(
        power_of_two_exponent(header.trigger_type)
    )
    return trgdataformats.trigger_candidate_type_to_string(enum_value)

def get_record_ordinal_strings(record_id, full_record_list):
ordinal_strings = []
Expand Down
33 changes: 23 additions & 10 deletions src/integrationtest/data_file_checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
import re
from hdf5libs import HDF5RawDataFile
from integrationtest.data_file_check_utilities import (
get_TC_type,
get_TC_types,
get_trigger_type_string,
get_record_ordinal_strings,
get_fragment_count_limits,
get_fragment_size_limits,
Expand Down Expand Up @@ -52,6 +53,18 @@ def sanity_check(datafile):
if triggerrecordheader_count > 1:
print(f"\N{POLICE CARS REVOLVING LIGHT} More than one TriggerRecordHeader in record {event} \N{POLICE CARS REVOLVING LIGHT}")
passed=False

# check that the trigger_type in the TriggerRecordHeader matches one of the
# TriggerCandidates in the TC fragment
h5_file = HDF5RawDataFile(datafile.name)
records = h5_file.get_all_record_ids()
for rec in records:
trigger_type_string = get_trigger_type_string(h5_file, rec)
TC_type_list = get_TC_types(h5_file, rec)
if trigger_type_string not in TC_type_list:
print(f"\N{POLICE CARS REVOLVING LIGHT} The trigger_type in the TriggerRecordHeader ({trigger_type_string}) does not match any of the TriggerCandidates in the record ({TC_type_list}) \N{POLICE CARS REVOLVING LIGHT}")
passed=False

if passed:
print(f"\N{WHITE HEAVY CHECK MARK} Sanity-check passed for file {base_filename}")
else:
Expand Down Expand Up @@ -158,11 +171,11 @@ def check_fragment_count(datafile, params):
h5_file = HDF5RawDataFile(datafile.name)
records = h5_file.get_all_record_ids()
for rec in records:
tc_type_string = get_TC_type(h5_file, rec)
trigger_type_string = get_trigger_type_string(h5_file, rec)
rno_strings = get_record_ordinal_strings(rec, records)
fragment_count_limits = get_fragment_count_limits(params, tc_type_string, rno_strings)
fragment_count_limits = get_fragment_count_limits(params, trigger_type_string, rno_strings)
if (debug_mask & 0x1) != 0:
print(f'DataFileChecks Debug: the fragment count limits are {fragment_count_limits} for TC type {tc_type_string} and record ordinal strings {rno_strings}')
print(f'DataFileChecks Debug: the fragment count limits are {fragment_count_limits} for TC type {trigger_type_string} and record ordinal strings {rno_strings}')
if fragment_count_limits[0] not in min_count_list:
min_count_list.append(fragment_count_limits[0])
if fragment_count_limits[1] not in max_count_list:
Expand Down Expand Up @@ -214,11 +227,11 @@ def check_fragment_sizes(datafile, params):
h5_file = HDF5RawDataFile(datafile.name)
records = h5_file.get_all_record_ids()
for rec in records:
tc_type_string = get_TC_type(h5_file, rec)
trigger_type_string = get_trigger_type_string(h5_file, rec)
rno_strings = get_record_ordinal_strings(rec, records)
size_limits = get_fragment_size_limits(params, tc_type_string, rno_strings)
size_limits = get_fragment_size_limits(params, trigger_type_string, rno_strings)
if (debug_mask & 0x4) != 0:
print(f'DataFileChecks Debug: the fragment size limits are {size_limits} for TC type {tc_type_string} and record ordinal strings {rno_strings}')
print(f'DataFileChecks Debug: the fragment size limits are {size_limits} for TC type {trigger_type_string} and record ordinal strings {rno_strings}')
if size_limits[0] not in min_size_list:
min_size_list.append(size_limits[0])
if size_limits[1] not in max_size_list:
Expand Down Expand Up @@ -267,11 +280,11 @@ def check_fragment_error_flags(datafile, params):
h5_file = HDF5RawDataFile(datafile.name)
records = h5_file.get_all_record_ids()
for rec in records:
tc_type_string = get_TC_type(h5_file, rec)
trigger_type_string = get_trigger_type_string(h5_file, rec)
rno_strings = get_record_ordinal_strings(rec, records)
error_bitmask = get_fragment_error_bitmask(params, tc_type_string, rno_strings)
error_bitmask = get_fragment_error_bitmask(params, trigger_type_string, rno_strings)
if (debug_mask & 0x4) != 0:
print(f'DataFileChecks Debug: the fragment error bitmask is {hex(error_bitmask)} for TC type {tc_type_string} and record ordinal strings {rno_strings}')
print(f'DataFileChecks Debug: the fragment error bitmask is {hex(error_bitmask)} for TC type {trigger_type_string} and record ordinal strings {rno_strings}')
if error_bitmask not in error_mask_list:
error_mask_list.append(error_bitmask)
if subdet_string == "":
Expand Down