Merge pull request #192 from DUNE/develop-stephen
Fixed Sync/Timestamp + Packets-->Segments Backtracking
seg188 authored Jan 16, 2024
2 parents fdb008b + f83a623 commit b584551
Showing 2 changed files with 25 additions and 9 deletions.
19 changes: 12 additions & 7 deletions cli/simulate_pixels.py
@@ -754,13 +754,13 @@ def save_results(event_times, is_first_batch, results, i_mod=-1, light_only=False):
             logger.take_snapshot()
 
-    track_ids = cp.asarray(np.arange(segment_ids.shape[0], dtype=int))
 
+    segment_ids_arr = cp.asarray(segment_ids)
     # We divide the sample in portions that can be processed by the GPU
     is_first_batch = True
     logger.start()
     logger.take_snapshot([0])
     i_batch = 0
-    sync_start = event_times[0] // (fee.CLOCK_RESET_PERIOD * fee.CLOCK_CYCLE) * (fee.CLOCK_RESET_PERIOD * fee.CLOCK_CYCLE)
+    sync_start = event_times[0] // (fee.CLOCK_RESET_PERIOD * fee.CLOCK_CYCLE) * (fee.CLOCK_RESET_PERIOD * fee.CLOCK_CYCLE) + (fee.CLOCK_RESET_PERIOD * fee.CLOCK_CYCLE)
     det_borders = module_borders if mod2mod_variation else detector.TPC_BORDERS
     for batch_mask in tqdm(batching.TPCBatcher(all_mod_tracks, tracks, sim.EVENT_SEPARATOR, tpc_batch_size=sim.EVENT_BATCH_SIZE, tpc_borders=det_borders),
                            desc='Simulating batches...', ncols=80, smoothing=0):
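
For illustration, the new sync_start expression rounds the first event time up to the next clock-reset boundary instead of down to the previous one, so the first forwarded sync can no longer predate the first event. A minimal sketch of the arithmetic (the constants below are illustrative stand-ins, not the values defined in larndsim/fee.py):

    CLOCK_RESET_PERIOD = 10_000_000   # ticks between PPS resets (assumed)
    CLOCK_CYCLE = 0.1                 # microseconds per tick (assumed)
    reset_us = CLOCK_RESET_PERIOD * CLOCK_CYCLE   # 1e6 us, i.e. 1 s

    t0 = 2_345_678.9                  # first event time in us (made up)

    old_sync_start = t0 // reset_us * reset_us              # 2_000_000.0, before t0
    new_sync_start = t0 // reset_us * reset_us + reset_us   # 3_000_000.0, after t0

    assert old_sync_start <= t0 < new_sync_start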
@@ -774,11 +774,13 @@ def save_results(event_times, is_first_batch, results, i_mod=-1, light_only=False):
 
         this_event_time = [event_times[ievd % sim.MAX_EVENTS_PER_FILE]]
         # forward sync packets
-        if this_event_time[0] - sync_start > 0:
-            sync_times = cp.arange(sync_start, this_event_time[0], fee.CLOCK_RESET_PERIOD * fee.CLOCK_CYCLE) #us
+        if this_event_time[0] - sync_start >= 0:
+            sync_times = cp.arange(sync_start, this_event_time[0]+1, fee.CLOCK_RESET_PERIOD * fee.CLOCK_CYCLE) #us
+            # PPS sync also resets the timestamp in the PACMAN controller, so all of the timestamps in the packets should read 1e7 (for PPS)
+            sync_times_export = cp.full(sync_times.shape, fee.CLOCK_RESET_PERIOD * fee.CLOCK_CYCLE)
             if len(sync_times) > 0:
-                fee.export_sync_to_hdf5(output_filename, sync_times, i_mod)
-                sync_start = sync_times[-1] + fee.CLOCK_RESET_PERIOD
+                fee.export_sync_to_hdf5(output_filename, sync_times_export, i_mod)
+                sync_start = sync_times[-1] + fee.CLOCK_RESET_PERIOD * fee.CLOCK_CYCLE
         # beam trigger is only forwarded to one specific pacman (defined in fee)
         if (light.LIGHT_TRIG_MODE == 0 or light.LIGHT_TRIG_MODE == 1) and i_mod == 1:
             fee.export_timestamp_trigger_to_hdf5(output_filename, this_event_time, i_mod)
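
Together, the >= comparison and the +1 end point make the loop above emit one sync per elapsed reset period, exporting a constant value because the PPS pulse resets the PACMAN clock. A standalone sketch of that loop with NumPy in place of CuPy and made-up event times (reset_us stands in for fee.CLOCK_RESET_PERIOD * fee.CLOCK_CYCLE; the export call is stubbed out):

    import numpy as np

    reset_us = 1_000_000.0                    # assumed PPS period in us
    sync_start = 2_000_000.0                  # next reset edge after the first event
    event_times = [1_234_567.0, 3_456_789.0, 3_999_999.0]   # made up, in us

    for t_evt in event_times:
        if t_evt - sync_start >= 0:           # >= so an edge exactly at t_evt still fires
            sync_times = np.arange(sync_start, t_evt + 1, reset_us)   # us
            # every exported sync reads one full reset period, since PPS
            # zeroes the PACMAN timestamp rather than latching wall-clock time
            sync_export = np.full(sync_times.shape, reset_us)
            if len(sync_times) > 0:
                # fee.export_sync_to_hdf5(output_filename, sync_export, i_mod)
                sync_start = sync_times[-1] + reset_us

    # second event: sync_times == [2e6, 3e6] and sync_start advances to 4e6,
    # so the third event (3_999_999 us) correctly produces no new sync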
@@ -790,8 +792,11 @@ def save_results(event_times, is_first_batch, results, i_mod=-1, light_only=False):
             null_light_results_acc['light_event_id'].append(cp.full(1, ievd)) # one event
             save_results(event_times, is_first_batch, null_light_results_acc, i_mod, light_only=True)
             del null_light_results_acc['light_event_id']
+            # Nothing to simulate for charge readout?
             continue
 
+
+
         for itrk in tqdm(range(0, evt_tracks.shape[0], sim.BATCH_SIZE),
                          delay=1, desc=' Simulating event %i batches...' % ievd, leave=False, ncols=80):
             if itrk > 0:
@@ -951,7 +956,7 @@ def save_results(event_times, is_first_batch, results, i_mod=-1, light_only=False):
             results_acc['unique_pix'].append(unique_pix)
             results_acc['current_fractions'].append(current_fractions)
             #track_pixel_map[track_pixel_map != -1] += first_trk_id + itrk
-            track_pixel_map[track_pixel_map != -1] = track_ids[batch_mask][track_pixel_map[track_pixel_map != -1] + itrk]
+            track_pixel_map[track_pixel_map != -1] = segment_ids_arr[batch_mask][track_pixel_map[track_pixel_map != -1] + itrk]
             results_acc['track_pixel_map'].append(track_pixel_map)
 
             # ~~~ Light detector response simulation ~~~
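
The second half of the commit (the Packets-->Segments backtracking fix) replaces positional track indices with the segment IDs read from the input file, which need not run 0..N-1. A small illustrative remap with made-up shapes and IDs:

    import numpy as np

    segment_ids_arr = np.array([101, 205, 207, 310, 412])    # real IDs, not row numbers
    batch_mask = np.array([False, True, True, True, False])  # tracks in this batch
    itrk = 0                                                  # sub-batch offset

    # entries are batch-local track indices; -1 marks empty slots
    track_pixel_map = np.array([[0, 2, -1],
                                [1, -1, -1]])

    valid = track_pixel_map != -1
    track_pixel_map[valid] = segment_ids_arr[batch_mask][track_pixel_map[valid] + itrk]

    print(track_pixel_map)
    # [[205 310  -1]
    #  [207  -1  -1]]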
15 changes: 13 additions & 2 deletions larndsim/fee.py
@@ -189,11 +189,11 @@ def export_to_hdf5(event_id_list,
 
     unique_events, unique_events_inv = np.unique(event_id_list[...,0], return_inverse=True)
     event_start_time_list = (event_start_times[unique_events_inv] / CLOCK_CYCLE).astype(int)
 
     light_trigger_times = np.empty((0,)) if light_trigger_times is None else light_trigger_times
     light_trigger_event_id = np.empty((0,), dtype=int) if light_trigger_event_id is None else light_trigger_event_id
 
-    rollover_count = 0
+    last_time_tick = -1
     for itick, adcs in enumerate(adc_list):
         ts = adc_ticks_list[itick]
         pixel_id = unique_pix[itick]
@@ -311,17 +311,28 @@ def export_to_hdf5(event_id_list,
                         p.first_packet = 1
                     p.assign_parity()
 
+                    if not time_tick == last_time_tick:
+                        # timestamp packet every time there is a new "message"
+                        # the logic in real data for when a timestamp packet is issued is complicated and depends on pacman CPU speed and packet creation rate
+                        # best simple approximation is that any group of packets with the same timestamp gets a single timestamp packet
+                        last_time_tick = time_tick
+                        packets.append(TimestampPacket(timestamp=np.floor(event_start_time_list[0] * CLOCK_CYCLE * units.mus/units.s))) # s
+                        packets[-1].chip_key = Key(io_group,0,0)
+                        packets_mc_evt.append([-1])
+                        packets_mc_trk.append([-1] * (ASSOCIATION_COUNT_TO_STORE * 2))
+                        packets_frac.append([0] * (ASSOCIATION_COUNT_TO_STORE*2))
                     packets_mc_evt.append([event])
                     packets_mc_trk.append(track_ids[itick])
                     packets_frac.append(current_fractions[itick][iadc])
                     packets.append(p)
 
                 else:
                     break
 
     if packets:
         packet_list = PacketCollection(packets, read_id=0, message='')
         hdf5format.to_file(filename, packet_list, workers=1)
 
     dtype = np.dtype([('event_ids',f'(1,)i8'),
                       ('segment_ids',f'({ASSOCIATION_COUNT_TO_STORE},)i8'),
                       ('fraction', f'({ASSOCIATION_COUNT_TO_STORE},)f8')])
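
The new guard emits one TimestampPacket per group of packets sharing a time_tick, apparently replacing the rollover counter initialised earlier. A stripped-down sketch of that grouping logic, using hypothetical tuple stand-ins for the larpix packet objects:

    def export(packet_time_ticks, event_start_time_s):
        packets = []
        last_time_tick = -1
        for time_tick in packet_time_ticks:
            if time_tick != last_time_tick:
                # new "message": approximate the real PACMAN behaviour with
                # a single timestamp packet per run of equal time_ticks
                last_time_tick = time_tick
                packets.append(('TS', event_start_time_s))
            packets.append(('DATA', time_tick))
        return packets

    pkts = export([5, 5, 5, 9, 9, 12], event_start_time_s=42)
    # -> TS, DATA, DATA, DATA, TS, DATA, DATA, TS, DATA   (three timestamp packets)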