Commit: Fix indexing into event_times

mjkramer committed May 5, 2023
1 parent c013e94 commit 458218e

Showing 4 changed files with 21 additions and 6 deletions.
18 changes: 13 additions & 5 deletions cli/simulate_pixels.py
```diff
@@ -351,12 +351,20 @@ def run_simulation(input_filename,
     logger.start()
     logger.take_snapshot()
 
-    # create a lookup table for event timestamps
-    tot_evids = np.unique(tracks[sim.EVENT_SEPARATOR])
+    # Create a lookup table for event timestamps.
+
+    # Event IDs may have some offset (e.g. to make them globally unique within
+    # an MC production), which we assume to be a multiple of
+    # sim.MAX_EVENTS_PER_FILE. We remove this offset by taking the modulus with
+    # sim.MAX_EVENTS_PER_FILE, which gives us zero-based "local" event IDs that
+    # we can use when indexing into event_times. Note that num_evids is actually
+    # an upper bound on the number of events, since there may be gaps due to
+    # events that didn't deposit any energy in the LAr. Such gaps are harmless.
+    num_evids = (tracks[sim.EVENT_SEPARATOR].max() % sim.MAX_EVENTS_PER_FILE) + 1
     if sim.IS_SPILL_SIM:
-        event_times = cp.arange(len(tot_evids)) * sim.SPILL_PERIOD
+        event_times = cp.arange(num_evids) * sim.SPILL_PERIOD
     else:
-        event_times = fee.gen_event_times(len(tot_evids), 0)
+        event_times = fee.gen_event_times(num_evids, 0)
 
     if input_has_vertices and not sim.IS_SPILL_SIM:
         uniq_ev, counts = np.unique(vertices['eventID'], return_counts=True)
```
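To make the new comment concrete, here is a minimal standalone sketch of the lookup scheme it describes (plain numpy in place of cupy, and the event IDs and offset are invented for illustration):

```python
import numpy as np

MAX_EVENTS_PER_FILE = 1000   # from larndsim/consts/sim.py in this commit
SPILL_PERIOD = 1.2e6         # microseconds, from the simulation properties

# Hypothetical event IDs carrying a file-level offset (here 5 * 1000),
# with a gap where an event deposited no energy in the LAr.
event_ids = np.array([5000, 5001, 5003, 5004])

# Upper bound on the number of events in this file: the offset is assumed
# to be a multiple of MAX_EVENTS_PER_FILE, so the modulus strips it.
num_evids = (event_ids.max() % MAX_EVENTS_PER_FILE) + 1   # -> 5

# One timestamp slot per possible local ID; gap slots (e.g. local ID 2)
# simply hold timestamps that are never looked up.
event_times = np.arange(num_evids) * SPILL_PERIOD

# Zero-based "local" IDs index directly into event_times.
local_ids = event_ids % MAX_EVENTS_PER_FILE               # -> [0, 1, 3, 4]
print(event_times[local_ids])
```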
```diff
@@ -437,7 +445,7 @@ def save_results(event_times, is_first_event, results):
         results[key] = np.concatenate([cp.asnumpy(arr) for arr in results[key]], axis=0)
 
     uniq_events = cp.asnumpy(np.unique(results['event_id']))
-    uniq_event_times = cp.asnumpy(event_times[uniq_events])
+    uniq_event_times = cp.asnumpy(event_times[uniq_events % sim.MAX_EVENTS_PER_FILE])
     if light.LIGHT_SIMULATED:
         # prep arrays for embedded triggers in charge data stream
         light_trigger_modules = np.array([detector.TPC_TO_MODULE[tpc] for tpc in light.OP_CHANNEL_TO_TPC[results['light_op_channel_idx']][:,0]])
```
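The modulus in `save_results` is what keeps offset event IDs from running off the end of the timestamp table. A toy illustration (made-up IDs, numpy in place of cupy):

```python
import numpy as np

MAX_EVENTS_PER_FILE = 1000
event_times = np.arange(1000) * 1.2e6        # one slot per local event ID

uniq_events = np.array([5000, 5001, 5004])   # offset IDs from results['event_id']

# event_times[uniq_events] would raise IndexError: index 5000 is out of
# bounds for axis 0 with size 1000. The modulus maps back to local IDs:
uniq_event_times = event_times[uniq_events % MAX_EVENTS_PER_FILE]
print(uniq_event_times)
```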
7 changes: 6 additions & 1 deletion larndsim/consts/sim.py
```diff
@@ -17,6 +17,10 @@
 IS_SPILL_SIM = True
 SPILL_PERIOD = 1.2e6 # units = microseconds
 
+# We mod event IDs by MAX_EVENTS_PER_FILE to get zero-based IDs for indexing
+# purposes; see comments in simulate_pixels.py
+MAX_EVENTS_PER_FILE = 1000
+
 def set_simulation_properties(simprop_file):
     """
     The function loads the detector properties and
@@ -34,6 +38,7 @@ def set_simulation_properties(simprop_file):
     global EVENT_SEPARATOR
     global IS_SPILL_SIM
     global SPILL_PERIOD
+    global MAX_EVENTS_PER_FILE
 
 
     with open(simprop_file) as df:
@@ -45,4 +50,4 @@
     EVENT_SEPARATOR = simprop['event_separator']
     IS_SPILL_SIM = simprop['is_spill_sim']
     SPILL_PERIOD = float(simprop['spill_period'])
-
+    MAX_EVENTS_PER_FILE = simprop['max_events_per_file']
```
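For context, a hedged sketch of how a simulation-properties file like those below might be consumed; the actual YAML call sits in an elided part of the diff, so the `safe_load` here is an assumption:

```python
import yaml

# Hypothetical stand-in for set_simulation_properties().
with open('larndsim/simulation_properties/2x2_NuMI_sim.yaml') as df:
    simprop = yaml.safe_load(df)

EVENT_SEPARATOR = simprop['event_separator']          # 'eventID'
IS_SPILL_SIM = simprop['is_spill_sim']                # 1 (truthy int)
# Explicit float(): YAML 1.1 resolvers often read '1.2e6' (exponent
# without a sign) as a string, so the cast guards against that.
SPILL_PERIOD = float(simprop['spill_period'])         # 1.2e6 microseconds
MAX_EVENTS_PER_FILE = simprop['max_events_per_file']  # 1000, new in this commit
```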
1 change: 1 addition & 0 deletions larndsim/simulation_properties/2x2_NuMI_sim.yaml
```diff
@@ -4,3 +4,4 @@ write_batch_size: 1 # batches
 is_spill_sim: 1 # boolean
 spill_period: 1.2e6 # microseconds
 event_separator: 'eventID' # 'eventID'
+max_events_per_file: 1000
```
1 change: 1 addition & 0 deletions larndsim/simulation_properties/singles_sim.yaml
```diff
@@ -4,3 +4,4 @@ write_batch_size: 1000 # batches
 is_spill_sim: 0 # boolean
 spill_period: 1.2e6 # microseconds
 event_separator: 'eventID' # 'eventID'
+max_events_per_file: 1000
```

1 comment on commit 458218e

@sam-fogarty (Member) commented:

@mjkramer I noticed that line 363 will sometimes get an incorrect value for the number of event IDs when max_events_per_file keeps its default of 1000 (looking at non-spill simulations). So you have to manually change max_events_per_file to match the number of events in the file, which seems like an unnecessary step. In one instance even that didn't fix the problem: in a file with 1M events, num_evids came out wrong (999977), leading to an "operands could not be broadcast together" error at the `vertices['t_event'] = np.repeat(event_times.get(), counts)` line. So it looks like we need to be a little careful with how this part of the code is handled.
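One plausible reading of this failure mode, sketched numerically (the 1M-event figures follow the comment above; the gap of track-less events at the end of the file is inferred, not confirmed):

```python
import numpy as np

MAX_EVENTS_PER_FILE = 1_000_000  # manually raised to match the file, per the comment

# Suppose the file has 1,000,000 vertices (event IDs 0..999999), but the
# highest event ID that actually deposited energy (and so appears in
# tracks) is 999976.
max_track_event_id = 999_976

# num_evids is derived from tracks, so it undercounts the vertices:
num_evids = (max_track_event_id % MAX_EVENTS_PER_FILE) + 1  # 999977
event_times = np.arange(num_evids) * 1.2e6

# vertices still contains all 1,000,000 events, one count each:
counts = np.ones(1_000_000, dtype=int)

try:
    np.repeat(event_times, counts)  # the vertices['t_event'] assignment
except ValueError as err:
    # "operands could not be broadcast together with shape (999977,) (1000000,)"
    print(err)
```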
