From fe4c659edeb999805daebd3eca3852a9baffe004 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:44:39 +0200 Subject: [PATCH 001/174] Correct typo in comment --- opengate/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengate/base.py b/opengate/base.py index 8f8870bfc..d4bf942e2 100644 --- a/opengate/base.py +++ b/opengate/base.py @@ -690,7 +690,7 @@ def process_dynamic_parametrisation(self, params): extra_params = {} extra_params["auto_changer"] = params.pop( "auto_changer", True - ) # True of key not found (default) + ) # True if key not found (default) if extra_params["auto_changer"] not in (False, True): fatal( f"Received wrong value type for 'auto_changer': got {type(extra_params['auto_changer'])}, " From b66c66504babe0ef4dc6aa7f0fa6c20224158817 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:45:14 +0200 Subject: [PATCH 002/174] Implement DynamicGateObject.reassign_subset_of_dynamic_params() --- opengate/base.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/opengate/base.py b/opengate/base.py index d4bf942e2..30cb3caa9 100644 --- a/opengate/base.py +++ b/opengate/base.py @@ -750,6 +750,14 @@ def add_dynamic_parametrisation(self, name=None, **params): s += f"{k}: {v}\n" log.debug(s) + def reassign_subset_of_dynamic_params(self, subset): + # loop over all dynamic parametrisations of this object, + for param in self.user_info["dynamic_params"].values(): + for k, v in param.items(): + # extract the subset of entries to the list that are relevant to this process + if k in self.dynamic_user_info: + param[k] = [v[i] for i in subset] + def create_changers(self): # this base class implementation is here to keep inheritance intact. return [] From 0456d040afe01040598efbd70727edde380e612f Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:46:05 +0200 Subject: [PATCH 003/174] Add attribute process_index to SimulationEngine (not used yet) --- opengate/engines.py | 1 + 1 file changed, 1 insertion(+) diff --git a/opengate/engines.py b/opengate/engines.py index d9f31a558..c03f6ff2e 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -1004,6 +1004,7 @@ def __init__(self, simulation, new_process=False): # this is only for info. # Process handling is done in Simulation class, not in SimulationEngine! 
self.new_process = new_process + self.process_index = None # LATER : option to wait the end of completion or not From 30526c8ac83e2bb27667667bd4e20d3adaf031aa Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:49:19 +0200 Subject: [PATCH 004/174] First steps towards multi processing --- opengate/managers.py | 88 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 86 insertions(+), 2 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 425b11cf4..b088a966c 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -7,6 +7,7 @@ import os from pathlib import Path import weakref +import multiprocessing import opengate_core as g4 @@ -1460,6 +1461,7 @@ def __init__(self, name="simulation", **kwargs): self._current_random_seed = None self.expected_number_of_events = None + self.mapping_run_timing_intervals = {} def __str__(self): s = ( @@ -1653,7 +1655,7 @@ def add_filter(self, filter_type, name): def multithreaded(self): return self.number_of_threads > 1 or self.force_multithread_mode - def _run_simulation_engine(self, start_new_process): + def _run_simulation_engine(self, start_new_process, process_index=None): """Method that creates a simulation engine in a context (with ...) and runs a simulation. Args: @@ -1668,10 +1670,55 @@ def _run_simulation_engine(self, start_new_process): with SimulationEngine(self) as se: se.new_process = start_new_process se.init_only = self.init_only + se.process_index = process_index output = se.run_engine() return output - def run(self, start_new_process=False): + def generate_run_timing_interval_map(self, number_of_processes): + if number_of_processes % len(self.run_timing_intervals) != 0: + fatal("number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" + f"but I received {number_of_processes}, while there are {len(self.run_timing_intervals)}.") + + number_of_processes_per_run = int(number_of_processes / len(self.run_timing_intervals)) + run_timing_interval_map = {} + process_index = 0 + for i, rti in enumerate(self.run_timing_intervals): + t_start, t_end = rti + duration_original = t_end - t_start + duration_in_process = duration_original / number_of_processes_per_run + t_intermediate = [t_start + (j+1) * duration_in_process for j in range(number_of_processes_per_run-1)] + t_all = [t_start] + t_intermediate + [t_end] + for t_s, t_e in zip(t_all[:-1], t_all[1:]): + run_timing_interval_map[process_index] = { + 'run_timing_intervals': [[t_s, t_e]], + 'lut_original_rti': [i] + } + process_index += 1 + return run_timing_interval_map + + def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): + # Important: this method is intended to run in a processes spawned off the main process. + # Therefore, self is actually a separate instance from the original simulation + # and we can safely adapt it in this process. 
+ + # adapt the output_dir + self.output_dir = str(Path(self.output_dir) / f'process_{process_index}') + print("self.output_dir = ", self.output_dir) + + # adapt the run timing intervals in + self.run_timing_intervals = run_timing_intervals + # adapt all dynamic volumes + for vol in self.volume_manager.dynamic_volumes: + vol.reassign_subset_of_dynamic_params(lut_original_rti) + print(process_index) + print(f'Volume {vol.name}:') + print(vol.user_info["dynamic_params"]) + + output = self._run_simulation_engine(False, process_index=process_index) + print(process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti) + return output + + def run(self, start_new_process=False, number_of_sub_processes=0): # if windows and MT -> fail if os.name == "nt" and self.multithreaded: fatal( @@ -1679,6 +1726,9 @@ def run(self, start_new_process=False): "Run the simulation with one thread." ) + if number_of_sub_processes == 1: + start_new_process = True + # prepare sub process if start_new_process is True: """Important: put: @@ -1705,6 +1755,40 @@ def run(self, start_new_process=False): source.fTotalSkippedEvents = s.user_info.fTotalSkippedEvents source.fTotalZeroEvents = s.user_info.fTotalZeroEvents + elif number_of_sub_processes > 1: + run_timing_interval_map = self.generate_run_timing_interval_map(number_of_sub_processes) + try: + multiprocessing.set_start_method("spawn") + except RuntimeError: + print("Could not set start method 'spawn'.") + pass + # q = multiprocessing.Queue() + with multiprocessing.Pool(len(run_timing_interval_map)) as pool: + print("pool._outqueue: ", pool._outqueue) # DEMO + results = [pool.apply_async(self.run_in_process, + (k, v['run_timing_intervals'], v['lut_original_rti'],)) + for k, v in run_timing_interval_map.items()] + # `.apply_async()` immediately returns AsyncResult (ApplyResult) object + print(results[0]) # DEMO + list_of_output = [res.get() for res in results] + print(f'list_of_output: {list_of_output}') + return list_of_output + # processes = [] + # for k, v in run_timing_interval_map.items(): + # p = multiprocessing.Process( + # target=target_func, + # args=(q, self.run_in_process, k, v['run_timing_intervals'], v['lut_original_rti']) + # ) + # p.start() + # processes.append(p) + # for p in processes: + # p.join() # (timeout=10) # timeout might be needed + # + # try: + # output = q.get(block=False) + # except queue.Empty: + # fatal("The queue is empty. The spawned process probably died.") + # return output else: # Nothing special to do if the simulation engine ran in the native python process # because everything is already in place. 
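
The splitting logic introduced in this patch divides each run timing interval evenly among the sub-processes assigned to it and records which original interval each slice came from. A minimal self-contained sketch of the resulting mapping, assuming three original intervals and two sub-processes per interval (an illustrative re-implementation with made-up values, not the method on the Simulation class itself):

    # Illustrative only: mirrors the mapping built by generate_run_timing_interval_map().
    run_timing_intervals = [[0.0, 1.0], [1.0, 3.0], [10.0, 15.0]]
    number_of_processes = 6  # must be a multiple of the number of intervals
    per_run = number_of_processes // len(run_timing_intervals)
    mapping = {}
    process_index = 0
    for i, (t_start, t_end) in enumerate(run_timing_intervals):
        step = (t_end - t_start) / per_run
        edges = [t_start + j * step for j in range(per_run + 1)]
        for t_s, t_e in zip(edges[:-1], edges[1:]):
            mapping[process_index] = {
                "run_timing_intervals": [[t_s, t_e]],
                "lut_original_rti": [i],  # original run index this slice belongs to
            }
            process_index += 1
    # mapping[0] == {'run_timing_intervals': [[0.0, 0.5]], 'lut_original_rti': [0]}
    # mapping[5] == {'run_timing_intervals': [[12.5, 15.0]], 'lut_original_rti': [2]}

Each sub-process then simulates only its slice, and 'lut_original_rti' lets the main process map the per-process runs back to the original run timing intervals when the outputs are merged.
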
From a7434e55d4d3012ffd2537281cadd359cbea648a Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:49:36 +0200 Subject: [PATCH 005/174] Add test080_multiprocessing_1.py --- .../tests/src/test080_multiprocessing_1.py | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100755 opengate/tests/src/test080_multiprocessing_1.py diff --git a/opengate/tests/src/test080_multiprocessing_1.py b/opengate/tests/src/test080_multiprocessing_1.py new file mode 100755 index 000000000..3a4aedb30 --- /dev/null +++ b/opengate/tests/src/test080_multiprocessing_1.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +from opengate.utility import g4_units +import opengate as gate +from opengate.tests.utility import get_default_test_paths + + + +if __name__ == "__main__": + paths = get_default_test_paths( + __file__, output_folder="test080" + ) + + s = g4_units.s + + sim = gate.Simulation() + sim.run_timing_intervals = [[0 * s, 1 * s], [1 * s, 3 * s], [10 * s, 15 * s]] + sim.output_dir = paths.output + + box1 = sim.add_volume('BoxVolume', 'box1') + box1.add_dynamic_parametrisation(translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))]) + + n_proc = 4 * len(sim.run_timing_intervals) + run_timing_interval_map = sim.generate_run_timing_interval_map(n_proc) + print(run_timing_interval_map) + + output = sim.run(number_of_sub_processes=n_proc) + + + print("*** output ***") + for o in output: + print(o) + + print(f"ID of the main sim: {id(sim)}") + + ids = [e[2] for e in output] + assert id(sim) not in ids + + From 605192f1e5ee18abd4fc4dcd345318b7c96e5051 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:49:56 +0200 Subject: [PATCH 006/174] Add test030_dose_motion_dynamic_param_multiproc.py --- ...030_dose_motion_dynamic_param_multiproc.py | 133 ++++++++++++++++++ 1 file changed, 133 insertions(+) create mode 100755 opengate/tests/src/test030_dose_motion_dynamic_param_multiproc.py diff --git a/opengate/tests/src/test030_dose_motion_dynamic_param_multiproc.py b/opengate/tests/src/test030_dose_motion_dynamic_param_multiproc.py new file mode 100755 index 000000000..b0c00b92d --- /dev/null +++ b/opengate/tests/src/test030_dose_motion_dynamic_param_multiproc.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import opengate as gate +from scipy.spatial.transform import Rotation +from opengate.tests import utility + +if __name__ == "__main__": + paths = utility.get_default_test_paths( + __file__, "gate_test029_volume_time_rotation", "test030" + ) + + # create the simulation + sim = gate.Simulation() + + # main options + sim.g4_verbose = False + sim.visu = False + sim.random_seed = 983456 + sim.output_dir = paths.output + + # units + m = gate.g4_units.m + mm = gate.g4_units.mm + cm = gate.g4_units.cm + um = gate.g4_units.um + nm = gate.g4_units.nm + MeV = gate.g4_units.MeV + Bq = gate.g4_units.Bq + sec = gate.g4_units.second + + # change world size + sim.world.size = [1 * m, 1 * m, 1 * m] + + # add a simple fake volume to test hierarchy + # translation and rotation like in the Gate macro + fake = sim.add_volume("Box", "fake") + fake.size = [40 * cm, 40 * cm, 40 * cm] + fake.translation = [1 * cm, 2 * cm, 3 * cm] + fake.material = "G4_AIR" + fake.color = [1, 0, 1, 1] + + # waterbox + waterbox = sim.add_volume("Box", "waterbox") + waterbox.mother = fake + waterbox.size = [20 * cm, 20 * cm, 20 * cm] + waterbox.translation = [-3 * cm, -2 * cm, -1 * cm] + waterbox.rotation = Rotation.from_euler("y", -20, degrees=True).as_matrix() + 
waterbox.material = "G4_WATER" + waterbox.color = [0, 0, 1, 1] + + # physics + sim.physics_manager.set_production_cut("world", "all", 700 * um) + + # default source for tests + # the source is fixed at the center, only the volume will move + source = sim.add_source("GenericSource", "mysource") + source.energy.mono = 150 * MeV + source.particle = "proton" + source.position.type = "disc" + source.position.radius = 5 * mm + source.direction.type = "momentum" + source.direction.momentum = [0, 0, 1] + source.activity = 30000 * Bq + + # add dose actor + dose = sim.add_actor("DoseActor", "dose") + dose.output_filename = "test030.mhd" + dose.attached_to = waterbox + dose.size = [99, 99, 99] + mm = gate.g4_units.mm + dose.spacing = [2 * mm, 2 * mm, 2 * mm] + dose.translation = [2 * mm, 3 * mm, -2 * mm] + dose.edep.keep_data_per_run = True + dose.edep.auto_merge = True + dose.edep_uncertainty.active = True + + # add stat actor + stats = sim.add_actor("SimulationStatisticsActor", "Stats") + + # motion + n = 3 + interval_length = 1 * sec / n + sim.run_timing_intervals = [ + (i * interval_length, (i + 1) * interval_length) for i in range(n) + ] + gantry_angles_deg = [i * 20 for i in range(n)] + ( + dynamic_translations, + dynamic_rotations, + ) = gate.geometry.utility.get_transform_orbiting( + initial_position=fake.translation, axis="Y", angle_deg=gantry_angles_deg + ) + fake.add_dynamic_parametrisation( + translation=dynamic_translations, rotation=dynamic_rotations + ) + + # start simulation + sim.run(number_of_sub_processes=3 * len(sim.run_timing_intervals)) + + # # print results at the end + # print(stats) + # + # # tests + # stats_ref = utility.read_stat_file(paths.output_ref / "stats030.txt") + # is_ok = utility.assert_stats(stats, stats_ref, 0.11) + # + # print() + # gate.exception.warning("Difference for EDEP") + # is_ok = ( + # utility.assert_images( + # paths.output_ref / "test030-edep.mhd", + # dose.edep.get_output_path(), + # stats, + # tolerance=30, + # ignore_value=0, + # ) + # and is_ok + # ) + # + # print("\nDifference for uncertainty") + # is_ok = ( + # utility.assert_images( + # paths.output_ref / "test030-edep_uncertainty.mhd", + # dose.edep_uncertainty.get_output_path(), + # stats, + # tolerance=15, + # ignore_value=1, + # ) + # and is_ok + # ) + # + # utility.test_ok(is_ok) From e4bc3a7ac18ae95bfab139d08f2bcbae8bd2f66d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 23:54:15 +0000 Subject: [PATCH 007/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/managers.py | 47 +++++++++++++------ .../tests/src/test080_multiprocessing_1.py | 14 ++---- 2 files changed, 38 insertions(+), 23 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index b088a966c..2f19d0a66 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1676,22 +1676,29 @@ def _run_simulation_engine(self, start_new_process, process_index=None): def generate_run_timing_interval_map(self, number_of_processes): if number_of_processes % len(self.run_timing_intervals) != 0: - fatal("number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" - f"but I received {number_of_processes}, while there are {len(self.run_timing_intervals)}.") + fatal( + "number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" + f"but I received {number_of_processes}, while there are {len(self.run_timing_intervals)}." 
+ ) - number_of_processes_per_run = int(number_of_processes / len(self.run_timing_intervals)) + number_of_processes_per_run = int( + number_of_processes / len(self.run_timing_intervals) + ) run_timing_interval_map = {} process_index = 0 for i, rti in enumerate(self.run_timing_intervals): t_start, t_end = rti duration_original = t_end - t_start duration_in_process = duration_original / number_of_processes_per_run - t_intermediate = [t_start + (j+1) * duration_in_process for j in range(number_of_processes_per_run-1)] + t_intermediate = [ + t_start + (j + 1) * duration_in_process + for j in range(number_of_processes_per_run - 1) + ] t_all = [t_start] + t_intermediate + [t_end] for t_s, t_e in zip(t_all[:-1], t_all[1:]): run_timing_interval_map[process_index] = { - 'run_timing_intervals': [[t_s, t_e]], - 'lut_original_rti': [i] + "run_timing_intervals": [[t_s, t_e]], + "lut_original_rti": [i], } process_index += 1 return run_timing_interval_map @@ -1702,7 +1709,7 @@ def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): # and we can safely adapt it in this process. # adapt the output_dir - self.output_dir = str(Path(self.output_dir) / f'process_{process_index}') + self.output_dir = str(Path(self.output_dir) / f"process_{process_index}") print("self.output_dir = ", self.output_dir) # adapt the run timing intervals in @@ -1711,11 +1718,13 @@ def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): for vol in self.volume_manager.dynamic_volumes: vol.reassign_subset_of_dynamic_params(lut_original_rti) print(process_index) - print(f'Volume {vol.name}:') + print(f"Volume {vol.name}:") print(vol.user_info["dynamic_params"]) output = self._run_simulation_engine(False, process_index=process_index) - print(process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti) + print( + process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti + ) return output def run(self, start_new_process=False, number_of_sub_processes=0): @@ -1756,7 +1765,9 @@ def run(self, start_new_process=False, number_of_sub_processes=0): source.fTotalZeroEvents = s.user_info.fTotalZeroEvents elif number_of_sub_processes > 1: - run_timing_interval_map = self.generate_run_timing_interval_map(number_of_sub_processes) + run_timing_interval_map = self.generate_run_timing_interval_map( + number_of_sub_processes + ) try: multiprocessing.set_start_method("spawn") except RuntimeError: @@ -1765,13 +1776,21 @@ def run(self, start_new_process=False, number_of_sub_processes=0): # q = multiprocessing.Queue() with multiprocessing.Pool(len(run_timing_interval_map)) as pool: print("pool._outqueue: ", pool._outqueue) # DEMO - results = [pool.apply_async(self.run_in_process, - (k, v['run_timing_intervals'], v['lut_original_rti'],)) - for k, v in run_timing_interval_map.items()] + results = [ + pool.apply_async( + self.run_in_process, + ( + k, + v["run_timing_intervals"], + v["lut_original_rti"], + ), + ) + for k, v in run_timing_interval_map.items() + ] # `.apply_async()` immediately returns AsyncResult (ApplyResult) object print(results[0]) # DEMO list_of_output = [res.get() for res in results] - print(f'list_of_output: {list_of_output}') + print(f"list_of_output: {list_of_output}") return list_of_output # processes = [] # for k, v in run_timing_interval_map.items(): diff --git a/opengate/tests/src/test080_multiprocessing_1.py b/opengate/tests/src/test080_multiprocessing_1.py index 3a4aedb30..d79819b2e 100755 --- a/opengate/tests/src/test080_multiprocessing_1.py +++ 
b/opengate/tests/src/test080_multiprocessing_1.py @@ -5,11 +5,8 @@ from opengate.tests.utility import get_default_test_paths - if __name__ == "__main__": - paths = get_default_test_paths( - __file__, output_folder="test080" - ) + paths = get_default_test_paths(__file__, output_folder="test080") s = g4_units.s @@ -17,8 +14,10 @@ sim.run_timing_intervals = [[0 * s, 1 * s], [1 * s, 3 * s], [10 * s, 15 * s]] sim.output_dir = paths.output - box1 = sim.add_volume('BoxVolume', 'box1') - box1.add_dynamic_parametrisation(translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))]) + box1 = sim.add_volume("BoxVolume", "box1") + box1.add_dynamic_parametrisation( + translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))] + ) n_proc = 4 * len(sim.run_timing_intervals) run_timing_interval_map = sim.generate_run_timing_interval_map(n_proc) @@ -26,7 +25,6 @@ output = sim.run(number_of_sub_processes=n_proc) - print("*** output ***") for o in output: print(o) @@ -35,5 +33,3 @@ ids = [e[2] for e in output] assert id(sim) not in ids - - From 523fdc9cae26b2ce909939d2618e7318ea7a4155 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 11 Oct 2024 12:13:08 +0200 Subject: [PATCH 008/174] Implement MultiProcessingHandler classes --- opengate/processing.py | 100 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 99 insertions(+), 1 deletion(-) diff --git a/opengate/processing.py b/opengate/processing.py index 7de54292e..fa3d63c7a 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -2,7 +2,7 @@ import queue from .exception import fatal - +from .base import GateObject # define thin wrapper function to handle the queue def target_func(q, f, *args, **kwargs): @@ -28,3 +28,101 @@ def dispatch_to_subprocess(func, *args, **kwargs): return q.get(block=False) except queue.Empty: fatal("The queue is empty. The spawned process probably died.") + + +def _setter_hook_number_of_processes(self, number_of_processes): + if self.number_of_processes != number_of_processes: + self._dispatch_configuration = {} + self.process_run_index_map = {} + self.inverse_process_to_run_index_map = {} + return number_of_processes + + +class MultiProcessingHandlerBase(GateObject): + + user_info_defaults = { + 'number_of_processes': ( + 1, + { + "doc": "In how many parallel process should the simulation be run? " + "Must be a multiple of the number of run timing intervals. 
", + "setter_hook": _setter_hook_number_of_processes, + } + ) + } + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._dispatch_configuration = {} + self.process_run_index_map = {} + self.inverse_process_to_run_index_map = {} + + @property + def original_run_timing_intervals(self): + return self.simulation.run_timing_intervals + + @property + def dispatch_configuration(self): + return self._dispatch_configuration + + @dispatch_configuration.setter + def dispatch_configuration(self, config): + self._dispatch_configuration = config + self.update_process_to_run_index_map() + self.update_inverse_process_to_run_index_map() + + @property + def original_run_timing_indices(self): + return [i for i in range(len(self.original_run_timing_intervals))] + + def initialize(self): + self.generate_dispatch_configuration() + + def generate_dispatch_configuration(self): + raise NotImplementedError + + def update_process_to_run_index_map(self): + """Creates a mapping (process index, local run index) -> (original run index) + """ + if self.dispatch_configuration is None or len(self.dispatch_configuration) == 0: + fatal("Unable to update the mapping 'process to original run index' " + "because no dispatch configuration is available. ") + p_r_map = {} + for k, v in self.dispatch_configuration.items(): + for lri, ori in enumerate(v['lut_original_rti']): + p_r_map[(k, lri)] = ori + self.process_run_index_map = p_r_map + + def update_inverse_process_to_run_index_map(self): + p_r_map_inv = dict([(i, []) for i in set(self.process_run_index_map.values())]) + for k, v in self.process_run_index_map.items(): + p_r_map_inv[v].append(k) + self.inverse_process_to_run_index_map = p_r_map_inv + + +class MultiProcessingHandlerEqualPerRunTimingInterval(MultiProcessingHandlerBase): + + def generate_dispatch_configuration(self): + if self.number_of_processes % len(self.original_run_timing_intervals) != 0: + fatal("number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" + f"but I received {self.number_of_processes}, while there are {len(self.original_run_timing_intervals)}.") + + number_of_processes_per_run = int(self.number_of_processes / len(self.original_run_timing_intervals)) + dispatch_configuration = {} + process_index = 0 + for i, rti in enumerate(self.original_run_timing_intervals): + t_start, t_end = rti + duration_original = t_end - t_start + duration_in_process = duration_original / number_of_processes_per_run + t_intermediate = [t_start + (j+1) * duration_in_process for j in range(number_of_processes_per_run-1)] + t_all = [t_start] + t_intermediate + [t_end] + for t_s, t_e in zip(t_all[:-1], t_all[1:]): + dispatch_configuration[process_index] = { + 'run_timing_intervals': [[t_s, t_e]], + 'lut_original_rti': [i], + 'process_id': None + } + process_index += 1 + self.dispatch_configuration = dispatch_configuration + return dispatch_configuration + From cea24cf629e7a2c07bc9c68dfb4ce18bb5b1c8e5 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 11 Oct 2024 12:13:21 +0200 Subject: [PATCH 009/174] create test080_multiprocessing_handler.py --- .../src/test080_multiprocessing_handler.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100755 opengate/tests/src/test080_multiprocessing_handler.py diff --git a/opengate/tests/src/test080_multiprocessing_handler.py b/opengate/tests/src/test080_multiprocessing_handler.py new file mode 100755 index 000000000..daabbff61 --- /dev/null +++ b/opengate/tests/src/test080_multiprocessing_handler.py @@ -0,0 
+1,29 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +from opengate.utility import g4_units +import opengate as gate +from opengate.tests.utility import get_default_test_paths +from opengate.processing import MultiProcessingHandlerEqualPerRunTimingInterval + + + +if __name__ == "__main__": + paths = get_default_test_paths( + __file__, output_folder="test080" + ) + + s = g4_units.s + + sim = gate.Simulation() + sim.run_timing_intervals = [[0 * s, 1 * s], [1 * s, 3 * s], [10 * s, 15 * s]] + sim.output_dir = paths.output + + box1 = sim.add_volume('BoxVolume', 'box1') + box1.add_dynamic_parametrisation(translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))]) + + n_proc = 4 * len(sim.run_timing_intervals) + + multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval(name='multi_proc_handler', + simulation=sim, + number_of_processes=n_proc) + From ae376751cb722bcfd2b0c46948cf5113fcc77bcb Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:13:00 +0200 Subject: [PATCH 010/174] Implement import_data_from_actor_output() --- opengate/actors/actoroutput.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 011716734..ea59795dd 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -367,6 +367,9 @@ def load_data(self, which): f"but it should be implemented in the specific derived class" ) + def import_data_from_actor_output(self, *actor_output, **kwargs): + raise NotImplementedError("This is the base class. ") + class MergeableActorOutput(ActorOutputBase): @@ -416,6 +419,22 @@ def end_of_simulation(self, **kwargs): f"A developer needs to fix this. " ) + def import_data_from_actor_output(self, *actor_output, discard_existing_data=True): + run_indices_to_import = set() + for ao in actor_output: + run_indices_to_import.union(ao.data_per_run.keys()) + which_output_per_run_index = dict([(r, [ao for ao in actor_output if r in ao.data_per_run]) for r in run_indices_to_import]) + for r in run_indices_to_import: + data_to_import = [ao.data_per_run[r] for ao in which_output_per_run_index[r]] + if discard_existing_data is False and r in self.data_per_run: + data_to_import.append(self.data_per_run[r]) + self.data_per_run[r] = merge_data(data_to_import) + merged_data_to_import = [ao.merged_data for ao in actor_output if ao.merged_data is not None] + if discard_existing_data is False and self.merged_data is not None: + merged_data_to_import.append(self.merged_data) + if len(merged_data_to_import) > 0: + self.merged_data = merge_data(merged_data_to_import) + class ActorOutputUsingDataItemContainer(MergeableActorOutput): From 9c74aa28cd61d7fd3af8f97e03de6e805151ca58 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:14:40 +0200 Subject: [PATCH 011/174] Implement import_user_output_from_actor() --- opengate/actors/base.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/opengate/actors/base.py b/opengate/actors/base.py index ad579ca81..9075ee242 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -423,6 +423,19 @@ def recover_user_output(self, actor): for v in self.interfaces_to_user_output.values(): v.belongs_to_actor = self + def import_user_output_from_actor(self, *actor): + if not all([self.type_name == a.type_name for a in actor]): + fatal("An actor can only import user output from the same type of actor.") + if len(actor) == 1: + self.recover_user_output(actor[0]) + else: + for k in self.user_output: + 
try: + self.user_output[k].import_data_from_actor_output(*[a.user_output[k] for a in actor]) + except NotImplementedError: + self.warn_user(f"User output {k} in {self.type_name} cannot be imported " + f"because the function is not yet implemented for this type of output.") + def store_output_data(self, output_name, run_index, *data): self._assert_output_exists(output_name) self.user_output[output_name].store_data(run_index, *data) From 87af53e3013a25b21110bfc9057285e159e501f6 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:14:56 +0200 Subject: [PATCH 012/174] Implement import_user_output_from_actor in ActorBase --- opengate/actors/base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/opengate/actors/base.py b/opengate/actors/base.py index 9075ee242..826f3452d 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -464,3 +464,6 @@ def StartSimulationAction(self): def EndSimulationAction(self): """Default virtual method for inheritance""" pass + + def FinalizeSimulation(self): + pass From 196d954d11ff4f1b0143ed46dc0780ec2e29da37 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:16:11 +0200 Subject: [PATCH 013/174] Change local variable name in reassign_dynamic_params_for_process() --- opengate/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/opengate/base.py b/opengate/base.py index 30cb3caa9..d0de201cd 100644 --- a/opengate/base.py +++ b/opengate/base.py @@ -750,13 +750,13 @@ def add_dynamic_parametrisation(self, name=None, **params): s += f"{k}: {v}\n" log.debug(s) - def reassign_subset_of_dynamic_params(self, subset): + def reassign_dynamic_params_for_process(self, run_indices): # loop over all dynamic parametrisations of this object, for param in self.user_info["dynamic_params"].values(): for k, v in param.items(): # extract the subset of entries to the list that are relevant to this process if k in self.dynamic_user_info: - param[k] = [v[i] for i in subset] + param[k] = [v[i] for i in run_indices] def create_changers(self): # this base class implementation is here to keep inheritance intact. 
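
With this rename, the intent of the method is clearer: each sub-process keeps only the entries of a dynamic parametrisation that correspond to the original run indices it simulates. A minimal sketch of the intended behaviour, using made-up translation values for three original runs (the variable names here are illustrative, not part of the patch):

    # Illustrative only: what reassign_dynamic_params_for_process(run_indices)
    # is meant to do for one dynamic parametrisation spanning 3 original runs.
    dynamic_params = {"translation": [[0, 0, 0], [1, 1, 1], [2, 2, 2]]}
    run_indices = [1]  # this sub-process handles only the second original run
    subset = {k: [v[i] for i in run_indices] for k, v in dynamic_params.items()}
    # subset == {"translation": [[1, 1, 1]]}

The reduced lists line up with the sub-process' own run_timing_intervals assigned in run_in_process(), one entry per local run.
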
From 138a44f81eaecfbe03072c063360d51107ed82a3 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:17:09 +0200 Subject: [PATCH 014/174] Implement FinalizeSimulation() in VoxelDepositActor --- opengate/actors/doseactors.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/opengate/actors/doseactors.py b/opengate/actors/doseactors.py index 85c9d6f80..9f6c15c8d 100644 --- a/opengate/actors/doseactors.py +++ b/opengate/actors/doseactors.py @@ -246,12 +246,17 @@ def EndOfRunActionMasterThread(self, run_index): u.end_of_run(run_index) return 0 - def EndSimulationAction(self): - # inform actor output that this simulation is over and write data + def inform_user_output_about_end(self): for u in self.user_output.values(): if u.get_active(item="all"): u.end_of_simulation() + def EndSimulationAction(self): + self.inform_user_output_about_end() + + def FinalizeSimulation(self): + self.inform_user_output_about_end() + def compute_std_from_sample( number_of_samples, value_array, squared_value_array, correct_bias=False From a796bec9ab4326b76b1c64b259344005c54df2a6 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:17:40 +0200 Subject: [PATCH 015/174] Add simulation_id to SimulationOutput --- opengate/engines.py | 1 + 1 file changed, 1 insertion(+) diff --git a/opengate/engines.py b/opengate/engines.py index c03f6ff2e..0a39323b7 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -910,6 +910,7 @@ def __init__(self): self.sources_by_thread = {} self.pid = os.getpid() self.ppid = os.getppid() + self.simulation_id = None self.current_random_seed = None self.user_hook_log = [] self.warnings = None From 1375e4fb922c1a0ffc8a92b600d21b5b63a44e8d Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:18:23 +0200 Subject: [PATCH 016/174] Implement SimulationOutput.store_output_from_simulation_engine() --- opengate/engines.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/opengate/engines.py b/opengate/engines.py index 0a39323b7..6974387b9 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -915,6 +915,15 @@ def __init__(self): self.user_hook_log = [] self.warnings = None + def store_output_from_simulation_engine(self, simulation_engine): + self.store_actors(simulation_engine) + self.store_sources(simulation_engine) + self.store_hook_log(simulation_engine) + self.current_random_seed = simulation_engine.current_random_seed + self.expected_number_of_events = simulation_engine.source_engine.expected_number_of_events + self.warnings = simulation_engine.simulation.warnings + self.simulation_id = id(simulation_engine.simulation) + def store_actors(self, simulation_engine): self.actors = simulation_engine.simulation.actor_manager.actors for actor in self.actors.values(): @@ -1161,12 +1170,14 @@ def run_engine(self): self.user_hook_after_run(self) # prepare the output - output.store_actors(self) - output.store_sources(self) - output.store_hook_log(self) - output.current_random_seed = self.current_random_seed - output.expected_number_of_events = self.source_engine.expected_number_of_events - output.warnings = self.simulation.warnings + output.store_output_from_simulation_engine(self) + # output.store_actors(self) + # output.store_sources(self) + # output.store_hook_log(self) + # output.current_random_seed = self.current_random_seed + # output.expected_number_of_events = self.source_engine.expected_number_of_events + # output.warnings = self.simulation.warnings + # output.simulation_id = 
id(self.simulation) return output From fc90573746c021abc1a0bbde6446d8390371d8a3 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:20:44 +0200 Subject: [PATCH 017/174] Simplify code in run_engine() --- opengate/engines.py | 36 +++++++++--------------------------- 1 file changed, 9 insertions(+), 27 deletions(-) diff --git a/opengate/engines.py b/opengate/engines.py index 6974387b9..f90adf1bf 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -1149,36 +1149,18 @@ def run_engine(self): log.info("Simulation: initialize user fct") self.user_hook_after_init(self) - # if init only, we stop - if self.simulation.init_only: - output.store_actors(self) - output.store_sources(self) - output.store_hook_log(self) - output.current_random_seed = self.current_random_seed - output.expected_number_of_events = ( - self.source_engine.expected_number_of_events - ) - return output - - # go - self.start_and_stop() - - # start visualization if vrml or gdml - self.visu_engine.start_visualisation() - if self.user_hook_after_run: - log.info("Simulation: User hook after run") - self.user_hook_after_run(self) + # if init only, we skip the actual run + if not self.simulation.init_only: + # go + self.start_and_stop() + # start visualization if vrml or gdml + self.visu_engine.start_visualisation() + if self.user_hook_after_run: + log.info("Simulation: User hook after run") + self.user_hook_after_run(self) # prepare the output output.store_output_from_simulation_engine(self) - # output.store_actors(self) - # output.store_sources(self) - # output.store_hook_log(self) - # output.current_random_seed = self.current_random_seed - # output.expected_number_of_events = self.source_engine.expected_number_of_events - # output.warnings = self.simulation.warnings - # output.simulation_id = id(self.simulation) - return output def start_and_stop(self): From 91f6b7fff76d33dabc1ab15a328c2babcef59dfb Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:21:13 +0200 Subject: [PATCH 018/174] Implement SimulationMetaData class --- opengate/managers.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index 2f19d0a66..573ff4f46 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1193,6 +1193,39 @@ def print_material_database_names(self): print(self.dump_material_database_names()) +class SimulationMetaData(Box): + + def __init__(self, *args, simulation_output=None, **kwargs): + super().__init__(*args, **kwargs) + self.warnings = [] + self.expected_number_of_events = 0 # FIXME workaround + self.user_hook_log = [] + self.current_random_seed = None + self.number_of_sub_processes = None + self.start_new_process = None + if simulation_output is not None: + self.import_from_simulation_output(simulation_output) + + def reset_warnings(self): + self.warnings = [] + + def import_from_simulation_meta_data(self, *meta_data): + for m in meta_data: + self.warnings.extend(m.warnings) + self.expected_number_of_events += m.expected_number_of_events + self.user_hook_log.extend(m.user_hook_log) + if self.current_random_seed is None: + self.current_random_seed = m.current_random_seed + + def import_from_simulation_output(self, *sim_output): + for so in sim_output: + self.warnings.extend(so.warnings) + self.expected_number_of_events += so.expected_number_of_events + self.user_hook_log.extend(so.user_hook_log) + if self.current_random_seed is None: + self.current_random_seed = so.current_random_seed + + def 
setter_hook_verbose_level(self, verbose_level): try: level = int(verbose_level) From 722552aaf70db5450a0113cd07a3889eb4ac1936 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:23:03 +0200 Subject: [PATCH 019/174] Use SimulationMetaData in Simulation --- opengate/managers.py | 30 +++++++++--------------------- 1 file changed, 9 insertions(+), 21 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 573ff4f46..f8d0bd216 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1471,13 +1471,13 @@ def __init__(self, name="simulation", **kwargs): kwargs.pop("simulation", None) super().__init__(name=name, **kwargs) - # list to store warning messages issued somewhere in the simulation - self._user_warnings = [] - # for debug only self.verbose_getstate = False self.verbose_close = False + self.meta_data = SimulationMetaData() + self.meta_data_per_process = {} + # main managers self.volume_manager = VolumeManager(self) self.source_manager = SourceManager(self) @@ -1488,13 +1488,12 @@ def __init__(self, name="simulation", **kwargs): # hook functions self.user_hook_after_init = None self.user_hook_after_run = None - self.user_hook_log = None - - # read-only info - self._current_random_seed = None - self.expected_number_of_events = None - self.mapping_run_timing_intervals = {} + def __getattr__(self, item): + try: + return self.meta_data[item] + except KeyError: + raise AttributeError(f"Item {item} not found in {type(self)}, nor in the simulation meta data. ") def __str__(self): s = ( @@ -1522,21 +1521,10 @@ def use_multithread(self): def world(self): return self.volume_manager.world_volume - @property - def current_random_seed(self): - return self._current_random_seed - - @property - def warnings(self): - return self._user_warnings - - def reset_warnings(self): - self._user_warnings = [] - def warn_user(self, message): # We need this specific implementation because the Simulation does not hold a reference 'simulation', # as required by the base class implementation of warn_user() - self._user_warnings.append(message) + self.warnings.append(message) super().warn_user(message) def to_dictionary(self): From c7a317be6f779395e7d56115df6719222ecec6a5 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:24:15 +0200 Subject: [PATCH 020/174] Update MultiProcessingHandlerBase class --- opengate/processing.py | 34 +++++++++++++++++++++------------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/opengate/processing.py b/opengate/processing.py index fa3d63c7a..27ac14901 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -68,37 +68,45 @@ def dispatch_configuration(self): @dispatch_configuration.setter def dispatch_configuration(self, config): self._dispatch_configuration = config - self.update_process_to_run_index_map() - self.update_inverse_process_to_run_index_map() + self.update_process_to_run_index_maps() - @property - def original_run_timing_indices(self): - return [i for i in range(len(self.original_run_timing_intervals))] + def assert_dispatch_configuration(self): + if self.dispatch_configuration is None or len(self.dispatch_configuration) == 0: + fatal("No dispatch configuration is available. 
") def initialize(self): self.generate_dispatch_configuration() + def get_original_run_timing_indices_for_process(self, process_index): + return self.dispatch_configuration[process_index]['lut_original_rti'] + + def get_run_timing_intervals_for_process(self, process_index): + return self.dispatch_configuration[process_index]['run_timing_intervals'] + def generate_dispatch_configuration(self): raise NotImplementedError - def update_process_to_run_index_map(self): + def update_process_to_run_index_maps(self): """Creates a mapping (process index, local run index) -> (original run index) """ - if self.dispatch_configuration is None or len(self.dispatch_configuration) == 0: - fatal("Unable to update the mapping 'process to original run index' " - "because no dispatch configuration is available. ") + self.assert_dispatch_configuration() + p_r_map = {} for k, v in self.dispatch_configuration.items(): for lri, ori in enumerate(v['lut_original_rti']): p_r_map[(k, lri)] = ori - self.process_run_index_map = p_r_map - def update_inverse_process_to_run_index_map(self): - p_r_map_inv = dict([(i, []) for i in set(self.process_run_index_map.values())]) - for k, v in self.process_run_index_map.items(): + # and the inverse + p_r_map_inv = dict([(i, []) for i in set(p_r_map.values())]) + for k, v in p_r_map.items(): p_r_map_inv[v].append(k) + + self.process_run_index_map = p_r_map self.inverse_process_to_run_index_map = p_r_map_inv + def dispatch_to_processes(self, dispatch_function, *args): + return [dispatch_function(i, *args) for i in range(len(self.dispatch_configuration))] + class MultiProcessingHandlerEqualPerRunTimingInterval(MultiProcessingHandlerBase): From 52bde4f2d5b1d579ccc59c72b2d8614f6a427f96 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:25:50 +0200 Subject: [PATCH 021/174] Remove obsolete generate_run_timing_interval_map() method --- opengate/managers.py | 57 +++++++------------------------------------- 1 file changed, 8 insertions(+), 49 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index f8d0bd216..0baf77138 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1695,42 +1695,13 @@ def _run_simulation_engine(self, start_new_process, process_index=None): output = se.run_engine() return output - def generate_run_timing_interval_map(self, number_of_processes): - if number_of_processes % len(self.run_timing_intervals) != 0: - fatal( - "number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" - f"but I received {number_of_processes}, while there are {len(self.run_timing_intervals)}." - ) - - number_of_processes_per_run = int( - number_of_processes / len(self.run_timing_intervals) - ) - run_timing_interval_map = {} - process_index = 0 - for i, rti in enumerate(self.run_timing_intervals): - t_start, t_end = rti - duration_original = t_end - t_start - duration_in_process = duration_original / number_of_processes_per_run - t_intermediate = [ - t_start + (j + 1) * duration_in_process - for j in range(number_of_processes_per_run - 1) - ] - t_all = [t_start] + t_intermediate + [t_end] - for t_s, t_e in zip(t_all[:-1], t_all[1:]): - run_timing_interval_map[process_index] = { - "run_timing_intervals": [[t_s, t_e]], - "lut_original_rti": [i], - } - process_index += 1 - return run_timing_interval_map - def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): # Important: this method is intended to run in a processes spawned off the main process. 
# Therefore, self is actually a separate instance from the original simulation # and we can safely adapt it in this process. # adapt the output_dir - self.output_dir = str(Path(self.output_dir) / f"process_{process_index}") + self.output_dir = str(Path(self.output_dir) / f'process_{process_index}') print("self.output_dir = ", self.output_dir) # adapt the run timing intervals in @@ -1739,13 +1710,11 @@ def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): for vol in self.volume_manager.dynamic_volumes: vol.reassign_subset_of_dynamic_params(lut_original_rti) print(process_index) - print(f"Volume {vol.name}:") + print(f'Volume {vol.name}:') print(vol.user_info["dynamic_params"]) output = self._run_simulation_engine(False, process_index=process_index) - print( - process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti - ) + print(process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti) return output def run(self, start_new_process=False, number_of_sub_processes=0): @@ -1786,9 +1755,7 @@ def run(self, start_new_process=False, number_of_sub_processes=0): source.fTotalZeroEvents = s.user_info.fTotalZeroEvents elif number_of_sub_processes > 1: - run_timing_interval_map = self.generate_run_timing_interval_map( - number_of_sub_processes - ) + run_timing_interval_map = self.generate_run_timing_interval_map(number_of_sub_processes) try: multiprocessing.set_start_method("spawn") except RuntimeError: @@ -1797,21 +1764,13 @@ def run(self, start_new_process=False, number_of_sub_processes=0): # q = multiprocessing.Queue() with multiprocessing.Pool(len(run_timing_interval_map)) as pool: print("pool._outqueue: ", pool._outqueue) # DEMO - results = [ - pool.apply_async( - self.run_in_process, - ( - k, - v["run_timing_intervals"], - v["lut_original_rti"], - ), - ) - for k, v in run_timing_interval_map.items() - ] + results = [pool.apply_async(self.run_in_process, + (k, v['run_timing_intervals'], v['lut_original_rti'],)) + for k, v in run_timing_interval_map.items()] # `.apply_async()` immediately returns AsyncResult (ApplyResult) object print(results[0]) # DEMO list_of_output = [res.get() for res in results] - print(f"list_of_output: {list_of_output}") + print(f'list_of_output: {list_of_output}') return list_of_output # processes = [] # for k, v in run_timing_interval_map.items(): From 3d1448bea3d222cff946e62762e554167e3463d8 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:26:06 +0200 Subject: [PATCH 022/174] Update imports in managers.py --- opengate/managers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 0baf77138..5277b2aba 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -47,7 +47,7 @@ ) from .userinfo import UserInfo from .serialization import dump_json, dumps_json, loads_json, load_json -from .processing import dispatch_to_subprocess +from .processing import dispatch_to_subprocess, MultiProcessingHandlerEqualPerRunTimingInterval from .geometry.volumes import ( VolumeBase, @@ -1695,7 +1695,7 @@ def _run_simulation_engine(self, start_new_process, process_index=None): output = se.run_engine() return output - def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): + def run_in_process(self, multi_process_handler, process_index, avoid_write_to_disk_in_subprocess): # Important: this method is intended to run in a processes spawned off the main process. 
# Therefore, self is actually a separate instance from the original simulation # and we can safely adapt it in this process. From fc0d1556a88028c43722ed207ef16e07f1427599 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:26:37 +0200 Subject: [PATCH 023/174] Update run_in_process() --- opengate/managers.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 5277b2aba..c86d51176 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1705,16 +1705,26 @@ def run_in_process(self, multi_process_handler, process_index, avoid_write_to_di print("self.output_dir = ", self.output_dir) # adapt the run timing intervals in - self.run_timing_intervals = run_timing_intervals + self.run_timing_intervals = multi_process_handler.get_run_timing_intervals_for_process(process_index) # adapt all dynamic volumes for vol in self.volume_manager.dynamic_volumes: - vol.reassign_subset_of_dynamic_params(lut_original_rti) + vol.reassign_dynamic_params_for_process( + multi_process_handler.get_original_run_timing_indices_for_process(process_index) + ) print(process_index) print(f'Volume {vol.name}:') print(vol.user_info["dynamic_params"]) - output = self._run_simulation_engine(False, process_index=process_index) - print(process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti) + if avoid_write_to_disk_in_subprocess is True: + for actor in self.actor_manager.actors.values(): + actor.write_to_disk = False + + output = self._run_simulation_engine(True, process_index=process_index) + print(process_index, + os.getpid(), + id(self), + multi_process_handler.get_run_timing_intervals_for_process(process_index), + multi_process_handler.get_original_run_timing_indices_for_process(process_index)) return output def run(self, start_new_process=False, number_of_sub_processes=0): From 8b43fb26838007572adc5f1b9d5096b62a9f8209 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:27:06 +0200 Subject: [PATCH 024/174] Store number_of_sub_processes and start_new_process in simulation_meta_data --- opengate/managers.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index c86d51176..6cc6e88fd 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1738,6 +1738,9 @@ def run(self, start_new_process=False, number_of_sub_processes=0): if number_of_sub_processes == 1: start_new_process = True + self.meta_data.number_of_sub_processes = number_of_sub_processes + self.meta_data.start_new_process = start_new_process + # prepare sub process if start_new_process is True: """Important: put: From a544da9dba39b62957e3f5d86d4800e1a80f6ca8 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:27:54 +0200 Subject: [PATCH 025/174] Introduce avoid_write_to_disk_in_subprocess kwarg in Simulation.run() --- opengate/managers.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 6cc6e88fd..386ce60c1 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1727,7 +1727,7 @@ def run_in_process(self, multi_process_handler, process_index, avoid_write_to_di multi_process_handler.get_original_run_timing_indices_for_process(process_index)) return output - def run(self, start_new_process=False, number_of_sub_processes=0): + def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to_disk_in_subprocess=True): # if windows and MT -> fail if os.name == "nt" and 
self.multithreaded: fatal( @@ -1778,8 +1778,7 @@ def run(self, start_new_process=False, number_of_sub_processes=0): with multiprocessing.Pool(len(run_timing_interval_map)) as pool: print("pool._outqueue: ", pool._outqueue) # DEMO results = [pool.apply_async(self.run_in_process, - (k, v['run_timing_intervals'], v['lut_original_rti'],)) - for k, v in run_timing_interval_map.items()] + (multi_proc_handler, i, avoid_write_to_disk_in_subprocess)) for i in range(number_of_sub_processes)] # `.apply_async()` immediately returns AsyncResult (ApplyResult) object print(results[0]) # DEMO list_of_output = [res.get() for res in results] From 19c9b1734743f4f80e6ecb6f7234cd4587f8cb33 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:29:09 +0200 Subject: [PATCH 026/174] Use multi_proc_handler in Simulation.run() --- opengate/managers.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 386ce60c1..32e4c5dd4 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1768,14 +1768,17 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to source.fTotalZeroEvents = s.user_info.fTotalZeroEvents elif number_of_sub_processes > 1: - run_timing_interval_map = self.generate_run_timing_interval_map(number_of_sub_processes) + multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval(name='multi_proc_handler', + simulation=self, + number_of_processes=number_of_sub_processes) + multi_proc_handler.initialize() try: multiprocessing.set_start_method("spawn") except RuntimeError: print("Could not set start method 'spawn'.") pass # q = multiprocessing.Queue() - with multiprocessing.Pool(len(run_timing_interval_map)) as pool: + with multiprocessing.Pool(number_of_sub_processes) as pool: print("pool._outqueue: ", pool._outqueue) # DEMO results = [pool.apply_async(self.run_in_process, (multi_proc_handler, i, avoid_write_to_disk_in_subprocess)) for i in range(number_of_sub_processes)] From 23fb55e1d50e3d9491352d3f93468dda362b75b4 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:29:59 +0200 Subject: [PATCH 027/174] Trigger import_user_output_from_actor() after multi_proc run --- opengate/managers.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index 32e4c5dd4..60fc96dda 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1787,6 +1787,9 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to list_of_output = [res.get() for res in results] print(f'list_of_output: {list_of_output}') return list_of_output + + for actor in self.actor_manager.actors.values(): + actor.import_user_output_from_actor(*[o.get_actor(actor.name) for o in list_of_output]) # processes = [] # for k, v in run_timing_interval_map.items(): # p = multiprocessing.Process( From a1ce4764512a373a9d831d990f8a8311e4c6a7fd Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:31:14 +0200 Subject: [PATCH 028/174] store meta_data after run --- opengate/managers.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index 60fc96dda..c16cbaed1 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1767,6 +1767,8 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to source.fTotalSkippedEvents = s.user_info.fTotalSkippedEvents source.fTotalZeroEvents = s.user_info.fTotalZeroEvents + self.meta_data.import_from_simulation_output(output) + 
elif number_of_sub_processes > 1: multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval(name='multi_proc_handler', simulation=self, @@ -1790,6 +1792,10 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to for actor in self.actor_manager.actors.values(): actor.import_user_output_from_actor(*[o.get_actor(actor.name) for o in list_of_output]) + + self.meta_data.import_from_simulation_output(*list_of_output) + for i, o in enumerate(list_of_output): + self.meta_data_per_process[i] = SimulationMetaData(simulation_output=o) # processes = [] # for k, v in run_timing_interval_map.items(): # p = multiprocessing.Process( @@ -1810,6 +1816,7 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to # Nothing special to do if the simulation engine ran in the native python process # because everything is already in place. output = self._run_simulation_engine(False) + self.meta_data.import_from_simulation_output(output) self._user_warnings.extend(output.warnings) From a1209875bd27994edde77e09921ee03bf88de795 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:32:45 +0200 Subject: [PATCH 029/174] Get parameters from sources after multiproc run --- opengate/managers.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index c16cbaed1..3222729b6 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1812,6 +1812,21 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to # except queue.Empty: # fatal("The queue is empty. The spawned process probably died.") # return output + + # FIXME: temporary workaround to collect extra info from output + # will be implemented similar to actor.import_user_output_from_actor after source refactoring + for source in self.source_manager.user_info_sources.values(): + for o in list_of_output: + try: + s = o.get_source(source.name) + except: + continue + if "fTotalSkippedEvents" in s.user_info.__dict__: + if not hasattr(source, "fTotalSkippedEvents"): + source.fTotalSkippedEvents = 0 + source.fTotalZeroEvents = 0 + source.fTotalSkippedEvents += s.user_info.fTotalSkippedEvents + source.fTotalZeroEvents += s.user_info.fTotalZeroEvents else: # Nothing special to do if the simulation engine ran in the native python process # because everything is already in place. 
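
After the pool returns, the main process aggregates the per-process outputs: actor user output is merged via import_user_output_from_actor(), while bookkeeping quantities go into SimulationMetaData. A minimal sketch of the aggregation semantics of import_from_simulation_output(), using dummy output objects (illustrative only):

    # Illustrative only: mimics how SimulationMetaData accumulates values
    # from several sub-process SimulationOutput objects.
    from types import SimpleNamespace

    outputs = [
        SimpleNamespace(warnings=[], expected_number_of_events=1000,
                        user_hook_log=[], current_random_seed=42),
        SimpleNamespace(warnings=["low statistics"], expected_number_of_events=1200,
                        user_hook_log=[], current_random_seed=43),
    ]
    total_events = sum(o.expected_number_of_events for o in outputs)  # 2200, summed
    warnings = [w for o in outputs for w in o.warnings]               # concatenated
    seed = outputs[0].current_random_seed                             # 42: first seed is kept

Only the first process' random seed is retained, whereas event counts are summed and warnings are concatenated across processes; the per-process details remain available in meta_data_per_process.
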
From 78bcd34e5be4320dfd3ff8b4b378f2a5db0a4b8a Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:33:00 +0200 Subject: [PATCH 030/174] Remove obsolete code --- opengate/managers.py | 29 ++++++----------------------- 1 file changed, 6 insertions(+), 23 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 3222729b6..b3e642cad 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1788,7 +1788,6 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to print(results[0]) # DEMO list_of_output = [res.get() for res in results] print(f'list_of_output: {list_of_output}') - return list_of_output for actor in self.actor_manager.actors.values(): actor.import_user_output_from_actor(*[o.get_actor(actor.name) for o in list_of_output]) @@ -1796,22 +1795,6 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to self.meta_data.import_from_simulation_output(*list_of_output) for i, o in enumerate(list_of_output): self.meta_data_per_process[i] = SimulationMetaData(simulation_output=o) - # processes = [] - # for k, v in run_timing_interval_map.items(): - # p = multiprocessing.Process( - # target=target_func, - # args=(q, self.run_in_process, k, v['run_timing_intervals'], v['lut_original_rti']) - # ) - # p.start() - # processes.append(p) - # for p in processes: - # p.join() # (timeout=10) # timeout might be needed - # - # try: - # output = q.get(block=False) - # except queue.Empty: - # fatal("The queue is empty. The spawned process probably died.") - # return output # FIXME: temporary workaround to collect extra info from output # will be implemented similar to actor.import_user_output_from_actor after source refactoring @@ -1833,14 +1816,14 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to output = self._run_simulation_engine(False) self.meta_data.import_from_simulation_output(output) - self._user_warnings.extend(output.warnings) + # self._user_warnings.extend(output.warnings) - # FIXME workaround - self.expected_number_of_events = output.expected_number_of_events + # # FIXME workaround + # self.expected_number_of_events = output.expected_number_of_events + + # self.user_hook_log = output.user_hook_log + # self._current_random_seed = output.current_random_seed - # store the hook log - self.user_hook_log = output.user_hook_log - self._current_random_seed = output.current_random_seed if self.store_json_archive is True: self.to_json_file() From 39a45c8c1add8640b9fe157c9a55893be7eb700f Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:34:31 +0200 Subject: [PATCH 031/174] Trigger FinalizeSimulation() at the end of a run --- opengate/managers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index b3e642cad..5601e808b 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1824,6 +1824,8 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to # self.user_hook_log = output.user_hook_log # self._current_random_seed = output.current_random_seed + for actor in self.actor_manager.actors.values(): + actor.FinalizeSimulation() if self.store_json_archive is True: self.to_json_file() From 7e47becc3fb880a439486c4aea2e94ad1a6a003f Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:35:20 +0200 Subject: [PATCH 032/174] remove obsolete code --- opengate/managers.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 
5601e808b..314f6428d 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1816,14 +1816,6 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to output = self._run_simulation_engine(False) self.meta_data.import_from_simulation_output(output) - # self._user_warnings.extend(output.warnings) - - # # FIXME workaround - # self.expected_number_of_events = output.expected_number_of_events - - # self.user_hook_log = output.user_hook_log - # self._current_random_seed = output.current_random_seed - for actor in self.actor_manager.actors.values(): actor.FinalizeSimulation() From b8581ce7ed923a5db27ef7216fbdfb0925db263b Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:35:33 +0200 Subject: [PATCH 033/174] Add test008_dose_actor_multiproc.py --- .../tests/src/test008_dose_actor_multiproc.py | 126 ++++++++++++++++++ 1 file changed, 126 insertions(+) create mode 100755 opengate/tests/src/test008_dose_actor_multiproc.py diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py new file mode 100755 index 000000000..5cf6099b6 --- /dev/null +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import opengate as gate +from opengate.tests import utility +from scipy.spatial.transform import Rotation +from pathlib import Path + +if __name__ == "__main__": + paths = utility.get_default_test_paths(__file__, "gate_test008_dose_actor") + ref_path = paths.gate_output + + # create the simulation + sim = gate.Simulation() + + # main options + sim.g4_verbose = False + sim.g4_verbose_level = 1 + sim.visu = False + sim.random_seed = 12345678 + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem + + # shortcuts for units + m = gate.g4_units.m + cm = gate.g4_units.cm + + # change world size + world = sim.world + world.size = [1 * m, 1 * m, 1 * m] + + # add a simple fake volume to test hierarchy + # translation and rotation like in the Gate macro + fake = sim.add_volume("Box", "fake") + fake.size = [40 * cm, 40 * cm, 40 * cm] + fake.translation = [1 * cm, 2 * cm, 3 * cm] + fake.rotation = Rotation.from_euler("x", 10, degrees=True).as_matrix() + fake.material = "G4_AIR" + fake.color = [1, 0, 1, 1] + + # waterbox + waterbox = sim.add_volume("Box", "waterbox") + waterbox.mother = "fake" + waterbox.size = [10 * cm, 10 * cm, 10 * cm] + waterbox.translation = [-3 * cm, -2 * cm, -1 * cm] + waterbox.rotation = Rotation.from_euler("y", 20, degrees=True).as_matrix() + waterbox.material = "G4_WATER" + waterbox.color = [0, 0, 1, 1] + + # physics + sim.physics_manager.physics_list_name = "QGSP_BERT_EMV" + sim.physics_manager.enable_decay = False + sim.physics_manager.apply_cuts = True # default + um = gate.g4_units.um + global_cut = 700 * um + sim.physics_manager.global_production_cuts.gamma = global_cut + sim.physics_manager.global_production_cuts.electron = global_cut + sim.physics_manager.global_production_cuts.positron = global_cut + sim.physics_manager.global_production_cuts.proton = global_cut + + # default source for tests + source = sim.add_source("GenericSource", "mysource") + MeV = gate.g4_units.MeV + Bq = gate.g4_units.Bq + source.energy.mono = 150 * MeV + nm = gate.g4_units.nm + source.particle = "proton" + source.position.type = "disc" + source.position.radius = 1 * nm + source.direction.type = "momentum" + source.direction.momentum = [0, 0, 1] + source.activity = 50000 * Bq + + # add dose actor + dose = 
sim.add_actor("DoseActor", "dose") + dose.attached_to = "waterbox" + dose.size = [99, 99, 99] + mm = gate.g4_units.mm + dose.spacing = [2 * mm, 2 * mm, 2 * mm] + dose.translation = [2 * mm, 3 * mm, -2 * mm] + dose.edep_uncertainty.active = True + dose.hit_type = "random" + dose.output_coordinate_system = "local" + dose.output_filename = "test.nii.gz" + + # add stat actor + stat = sim.add_actor("SimulationStatisticsActor", "Stats") + stat.track_types_flag = True + + # start simulation + sim.run(number_of_sub_processes=4 ) + + # # print results at the end + # print(stat) + # print(dose) + # + # # tests + # stats_ref = utility.read_stat_file(ref_path / "stat.txt") + # is_ok = utility.assert_stats(stat, stats_ref, 0.11) + # + # print("\nDifference for EDEP") + # is_ok = ( + # utility.assert_images( + # ref_path / "output-Edep.mhd", + # dose.edep.get_output_path(), + # stat, + # tolerance=13, + # ignore_value=0, + # sum_tolerance=1, + # ) + # and is_ok + # ) + # + # print("\nDifference for uncertainty") + # is_ok = ( + # utility.assert_images( + # ref_path / "output-Edep-Uncertainty.mhd", + # dose.edep_uncertainty.get_output_path(), + # stat, + # tolerance=30, + # ignore_value=1, + # sum_tolerance=1, + # ) + # and is_ok + # ) + # + # utility.test_ok(is_ok) From 3d4fe1e7d0d1256b3337881c4ff1b9954165860b Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:35:51 +0200 Subject: [PATCH 034/174] Update test080_multiprocessing_1.py --- opengate/tests/src/test080_multiprocessing_1.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/opengate/tests/src/test080_multiprocessing_1.py b/opengate/tests/src/test080_multiprocessing_1.py index d79819b2e..90bf61400 100755 --- a/opengate/tests/src/test080_multiprocessing_1.py +++ b/opengate/tests/src/test080_multiprocessing_1.py @@ -13,6 +13,7 @@ sim = gate.Simulation() sim.run_timing_intervals = [[0 * s, 1 * s], [1 * s, 3 * s], [10 * s, 15 * s]] sim.output_dir = paths.output + sim.store_json_archive = True box1 = sim.add_volume("BoxVolume", "box1") box1.add_dynamic_parametrisation( @@ -20,8 +21,6 @@ ) n_proc = 4 * len(sim.run_timing_intervals) - run_timing_interval_map = sim.generate_run_timing_interval_map(n_proc) - print(run_timing_interval_map) output = sim.run(number_of_sub_processes=n_proc) @@ -31,5 +30,5 @@ print(f"ID of the main sim: {id(sim)}") - ids = [e[2] for e in output] + ids = [o.simulation_id for o in output] assert id(sim) not in ids From dcebecc95d1863a6049c778eb97927969c6cb684 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:36:49 +0200 Subject: [PATCH 035/174] Update GateObject to rename property 'warnings' in Simulation --- opengate/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengate/base.py b/opengate/base.py index d0de201cd..b7d8592ae 100644 --- a/opengate/base.py +++ b/opengate/base.py @@ -642,7 +642,7 @@ def warn_user(self, message): self._temporary_warning_cache.append(message) # if possible, register the warning directly else: - self.simulation._user_warnings.append(message) + self.simulation.warnings.append(message) warning(message) From c804709a8d764616bdc9cbb18821b41d8c071334 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:37:22 +0200 Subject: [PATCH 036/174] Update run_engine to use Simulation.meta_data --- opengate/engines.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengate/engines.py b/opengate/engines.py index f90adf1bf..718a72984 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -1138,7 
+1138,7 @@ def run_engine(self): # because everything else has already been executed in the main process # and potential warnings have already been registered. if self.new_process is True: - self.simulation.reset_warnings() + self.simulation.meta_data.reset_warnings() # initialization self.initialize() From 27b49c6b5f7622073ad1d639cac2de5e8610c080 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 13 Oct 2024 00:39:23 +0000 Subject: [PATCH 037/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/actors/actoroutput.py | 15 +++- opengate/actors/base.py | 10 ++- opengate/engines.py | 4 +- opengate/managers.py | 69 +++++++++++++------ opengate/processing.py | 42 ++++++----- .../tests/src/test008_dose_actor_multiproc.py | 2 +- .../src/test080_multiprocessing_handler.py | 18 +++-- 7 files changed, 105 insertions(+), 55 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index ea59795dd..53ad86634 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -423,13 +423,22 @@ def import_data_from_actor_output(self, *actor_output, discard_existing_data=Tru run_indices_to_import = set() for ao in actor_output: run_indices_to_import.union(ao.data_per_run.keys()) - which_output_per_run_index = dict([(r, [ao for ao in actor_output if r in ao.data_per_run]) for r in run_indices_to_import]) + which_output_per_run_index = dict( + [ + (r, [ao for ao in actor_output if r in ao.data_per_run]) + for r in run_indices_to_import + ] + ) for r in run_indices_to_import: - data_to_import = [ao.data_per_run[r] for ao in which_output_per_run_index[r]] + data_to_import = [ + ao.data_per_run[r] for ao in which_output_per_run_index[r] + ] if discard_existing_data is False and r in self.data_per_run: data_to_import.append(self.data_per_run[r]) self.data_per_run[r] = merge_data(data_to_import) - merged_data_to_import = [ao.merged_data for ao in actor_output if ao.merged_data is not None] + merged_data_to_import = [ + ao.merged_data for ao in actor_output if ao.merged_data is not None + ] if discard_existing_data is False and self.merged_data is not None: merged_data_to_import.append(self.merged_data) if len(merged_data_to_import) > 0: diff --git a/opengate/actors/base.py b/opengate/actors/base.py index 826f3452d..2e1d09691 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -431,10 +431,14 @@ def import_user_output_from_actor(self, *actor): else: for k in self.user_output: try: - self.user_output[k].import_data_from_actor_output(*[a.user_output[k] for a in actor]) + self.user_output[k].import_data_from_actor_output( + *[a.user_output[k] for a in actor] + ) except NotImplementedError: - self.warn_user(f"User output {k} in {self.type_name} cannot be imported " - f"because the function is not yet implemented for this type of output.") + self.warn_user( + f"User output {k} in {self.type_name} cannot be imported " + f"because the function is not yet implemented for this type of output." 
+ ) def store_output_data(self, output_name, run_index, *data): self._assert_output_exists(output_name) diff --git a/opengate/engines.py b/opengate/engines.py index 718a72984..48400c1ab 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -920,7 +920,9 @@ def store_output_from_simulation_engine(self, simulation_engine): self.store_sources(simulation_engine) self.store_hook_log(simulation_engine) self.current_random_seed = simulation_engine.current_random_seed - self.expected_number_of_events = simulation_engine.source_engine.expected_number_of_events + self.expected_number_of_events = ( + simulation_engine.source_engine.expected_number_of_events + ) self.warnings = simulation_engine.simulation.warnings self.simulation_id = id(simulation_engine.simulation) diff --git a/opengate/managers.py b/opengate/managers.py index 314f6428d..7e493ab9d 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -47,7 +47,10 @@ ) from .userinfo import UserInfo from .serialization import dump_json, dumps_json, loads_json, load_json -from .processing import dispatch_to_subprocess, MultiProcessingHandlerEqualPerRunTimingInterval +from .processing import ( + dispatch_to_subprocess, + MultiProcessingHandlerEqualPerRunTimingInterval, +) from .geometry.volumes import ( VolumeBase, @@ -1493,7 +1496,9 @@ def __getattr__(self, item): try: return self.meta_data[item] except KeyError: - raise AttributeError(f"Item {item} not found in {type(self)}, nor in the simulation meta data. ") + raise AttributeError( + f"Item {item} not found in {type(self)}, nor in the simulation meta data. " + ) def __str__(self): s = ( @@ -1695,24 +1700,30 @@ def _run_simulation_engine(self, start_new_process, process_index=None): output = se.run_engine() return output - def run_in_process(self, multi_process_handler, process_index, avoid_write_to_disk_in_subprocess): + def run_in_process( + self, multi_process_handler, process_index, avoid_write_to_disk_in_subprocess + ): # Important: this method is intended to run in a processes spawned off the main process. # Therefore, self is actually a separate instance from the original simulation # and we can safely adapt it in this process. 
# adapt the output_dir - self.output_dir = str(Path(self.output_dir) / f'process_{process_index}') + self.output_dir = str(Path(self.output_dir) / f"process_{process_index}") print("self.output_dir = ", self.output_dir) # adapt the run timing intervals in - self.run_timing_intervals = multi_process_handler.get_run_timing_intervals_for_process(process_index) + self.run_timing_intervals = ( + multi_process_handler.get_run_timing_intervals_for_process(process_index) + ) # adapt all dynamic volumes for vol in self.volume_manager.dynamic_volumes: vol.reassign_dynamic_params_for_process( - multi_process_handler.get_original_run_timing_indices_for_process(process_index) + multi_process_handler.get_original_run_timing_indices_for_process( + process_index + ) ) print(process_index) - print(f'Volume {vol.name}:') + print(f"Volume {vol.name}:") print(vol.user_info["dynamic_params"]) if avoid_write_to_disk_in_subprocess is True: @@ -1720,14 +1731,23 @@ def run_in_process(self, multi_process_handler, process_index, avoid_write_to_di actor.write_to_disk = False output = self._run_simulation_engine(True, process_index=process_index) - print(process_index, - os.getpid(), - id(self), - multi_process_handler.get_run_timing_intervals_for_process(process_index), - multi_process_handler.get_original_run_timing_indices_for_process(process_index)) + print( + process_index, + os.getpid(), + id(self), + multi_process_handler.get_run_timing_intervals_for_process(process_index), + multi_process_handler.get_original_run_timing_indices_for_process( + process_index + ), + ) return output - def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to_disk_in_subprocess=True): + def run( + self, + start_new_process=False, + number_of_sub_processes=0, + avoid_write_to_disk_in_subprocess=True, + ): # if windows and MT -> fail if os.name == "nt" and self.multithreaded: fatal( @@ -1770,9 +1790,11 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to self.meta_data.import_from_simulation_output(output) elif number_of_sub_processes > 1: - multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval(name='multi_proc_handler', - simulation=self, - number_of_processes=number_of_sub_processes) + multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval( + name="multi_proc_handler", + simulation=self, + number_of_processes=number_of_sub_processes, + ) multi_proc_handler.initialize() try: multiprocessing.set_start_method("spawn") @@ -1782,15 +1804,22 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to # q = multiprocessing.Queue() with multiprocessing.Pool(number_of_sub_processes) as pool: print("pool._outqueue: ", pool._outqueue) # DEMO - results = [pool.apply_async(self.run_in_process, - (multi_proc_handler, i, avoid_write_to_disk_in_subprocess)) for i in range(number_of_sub_processes)] + results = [ + pool.apply_async( + self.run_in_process, + (multi_proc_handler, i, avoid_write_to_disk_in_subprocess), + ) + for i in range(number_of_sub_processes) + ] # `.apply_async()` immediately returns AsyncResult (ApplyResult) object print(results[0]) # DEMO list_of_output = [res.get() for res in results] - print(f'list_of_output: {list_of_output}') + print(f"list_of_output: {list_of_output}") for actor in self.actor_manager.actors.values(): - actor.import_user_output_from_actor(*[o.get_actor(actor.name) for o in list_of_output]) + actor.import_user_output_from_actor( + *[o.get_actor(actor.name) for o in list_of_output] + ) 
self.meta_data.import_from_simulation_output(*list_of_output) for i, o in enumerate(list_of_output): diff --git a/opengate/processing.py b/opengate/processing.py index 27ac14901..44b8e64a2 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -4,6 +4,7 @@ from .exception import fatal from .base import GateObject + # define thin wrapper function to handle the queue def target_func(q, f, *args, **kwargs): q.put(f(*args, **kwargs)) @@ -41,13 +42,13 @@ def _setter_hook_number_of_processes(self, number_of_processes): class MultiProcessingHandlerBase(GateObject): user_info_defaults = { - 'number_of_processes': ( + "number_of_processes": ( 1, { "doc": "In how many parallel process should the simulation be run? " - "Must be a multiple of the number of run timing intervals. ", + "Must be a multiple of the number of run timing intervals. ", "setter_hook": _setter_hook_number_of_processes, - } + }, ) } @@ -78,22 +79,21 @@ def initialize(self): self.generate_dispatch_configuration() def get_original_run_timing_indices_for_process(self, process_index): - return self.dispatch_configuration[process_index]['lut_original_rti'] + return self.dispatch_configuration[process_index]["lut_original_rti"] def get_run_timing_intervals_for_process(self, process_index): - return self.dispatch_configuration[process_index]['run_timing_intervals'] + return self.dispatch_configuration[process_index]["run_timing_intervals"] def generate_dispatch_configuration(self): raise NotImplementedError def update_process_to_run_index_maps(self): - """Creates a mapping (process index, local run index) -> (original run index) - """ + """Creates a mapping (process index, local run index) -> (original run index)""" self.assert_dispatch_configuration() p_r_map = {} for k, v in self.dispatch_configuration.items(): - for lri, ori in enumerate(v['lut_original_rti']): + for lri, ori in enumerate(v["lut_original_rti"]): p_r_map[(k, lri)] = ori # and the inverse @@ -105,32 +105,40 @@ def update_process_to_run_index_maps(self): self.inverse_process_to_run_index_map = p_r_map_inv def dispatch_to_processes(self, dispatch_function, *args): - return [dispatch_function(i, *args) for i in range(len(self.dispatch_configuration))] + return [ + dispatch_function(i, *args) for i in range(len(self.dispatch_configuration)) + ] class MultiProcessingHandlerEqualPerRunTimingInterval(MultiProcessingHandlerBase): def generate_dispatch_configuration(self): if self.number_of_processes % len(self.original_run_timing_intervals) != 0: - fatal("number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" - f"but I received {self.number_of_processes}, while there are {len(self.original_run_timing_intervals)}.") + fatal( + "number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" + f"but I received {self.number_of_processes}, while there are {len(self.original_run_timing_intervals)}." 
+ ) - number_of_processes_per_run = int(self.number_of_processes / len(self.original_run_timing_intervals)) + number_of_processes_per_run = int( + self.number_of_processes / len(self.original_run_timing_intervals) + ) dispatch_configuration = {} process_index = 0 for i, rti in enumerate(self.original_run_timing_intervals): t_start, t_end = rti duration_original = t_end - t_start duration_in_process = duration_original / number_of_processes_per_run - t_intermediate = [t_start + (j+1) * duration_in_process for j in range(number_of_processes_per_run-1)] + t_intermediate = [ + t_start + (j + 1) * duration_in_process + for j in range(number_of_processes_per_run - 1) + ] t_all = [t_start] + t_intermediate + [t_end] for t_s, t_e in zip(t_all[:-1], t_all[1:]): dispatch_configuration[process_index] = { - 'run_timing_intervals': [[t_s, t_e]], - 'lut_original_rti': [i], - 'process_id': None + "run_timing_intervals": [[t_s, t_e]], + "lut_original_rti": [i], + "process_id": None, } process_index += 1 self.dispatch_configuration = dispatch_configuration return dispatch_configuration - diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index 5cf6099b6..e166c4bbc 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -87,7 +87,7 @@ stat.track_types_flag = True # start simulation - sim.run(number_of_sub_processes=4 ) + sim.run(number_of_sub_processes=4) # # print results at the end # print(stat) diff --git a/opengate/tests/src/test080_multiprocessing_handler.py b/opengate/tests/src/test080_multiprocessing_handler.py index daabbff61..db1d18e9f 100755 --- a/opengate/tests/src/test080_multiprocessing_handler.py +++ b/opengate/tests/src/test080_multiprocessing_handler.py @@ -6,11 +6,8 @@ from opengate.processing import MultiProcessingHandlerEqualPerRunTimingInterval - if __name__ == "__main__": - paths = get_default_test_paths( - __file__, output_folder="test080" - ) + paths = get_default_test_paths(__file__, output_folder="test080") s = g4_units.s @@ -18,12 +15,13 @@ sim.run_timing_intervals = [[0 * s, 1 * s], [1 * s, 3 * s], [10 * s, 15 * s]] sim.output_dir = paths.output - box1 = sim.add_volume('BoxVolume', 'box1') - box1.add_dynamic_parametrisation(translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))]) + box1 = sim.add_volume("BoxVolume", "box1") + box1.add_dynamic_parametrisation( + translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))] + ) n_proc = 4 * len(sim.run_timing_intervals) - multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval(name='multi_proc_handler', - simulation=sim, - number_of_processes=n_proc) - + multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval( + name="multi_proc_handler", simulation=sim, number_of_processes=n_proc + ) From 0a25f0d74f1be935af23a907371ec16135d64105 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 23:33:44 +0200 Subject: [PATCH 038/174] Rename FinalizeSimulation() to EndOfMultiProcessAction() --- opengate/actors/base.py | 2 +- opengate/actors/doseactors.py | 2 +- opengate/managers.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/opengate/actors/base.py b/opengate/actors/base.py index 2e1d09691..edbb4f44e 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -469,5 +469,5 @@ def EndSimulationAction(self): """Default virtual method for inheritance""" pass - def FinalizeSimulation(self): + def 
EndOfMultiProcessAction(self): pass diff --git a/opengate/actors/doseactors.py b/opengate/actors/doseactors.py index 9f6c15c8d..435f6ead9 100644 --- a/opengate/actors/doseactors.py +++ b/opengate/actors/doseactors.py @@ -254,7 +254,7 @@ def inform_user_output_about_end(self): def EndSimulationAction(self): self.inform_user_output_about_end() - def FinalizeSimulation(self): + def EndOfMultiProcessAction(self): self.inform_user_output_about_end() diff --git a/opengate/managers.py b/opengate/managers.py index 7e493ab9d..5522e96ed 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1821,6 +1821,9 @@ def run( *[o.get_actor(actor.name) for o in list_of_output] ) + for actor in self.actor_manager.actors.values(): + actor.EndOfMultiProcessAction() + self.meta_data.import_from_simulation_output(*list_of_output) for i, o in enumerate(list_of_output): self.meta_data_per_process[i] = SimulationMetaData(simulation_output=o) @@ -1845,9 +1848,6 @@ def run( output = self._run_simulation_engine(False) self.meta_data.import_from_simulation_output(output) - for actor in self.actor_manager.actors.values(): - actor.FinalizeSimulation() - if self.store_json_archive is True: self.to_json_file() From c4cf4a45eb8e90e68e869007784f7a84be0914d8 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 14 Oct 2024 08:47:05 +0200 Subject: [PATCH 039/174] Implement explicit inplace_merge_with in ItkImageDataItem --- opengate/actors/dataitems.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index 5cc4a9600..6fe32c65d 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -268,6 +268,12 @@ def __itruediv__(self, other): self.set_data(divide_itk_images(self.data, other.data)) return self + def inplace_merge_with(self, *other): + data_to_merge = [self.data] + [o.data for o in other] + if self.data is not None: + data_to_merge += [self.data] + self.data = sum_itk_images(data_to_merge) + def set_image_properties(self, **properties): if not self.data_is_none: if "spacing" in properties and properties["spacing"] is not None: From d94c0e4181509365e72d9adb11e6e7b5e0748ff6 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 14 Oct 2024 08:47:58 +0200 Subject: [PATCH 040/174] Adapt merge_data to try to accelerate it (WIP) --- opengate/actors/dataitems.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index 6fe32c65d..013225785 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -754,8 +754,13 @@ class QuotientMeanItkImage(QuotientItkImage): def merge_data(list_of_data): merged_data = list_of_data[0] - for d in list_of_data[1:]: - merged_data.inplace_merge_with(d) + try: + print(f"DEBUG type(merged_data) = '{type(merged_data)}'") + merged_data.inplace_merge_with(*list_of_data[1:]) + except: + print("DEBUG went into except block") + for d in list_of_data[1:]: + merged_data.inplace_merge_with(d) return merged_data From 7d919d8b11b879d080a6ebde7d71eb5506f95720 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 14 Oct 2024 22:34:32 +0200 Subject: [PATCH 041/174] Implement sum_itk_images based on SimpleITK (workaround) --- opengate/image.py | 55 +++++++++++++++++++++++++++++++++++++---------- 1 file changed, 44 insertions(+), 11 deletions(-) diff --git a/opengate/image.py b/opengate/image.py index 010aadb24..82cbfda59 100644 --- a/opengate/image.py +++ b/opengate/image.py @@ -12,6 +12,8 @@ ) from .definitions import 
__gate_list_objects__ +import SimpleITK as sitk + def update_image_py_to_cpp(py_img, cpp_img, copy_data=False): cpp_img.set_size(py_img.GetLargestPossibleRegion().GetSize()) @@ -354,17 +356,48 @@ def divide_itk_images( imgarrOut.CopyInformation(img1_numerator) return imgarrOut - -def sum_itk_images(images): - image_type = type(images[0]) - add_image_filter = itk.AddImageFilter[image_type, image_type, image_type].New() - output = images[0] - for img in images[1:]: - add_image_filter.SetInput1(output) - add_image_filter.SetInput2(img) - add_image_filter.Update() - output = add_image_filter.GetOutput() - return output +# IMPLEMENTATION BASED ON ITK +# def sum_itk_images(images): +# image_type = type(images[0]) +# add_image_filter = itk.AddImageFilter[image_type, image_type, image_type].New() +# output = images[0] +# for img in images[1:]: +# add_image_filter.SetInput1(output) +# add_image_filter.SetInput2(img) +# add_image_filter.Update() +# output = add_image_filter.GetOutput() +# return output + + +def itk_to_sitk(itk_image): + array = itk.GetArrayFromImage(itk_image) + sitk_image = sitk.GetImageFromArray(array) + sitk_image.SetOrigin(np.array(itk_image.GetOrigin())) + sitk_image.SetSpacing(np.array(itk_image.GetSpacing())) + sitk_image.SetDirection(np.array(itk_image.GetDirection()).flatten()) + return sitk_image + + +def sitk_to_itk(sitk_image): + array = sitk.GetArrayFromImage(sitk_image) # Convert SimpleITK image to NumPy array + itk_image = itk.GetImageFromArray(array) # Convert NumPy array to ITK image + + # Set the metadata from SimpleITK to ITK image + itk_image.SetOrigin(np.array(sitk_image.GetOrigin())) + itk_image.SetSpacing(np.array(sitk_image.GetSpacing())) + itk_image.SetDirection(np.array(sitk_image.GetDirection()).reshape(3, 3)) + + return itk_image + + +def sum_itk_images(itk_image_list): + if not itk_image_list: + raise ValueError("The image list is empty.") + summed_image = itk_to_sitk(itk_image_list[0]) + for itk_image in itk_image_list[1:]: + sitk_image = itk_to_sitk(itk_image) + summed_image = sitk.Add(summed_image, sitk_image) + return sitk_to_itk(summed_image) def multiply_itk_images(images): From 7bfa8b7055ec7a352745f03c427552ff8b802c55 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 14 Oct 2024 22:35:26 +0200 Subject: [PATCH 042/174] remove debug prints and clean code --- opengate/managers.py | 21 ++++----------------- 1 file changed, 4 insertions(+), 17 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 5522e96ed..2641265b4 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1709,7 +1709,6 @@ def run_in_process( # adapt the output_dir self.output_dir = str(Path(self.output_dir) / f"process_{process_index}") - print("self.output_dir = ", self.output_dir) # adapt the run timing intervals in self.run_timing_intervals = ( @@ -1722,24 +1721,13 @@ def run_in_process( process_index ) ) - print(process_index) - print(f"Volume {vol.name}:") - print(vol.user_info["dynamic_params"]) if avoid_write_to_disk_in_subprocess is True: for actor in self.actor_manager.actors.values(): actor.write_to_disk = False output = self._run_simulation_engine(True, process_index=process_index) - print( - process_index, - os.getpid(), - id(self), - multi_process_handler.get_run_timing_intervals_for_process(process_index), - multi_process_handler.get_original_run_timing_indices_for_process( - process_index - ), - ) + print(f"run_in_process finished in process {process_index}") return output def run( @@ -1803,7 +1791,6 @@ def run( pass # q = 
multiprocessing.Queue() with multiprocessing.Pool(number_of_sub_processes) as pool: - print("pool._outqueue: ", pool._outqueue) # DEMO results = [ pool.apply_async( self.run_in_process, @@ -1812,13 +1799,13 @@ def run( for i in range(number_of_sub_processes) ] # `.apply_async()` immediately returns AsyncResult (ApplyResult) object - print(results[0]) # DEMO list_of_output = [res.get() for res in results] - print(f"list_of_output: {list_of_output}") + log.info("End of multiprocessing") + # loop over actors in original simulation for actor in self.actor_manager.actors.values(): actor.import_user_output_from_actor( - *[o.get_actor(actor.name) for o in list_of_output] + *[o.get_actor(actor.name) for o in list_of_output] # these are the actors from the process ) for actor in self.actor_manager.actors.values(): From 11f67400de16e4f939febeee80a71835e43847fb Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 14 Oct 2024 23:58:11 +0200 Subject: [PATCH 043/174] In BaseUserInterfaceToActorOutput.__getstate__: always use return_dict = self.__dict__.copy() --- opengate/actors/actoroutput.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 53ad86634..6062eb686 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -46,11 +46,12 @@ def __getstate__(self): For earlier python version (<3.11), __getstate__ may not be defined. We provide a simple workaround here to return a copy of the internal dict. """ - try: - return_dict = super().__getstate__() - except AttributeError: - # If there is no superclass with __getstate__, use self.__dict__ - return_dict = self.__dict__.copy() + # try: + # return_dict = super().__getstate__() + # except AttributeError: + # # If there is no superclass with __getstate__, use self.__dict__ + # return_dict = self.__dict__.copy() + return_dict = self.__dict__.copy() # Safely remove 'belongs_to_actor' if it exists return_dict.pop("belongs_to_actor", None) return return_dict From 13830579086602a1e7340db7c217ebe5402d7459 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:00:02 +0200 Subject: [PATCH 044/174] Implement reset_user_output() and reset_data() --- opengate/actors/actoroutput.py | 4 ++++ opengate/actors/base.py | 4 ++++ opengate/actors/miscactors.py | 2 ++ opengate/managers.py | 6 ++++++ 4 files changed, 16 insertions(+) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 6062eb686..bf1544dc3 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -344,6 +344,10 @@ def get_output_path(self, which="merged", **kwargs): def get_output_path_as_string(self, **kwargs): return ensure_filename_is_str(self.get_output_path(**kwargs)) + def reset_data(self): + self.merged_data = None + self.data_per_run = {} + def close(self): if self.keep_data_in_memory is False: self.data_per_run = {} diff --git a/opengate/actors/base.py b/opengate/actors/base.py index edbb4f44e..84777e370 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -194,6 +194,10 @@ def get_data(self, name=None, **kwargs): f" Example: my_actor.{list(self.interfaces_to_user_output.keys())[0]}.get_data(). 
" ) + def reset_user_output(self): + for v in self.user_output.values(): + v.reset_data() + # *** shortcut properties *** @property @shortcut_for_single_output_actor diff --git a/opengate/actors/miscactors.py b/opengate/actors/miscactors.py index 1396dfb89..90a6ec9d5 100644 --- a/opengate/actors/miscactors.py +++ b/opengate/actors/miscactors.py @@ -58,6 +58,8 @@ def __init__(self, *args, **kwargs): # predefine the merged_data self.merged_data = Box() + + def reset_data(self): self.merged_data.runs = 0 self.merged_data.events = 0 self.merged_data.tracks = 0 diff --git a/opengate/managers.py b/opengate/managers.py index 2641265b4..871a083b6 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1757,6 +1757,9 @@ def run( https://britishgeologicalsurvey.github.io/science/python-forking-vs-spawn/ """ + for actor in self.actor_manager.actors.values(): + actor.reset_user_output() + log.info("Dispatching simulation to subprocess ...") output = dispatch_to_subprocess(self._run_simulation_engine, True) @@ -1790,6 +1793,9 @@ def run( print("Could not set start method 'spawn'.") pass # q = multiprocessing.Queue() + for actor in self.actor_manager.actors.values(): + actor.reset_user_output() + with multiprocessing.Pool(number_of_sub_processes) as pool: results = [ pool.apply_async( From 6bba1b7f9b92d524e73cd97ff3560eb5aa8fa20a Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:00:36 +0200 Subject: [PATCH 045/174] remove debug print --- opengate/actors/dataitems.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index 013225785..19f2adc99 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -755,10 +755,8 @@ class QuotientMeanItkImage(QuotientItkImage): def merge_data(list_of_data): merged_data = list_of_data[0] try: - print(f"DEBUG type(merged_data) = '{type(merged_data)}'") merged_data.inplace_merge_with(*list_of_data[1:]) except: - print("DEBUG went into except block") for d in list_of_data[1:]: merged_data.inplace_merge_with(d) return merged_data From 56061bc30987573bb10c9da8445da0e74388f41b Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:00:56 +0200 Subject: [PATCH 046/174] Update comment --- opengate/actors/base.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/opengate/actors/base.py b/opengate/actors/base.py index 84777e370..393b1531b 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -129,8 +129,7 @@ class ActorBase(GateObject): def __init__(self, *args, **kwargs): GateObject.__init__(self, *args, **kwargs) - # this is set by the actor engine during initialization - self.actor_engine = None + self.actor_engine = None # set by the actor engine during initializatio self.user_output = Box() self.interfaces_to_user_output = Box() From aebcc407f622448ad96c38ef0dd43aa6a502df24 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:01:28 +0200 Subject: [PATCH 047/174] Update test008_dose_actor_multiproc.py --- .../tests/src/test008_dose_actor_multiproc.py | 56 ++++++++++++------- 1 file changed, 35 insertions(+), 21 deletions(-) diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index e166c4bbc..5f115e34a 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -5,6 +5,7 @@ from opengate.tests import utility from scipy.spatial.transform import Rotation from pathlib import Path +import 
time if __name__ == "__main__": paths = utility.get_default_test_paths(__file__, "gate_test008_dose_actor") @@ -18,7 +19,6 @@ sim.g4_verbose_level = 1 sim.visu = False sim.random_seed = 12345678 - sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem # shortcuts for units m = gate.g4_units.m @@ -87,28 +87,42 @@ stat.track_types_flag = True # start simulation + t1 = time.time() + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_4' sim.run(number_of_sub_processes=4) + t2 = time.time() + delta_t_nproc4 = t2 - t1 + + path_edep_nproc4 = dose.edep.get_output_path() + + t1 = time.time() + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_1' + sim.run(number_of_sub_processes=1) + t2 = time.time() + delta_t_nproc1 = t2 - t1 + + path_edep_nproc1 = dose.edep.get_output_path() + + # t1 = time.time() + # sim.run(number_of_sub_processes=0) + # t2 = time.time() + # delta_t_no_subproc = t2 - t1 + + print("Simulation times: ") + print(f"One subprocess: {delta_t_nproc1}") + print(f"Four subprocesses: {delta_t_nproc4}, speed-up: {delta_t_nproc1 / delta_t_nproc4}") + # print(f"No subprocess: {delta_t_no_subproc}") - # # print results at the end - # print(stat) - # print(dose) - # # # tests - # stats_ref = utility.read_stat_file(ref_path / "stat.txt") - # is_ok = utility.assert_stats(stat, stats_ref, 0.11) - # - # print("\nDifference for EDEP") - # is_ok = ( - # utility.assert_images( - # ref_path / "output-Edep.mhd", - # dose.edep.get_output_path(), - # stat, - # tolerance=13, - # ignore_value=0, - # sum_tolerance=1, - # ) - # and is_ok - # ) + print("\nDifference for EDEP") + is_ok = utility.assert_images( + path_edep_nproc1, + path_edep_nproc4, + stat, + tolerance=13, + ignore_value=0, + sum_tolerance=1, + ) # # print("\nDifference for uncertainty") # is_ok = ( @@ -123,4 +137,4 @@ # and is_ok # ) # - # utility.test_ok(is_ok) + utility.test_ok(is_ok) From 557e6798d16e1c31be714d9867bfcea8381260a3 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:01:56 +0200 Subject: [PATCH 048/174] Update test009_voxels_dynamic.py (not relevant for test result) --- opengate/tests/src/test009_voxels_dynamic.py | 1 + 1 file changed, 1 insertion(+) diff --git a/opengate/tests/src/test009_voxels_dynamic.py b/opengate/tests/src/test009_voxels_dynamic.py index 0a1f4995f..ce3dbbd52 100755 --- a/opengate/tests/src/test009_voxels_dynamic.py +++ b/opengate/tests/src/test009_voxels_dynamic.py @@ -93,6 +93,7 @@ # add dose actor dose = sim.add_actor("DoseActor", "dose") dose.output_filename = "test009-edep.mhd" + dose.edep.keep_data_per_run = True dose.attached_to = "patient" dose.size = [99, 99, 99] dose.spacing = [2 * mm, 2 * mm, 2 * mm] From 15d72cd1412850b11065d7166beee52dd61ce37d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 14 Oct 2024 22:02:29 +0000 Subject: [PATCH 049/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/image.py | 1 + opengate/managers.py | 4 +++- .../tests/src/test008_dose_actor_multiproc.py | 20 ++++++++++--------- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/opengate/image.py b/opengate/image.py index 82cbfda59..a9407063e 100644 --- a/opengate/image.py +++ b/opengate/image.py @@ -356,6 +356,7 @@ def divide_itk_images( imgarrOut.CopyInformation(img1_numerator) return imgarrOut + # IMPLEMENTATION BASED ON ITK # def sum_itk_images(images): # image_type = type(images[0]) diff --git a/opengate/managers.py b/opengate/managers.py 
index 871a083b6..39bad813b 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1811,7 +1811,9 @@ def run( # loop over actors in original simulation for actor in self.actor_manager.actors.values(): actor.import_user_output_from_actor( - *[o.get_actor(actor.name) for o in list_of_output] # these are the actors from the process + *[ + o.get_actor(actor.name) for o in list_of_output + ] # these are the actors from the process ) for actor in self.actor_manager.actors.values(): diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index 5f115e34a..1591e183f 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -88,7 +88,7 @@ # start simulation t1 = time.time() - sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_4' + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / "nproc_4" sim.run(number_of_sub_processes=4) t2 = time.time() delta_t_nproc4 = t2 - t1 @@ -96,7 +96,7 @@ path_edep_nproc4 = dose.edep.get_output_path() t1 = time.time() - sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_1' + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / "nproc_1" sim.run(number_of_sub_processes=1) t2 = time.time() delta_t_nproc1 = t2 - t1 @@ -110,18 +110,20 @@ print("Simulation times: ") print(f"One subprocess: {delta_t_nproc1}") - print(f"Four subprocesses: {delta_t_nproc4}, speed-up: {delta_t_nproc1 / delta_t_nproc4}") + print( + f"Four subprocesses: {delta_t_nproc4}, speed-up: {delta_t_nproc1 / delta_t_nproc4}" + ) # print(f"No subprocess: {delta_t_no_subproc}") # # tests print("\nDifference for EDEP") is_ok = utility.assert_images( - path_edep_nproc1, - path_edep_nproc4, - stat, - tolerance=13, - ignore_value=0, - sum_tolerance=1, + path_edep_nproc1, + path_edep_nproc4, + stat, + tolerance=13, + ignore_value=0, + sum_tolerance=1, ) # # print("\nDifference for uncertainty") From 53f5ff07a09e0039ab6056d0dbe475a9933a5507 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:44:39 +0200 Subject: [PATCH 050/174] Correct typo in comment --- opengate/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengate/base.py b/opengate/base.py index 8a6ae8b8f..4ea43110c 100644 --- a/opengate/base.py +++ b/opengate/base.py @@ -703,7 +703,7 @@ def process_dynamic_parametrisation(self, params): extra_params = {} extra_params["auto_changer"] = params.pop( "auto_changer", True - ) # True of key not found (default) + ) # True if key not found (default) if extra_params["auto_changer"] not in (False, True): fatal( f"Received wrong value type for 'auto_changer': got {type(extra_params['auto_changer'])}, " From 14630134b97030b4f35bea280869fd1f33bcbc87 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:45:14 +0200 Subject: [PATCH 051/174] Implement DynamicGateObject.reassign_subset_of_dynamic_params() --- opengate/base.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/opengate/base.py b/opengate/base.py index 4ea43110c..a5f50acae 100644 --- a/opengate/base.py +++ b/opengate/base.py @@ -763,6 +763,14 @@ def add_dynamic_parametrisation(self, name=None, **params): s += f"{k}: {v}\n" log.debug(s) + def reassign_subset_of_dynamic_params(self, subset): + # loop over all dynamic parametrisations of this object, + for param in self.user_info["dynamic_params"].values(): + for k, v in param.items(): + # extract the subset of entries to the list that are 
relevant to this process + if k in self.dynamic_user_info: + param[k] = [v[i] for i in subset] + def create_changers(self): # this base class implementation is here to keep inheritance intact. return [] From b3e8f0c183a1bf028db716d40e05846c165d83d4 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:46:05 +0200 Subject: [PATCH 052/174] Add attribute process_index to SimulationEngine (not used yet) --- opengate/engines.py | 1 + 1 file changed, 1 insertion(+) diff --git a/opengate/engines.py b/opengate/engines.py index 42ac8b2e0..16fd96f06 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -1016,6 +1016,7 @@ def __init__(self, simulation, new_process=False): # this is only for info. # Process handling is done in Simulation class, not in SimulationEngine! self.new_process = new_process + self.process_index = None # LATER : option to wait the end of completion or not From 0b7569b498d8e15a06721199381592634ef6806a Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:49:19 +0200 Subject: [PATCH 053/174] First steps towards multi processing --- opengate/managers.py | 88 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 86 insertions(+), 2 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 1755ff602..29e3849b3 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -7,6 +7,7 @@ import os from pathlib import Path import weakref +import multiprocessing import opengate_core as g4 @@ -1499,6 +1500,7 @@ def __init__(self, name="simulation", **kwargs): self._current_random_seed = None self.expected_number_of_events = None + self.mapping_run_timing_intervals = {} def __str__(self): s = ( @@ -1692,7 +1694,7 @@ def add_filter(self, filter_type, name): def multithreaded(self): return self.number_of_threads > 1 or self.force_multithread_mode - def _run_simulation_engine(self, start_new_process): + def _run_simulation_engine(self, start_new_process, process_index=None): """Method that creates a simulation engine in a context (with ...) and runs a simulation. Args: @@ -1707,10 +1709,55 @@ def _run_simulation_engine(self, start_new_process): with SimulationEngine(self) as se: se.new_process = start_new_process se.init_only = self.init_only + se.process_index = process_index output = se.run_engine() return output - def run(self, start_new_process=False): + def generate_run_timing_interval_map(self, number_of_processes): + if number_of_processes % len(self.run_timing_intervals) != 0: + fatal("number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" + f"but I received {number_of_processes}, while there are {len(self.run_timing_intervals)}.") + + number_of_processes_per_run = int(number_of_processes / len(self.run_timing_intervals)) + run_timing_interval_map = {} + process_index = 0 + for i, rti in enumerate(self.run_timing_intervals): + t_start, t_end = rti + duration_original = t_end - t_start + duration_in_process = duration_original / number_of_processes_per_run + t_intermediate = [t_start + (j+1) * duration_in_process for j in range(number_of_processes_per_run-1)] + t_all = [t_start] + t_intermediate + [t_end] + for t_s, t_e in zip(t_all[:-1], t_all[1:]): + run_timing_interval_map[process_index] = { + 'run_timing_intervals': [[t_s, t_e]], + 'lut_original_rti': [i] + } + process_index += 1 + return run_timing_interval_map + + def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): + # Important: this method is intended to run in a processes spawned off the main process. 
+ # Therefore, self is actually a separate instance from the original simulation + # and we can safely adapt it in this process. + + # adapt the output_dir + self.output_dir = str(Path(self.output_dir) / f'process_{process_index}') + print("self.output_dir = ", self.output_dir) + + # adapt the run timing intervals in + self.run_timing_intervals = run_timing_intervals + # adapt all dynamic volumes + for vol in self.volume_manager.dynamic_volumes: + vol.reassign_subset_of_dynamic_params(lut_original_rti) + print(process_index) + print(f'Volume {vol.name}:') + print(vol.user_info["dynamic_params"]) + + output = self._run_simulation_engine(False, process_index=process_index) + print(process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti) + return output + + def run(self, start_new_process=False, number_of_sub_processes=0): # if windows and MT -> fail if os.name == "nt" and self.multithreaded: fatal( @@ -1718,6 +1765,9 @@ def run(self, start_new_process=False): "Run the simulation with one thread." ) + if number_of_sub_processes == 1: + start_new_process = True + # prepare sub process if start_new_process is True: """Important: put: @@ -1744,6 +1794,40 @@ def run(self, start_new_process=False): source.fTotalSkippedEvents = s.user_info.fTotalSkippedEvents source.fTotalZeroEvents = s.user_info.fTotalZeroEvents + elif number_of_sub_processes > 1: + run_timing_interval_map = self.generate_run_timing_interval_map(number_of_sub_processes) + try: + multiprocessing.set_start_method("spawn") + except RuntimeError: + print("Could not set start method 'spawn'.") + pass + # q = multiprocessing.Queue() + with multiprocessing.Pool(len(run_timing_interval_map)) as pool: + print("pool._outqueue: ", pool._outqueue) # DEMO + results = [pool.apply_async(self.run_in_process, + (k, v['run_timing_intervals'], v['lut_original_rti'],)) + for k, v in run_timing_interval_map.items()] + # `.apply_async()` immediately returns AsyncResult (ApplyResult) object + print(results[0]) # DEMO + list_of_output = [res.get() for res in results] + print(f'list_of_output: {list_of_output}') + return list_of_output + # processes = [] + # for k, v in run_timing_interval_map.items(): + # p = multiprocessing.Process( + # target=target_func, + # args=(q, self.run_in_process, k, v['run_timing_intervals'], v['lut_original_rti']) + # ) + # p.start() + # processes.append(p) + # for p in processes: + # p.join() # (timeout=10) # timeout might be needed + # + # try: + # output = q.get(block=False) + # except queue.Empty: + # fatal("The queue is empty. The spawned process probably died.") + # return output else: # Nothing special to do if the simulation engine ran in the native python process # because everything is already in place. 
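The dispatch scheme introduced in the patch above (generate_run_timing_interval_map plus Pool.apply_async over run_in_process) can be summarised by the following self-contained sketch. It does not use opengate; the helper names split_intervals and fake_run are made up for illustration and are not part of the Simulation API, and the even split per run timing interval only approximates the logic of generate_run_timing_interval_map:

import multiprocessing

def split_intervals(run_timing_intervals, number_of_processes):
    """Cut each run timing interval into equal sub-intervals, one per spawned process."""
    n_runs = len(run_timing_intervals)
    if number_of_processes % n_runs != 0:
        raise ValueError("number_of_processes must be a multiple of the number of intervals")
    per_run = number_of_processes // n_runs
    config, process_index = {}, 0
    for original_run_index, (t_start, t_end) in enumerate(run_timing_intervals):
        step = (t_end - t_start) / per_run
        edges = [t_start + j * step for j in range(per_run)] + [t_end]
        for t_s, t_e in zip(edges[:-1], edges[1:]):
            # each process gets one sub-interval plus the index of the run it was cut from
            config[process_index] = {"run_timing_intervals": [[t_s, t_e]],
                                     "lut_original_rti": [original_run_index]}
            process_index += 1
    return config

def fake_run(process_index, run_timing_intervals):
    # stand-in for Simulation.run_in_process(); just reports the share of work it received
    return process_index, run_timing_intervals

if __name__ == "__main__":
    cfg = split_intervals([[0.0, 1.0], [1.0, 3.0]], number_of_processes=4)
    with multiprocessing.Pool(len(cfg)) as pool:
        results = [pool.apply_async(fake_run, (k, v["run_timing_intervals"]))
                   for k, v in cfg.items()]
        print([res.get() for res in results])

As in the patch, apply_async returns AsyncResult objects immediately and the per-process outputs are gathered afterwards with res.get(); in the real code the worker also remaps the dynamic parametrisations to the sub-interval it was assigned.
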
From d17682f0dc79a8b915bd60a07305a06ebb89c421 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:49:36 +0200 Subject: [PATCH 054/174] Add test080_multiprocessing_1.py --- .../tests/src/test080_multiprocessing_1.py | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100755 opengate/tests/src/test080_multiprocessing_1.py diff --git a/opengate/tests/src/test080_multiprocessing_1.py b/opengate/tests/src/test080_multiprocessing_1.py new file mode 100755 index 000000000..3a4aedb30 --- /dev/null +++ b/opengate/tests/src/test080_multiprocessing_1.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +from opengate.utility import g4_units +import opengate as gate +from opengate.tests.utility import get_default_test_paths + + + +if __name__ == "__main__": + paths = get_default_test_paths( + __file__, output_folder="test080" + ) + + s = g4_units.s + + sim = gate.Simulation() + sim.run_timing_intervals = [[0 * s, 1 * s], [1 * s, 3 * s], [10 * s, 15 * s]] + sim.output_dir = paths.output + + box1 = sim.add_volume('BoxVolume', 'box1') + box1.add_dynamic_parametrisation(translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))]) + + n_proc = 4 * len(sim.run_timing_intervals) + run_timing_interval_map = sim.generate_run_timing_interval_map(n_proc) + print(run_timing_interval_map) + + output = sim.run(number_of_sub_processes=n_proc) + + + print("*** output ***") + for o in output: + print(o) + + print(f"ID of the main sim: {id(sim)}") + + ids = [e[2] for e in output] + assert id(sim) not in ids + + From fb3cb560225558b7a7f713bab507908c7ab43664 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:49:56 +0200 Subject: [PATCH 055/174] Add test030_dose_motion_dynamic_param_multiproc.py --- ...030_dose_motion_dynamic_param_multiproc.py | 133 ++++++++++++++++++ 1 file changed, 133 insertions(+) create mode 100755 opengate/tests/src/test030_dose_motion_dynamic_param_multiproc.py diff --git a/opengate/tests/src/test030_dose_motion_dynamic_param_multiproc.py b/opengate/tests/src/test030_dose_motion_dynamic_param_multiproc.py new file mode 100755 index 000000000..b0c00b92d --- /dev/null +++ b/opengate/tests/src/test030_dose_motion_dynamic_param_multiproc.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import opengate as gate +from scipy.spatial.transform import Rotation +from opengate.tests import utility + +if __name__ == "__main__": + paths = utility.get_default_test_paths( + __file__, "gate_test029_volume_time_rotation", "test030" + ) + + # create the simulation + sim = gate.Simulation() + + # main options + sim.g4_verbose = False + sim.visu = False + sim.random_seed = 983456 + sim.output_dir = paths.output + + # units + m = gate.g4_units.m + mm = gate.g4_units.mm + cm = gate.g4_units.cm + um = gate.g4_units.um + nm = gate.g4_units.nm + MeV = gate.g4_units.MeV + Bq = gate.g4_units.Bq + sec = gate.g4_units.second + + # change world size + sim.world.size = [1 * m, 1 * m, 1 * m] + + # add a simple fake volume to test hierarchy + # translation and rotation like in the Gate macro + fake = sim.add_volume("Box", "fake") + fake.size = [40 * cm, 40 * cm, 40 * cm] + fake.translation = [1 * cm, 2 * cm, 3 * cm] + fake.material = "G4_AIR" + fake.color = [1, 0, 1, 1] + + # waterbox + waterbox = sim.add_volume("Box", "waterbox") + waterbox.mother = fake + waterbox.size = [20 * cm, 20 * cm, 20 * cm] + waterbox.translation = [-3 * cm, -2 * cm, -1 * cm] + waterbox.rotation = Rotation.from_euler("y", -20, degrees=True).as_matrix() + 
waterbox.material = "G4_WATER" + waterbox.color = [0, 0, 1, 1] + + # physics + sim.physics_manager.set_production_cut("world", "all", 700 * um) + + # default source for tests + # the source is fixed at the center, only the volume will move + source = sim.add_source("GenericSource", "mysource") + source.energy.mono = 150 * MeV + source.particle = "proton" + source.position.type = "disc" + source.position.radius = 5 * mm + source.direction.type = "momentum" + source.direction.momentum = [0, 0, 1] + source.activity = 30000 * Bq + + # add dose actor + dose = sim.add_actor("DoseActor", "dose") + dose.output_filename = "test030.mhd" + dose.attached_to = waterbox + dose.size = [99, 99, 99] + mm = gate.g4_units.mm + dose.spacing = [2 * mm, 2 * mm, 2 * mm] + dose.translation = [2 * mm, 3 * mm, -2 * mm] + dose.edep.keep_data_per_run = True + dose.edep.auto_merge = True + dose.edep_uncertainty.active = True + + # add stat actor + stats = sim.add_actor("SimulationStatisticsActor", "Stats") + + # motion + n = 3 + interval_length = 1 * sec / n + sim.run_timing_intervals = [ + (i * interval_length, (i + 1) * interval_length) for i in range(n) + ] + gantry_angles_deg = [i * 20 for i in range(n)] + ( + dynamic_translations, + dynamic_rotations, + ) = gate.geometry.utility.get_transform_orbiting( + initial_position=fake.translation, axis="Y", angle_deg=gantry_angles_deg + ) + fake.add_dynamic_parametrisation( + translation=dynamic_translations, rotation=dynamic_rotations + ) + + # start simulation + sim.run(number_of_sub_processes=3 * len(sim.run_timing_intervals)) + + # # print results at the end + # print(stats) + # + # # tests + # stats_ref = utility.read_stat_file(paths.output_ref / "stats030.txt") + # is_ok = utility.assert_stats(stats, stats_ref, 0.11) + # + # print() + # gate.exception.warning("Difference for EDEP") + # is_ok = ( + # utility.assert_images( + # paths.output_ref / "test030-edep.mhd", + # dose.edep.get_output_path(), + # stats, + # tolerance=30, + # ignore_value=0, + # ) + # and is_ok + # ) + # + # print("\nDifference for uncertainty") + # is_ok = ( + # utility.assert_images( + # paths.output_ref / "test030-edep_uncertainty.mhd", + # dose.edep_uncertainty.get_output_path(), + # stats, + # tolerance=15, + # ignore_value=1, + # ) + # and is_ok + # ) + # + # utility.test_ok(is_ok) From aa5e7104ceafc1dbf5ff43f9705839869ce816d5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 23:54:15 +0000 Subject: [PATCH 056/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/managers.py | 47 +++++++++++++------ .../tests/src/test080_multiprocessing_1.py | 14 ++---- 2 files changed, 38 insertions(+), 23 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 29e3849b3..5ddea34ff 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1715,22 +1715,29 @@ def _run_simulation_engine(self, start_new_process, process_index=None): def generate_run_timing_interval_map(self, number_of_processes): if number_of_processes % len(self.run_timing_intervals) != 0: - fatal("number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" - f"but I received {number_of_processes}, while there are {len(self.run_timing_intervals)}.") + fatal( + "number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" + f"but I received {number_of_processes}, while there are {len(self.run_timing_intervals)}." 
+ ) - number_of_processes_per_run = int(number_of_processes / len(self.run_timing_intervals)) + number_of_processes_per_run = int( + number_of_processes / len(self.run_timing_intervals) + ) run_timing_interval_map = {} process_index = 0 for i, rti in enumerate(self.run_timing_intervals): t_start, t_end = rti duration_original = t_end - t_start duration_in_process = duration_original / number_of_processes_per_run - t_intermediate = [t_start + (j+1) * duration_in_process for j in range(number_of_processes_per_run-1)] + t_intermediate = [ + t_start + (j + 1) * duration_in_process + for j in range(number_of_processes_per_run - 1) + ] t_all = [t_start] + t_intermediate + [t_end] for t_s, t_e in zip(t_all[:-1], t_all[1:]): run_timing_interval_map[process_index] = { - 'run_timing_intervals': [[t_s, t_e]], - 'lut_original_rti': [i] + "run_timing_intervals": [[t_s, t_e]], + "lut_original_rti": [i], } process_index += 1 return run_timing_interval_map @@ -1741,7 +1748,7 @@ def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): # and we can safely adapt it in this process. # adapt the output_dir - self.output_dir = str(Path(self.output_dir) / f'process_{process_index}') + self.output_dir = str(Path(self.output_dir) / f"process_{process_index}") print("self.output_dir = ", self.output_dir) # adapt the run timing intervals in @@ -1750,11 +1757,13 @@ def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): for vol in self.volume_manager.dynamic_volumes: vol.reassign_subset_of_dynamic_params(lut_original_rti) print(process_index) - print(f'Volume {vol.name}:') + print(f"Volume {vol.name}:") print(vol.user_info["dynamic_params"]) output = self._run_simulation_engine(False, process_index=process_index) - print(process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti) + print( + process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti + ) return output def run(self, start_new_process=False, number_of_sub_processes=0): @@ -1795,7 +1804,9 @@ def run(self, start_new_process=False, number_of_sub_processes=0): source.fTotalZeroEvents = s.user_info.fTotalZeroEvents elif number_of_sub_processes > 1: - run_timing_interval_map = self.generate_run_timing_interval_map(number_of_sub_processes) + run_timing_interval_map = self.generate_run_timing_interval_map( + number_of_sub_processes + ) try: multiprocessing.set_start_method("spawn") except RuntimeError: @@ -1804,13 +1815,21 @@ def run(self, start_new_process=False, number_of_sub_processes=0): # q = multiprocessing.Queue() with multiprocessing.Pool(len(run_timing_interval_map)) as pool: print("pool._outqueue: ", pool._outqueue) # DEMO - results = [pool.apply_async(self.run_in_process, - (k, v['run_timing_intervals'], v['lut_original_rti'],)) - for k, v in run_timing_interval_map.items()] + results = [ + pool.apply_async( + self.run_in_process, + ( + k, + v["run_timing_intervals"], + v["lut_original_rti"], + ), + ) + for k, v in run_timing_interval_map.items() + ] # `.apply_async()` immediately returns AsyncResult (ApplyResult) object print(results[0]) # DEMO list_of_output = [res.get() for res in results] - print(f'list_of_output: {list_of_output}') + print(f"list_of_output: {list_of_output}") return list_of_output # processes = [] # for k, v in run_timing_interval_map.items(): diff --git a/opengate/tests/src/test080_multiprocessing_1.py b/opengate/tests/src/test080_multiprocessing_1.py index 3a4aedb30..d79819b2e 100755 --- a/opengate/tests/src/test080_multiprocessing_1.py +++ 
b/opengate/tests/src/test080_multiprocessing_1.py @@ -5,11 +5,8 @@ from opengate.tests.utility import get_default_test_paths - if __name__ == "__main__": - paths = get_default_test_paths( - __file__, output_folder="test080" - ) + paths = get_default_test_paths(__file__, output_folder="test080") s = g4_units.s @@ -17,8 +14,10 @@ sim.run_timing_intervals = [[0 * s, 1 * s], [1 * s, 3 * s], [10 * s, 15 * s]] sim.output_dir = paths.output - box1 = sim.add_volume('BoxVolume', 'box1') - box1.add_dynamic_parametrisation(translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))]) + box1 = sim.add_volume("BoxVolume", "box1") + box1.add_dynamic_parametrisation( + translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))] + ) n_proc = 4 * len(sim.run_timing_intervals) run_timing_interval_map = sim.generate_run_timing_interval_map(n_proc) @@ -26,7 +25,6 @@ output = sim.run(number_of_sub_processes=n_proc) - print("*** output ***") for o in output: print(o) @@ -35,5 +33,3 @@ ids = [e[2] for e in output] assert id(sim) not in ids - - From 76fbfba182a619f90bd2d13d4baf2966f7038009 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 11 Oct 2024 12:13:08 +0200 Subject: [PATCH 057/174] Implement MultiProcessingHandler classes --- opengate/processing.py | 100 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 99 insertions(+), 1 deletion(-) diff --git a/opengate/processing.py b/opengate/processing.py index 7de54292e..fa3d63c7a 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -2,7 +2,7 @@ import queue from .exception import fatal - +from .base import GateObject # define thin wrapper function to handle the queue def target_func(q, f, *args, **kwargs): @@ -28,3 +28,101 @@ def dispatch_to_subprocess(func, *args, **kwargs): return q.get(block=False) except queue.Empty: fatal("The queue is empty. The spawned process probably died.") + + +def _setter_hook_number_of_processes(self, number_of_processes): + if self.number_of_processes != number_of_processes: + self._dispatch_configuration = {} + self.process_run_index_map = {} + self.inverse_process_to_run_index_map = {} + return number_of_processes + + +class MultiProcessingHandlerBase(GateObject): + + user_info_defaults = { + 'number_of_processes': ( + 1, + { + "doc": "In how many parallel process should the simulation be run? " + "Must be a multiple of the number of run timing intervals. 
", + "setter_hook": _setter_hook_number_of_processes, + } + ) + } + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._dispatch_configuration = {} + self.process_run_index_map = {} + self.inverse_process_to_run_index_map = {} + + @property + def original_run_timing_intervals(self): + return self.simulation.run_timing_intervals + + @property + def dispatch_configuration(self): + return self._dispatch_configuration + + @dispatch_configuration.setter + def dispatch_configuration(self, config): + self._dispatch_configuration = config + self.update_process_to_run_index_map() + self.update_inverse_process_to_run_index_map() + + @property + def original_run_timing_indices(self): + return [i for i in range(len(self.original_run_timing_intervals))] + + def initialize(self): + self.generate_dispatch_configuration() + + def generate_dispatch_configuration(self): + raise NotImplementedError + + def update_process_to_run_index_map(self): + """Creates a mapping (process index, local run index) -> (original run index) + """ + if self.dispatch_configuration is None or len(self.dispatch_configuration) == 0: + fatal("Unable to update the mapping 'process to original run index' " + "because no dispatch configuration is available. ") + p_r_map = {} + for k, v in self.dispatch_configuration.items(): + for lri, ori in enumerate(v['lut_original_rti']): + p_r_map[(k, lri)] = ori + self.process_run_index_map = p_r_map + + def update_inverse_process_to_run_index_map(self): + p_r_map_inv = dict([(i, []) for i in set(self.process_run_index_map.values())]) + for k, v in self.process_run_index_map.items(): + p_r_map_inv[v].append(k) + self.inverse_process_to_run_index_map = p_r_map_inv + + +class MultiProcessingHandlerEqualPerRunTimingInterval(MultiProcessingHandlerBase): + + def generate_dispatch_configuration(self): + if self.number_of_processes % len(self.original_run_timing_intervals) != 0: + fatal("number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" + f"but I received {self.number_of_processes}, while there are {len(self.original_run_timing_intervals)}.") + + number_of_processes_per_run = int(self.number_of_processes / len(self.original_run_timing_intervals)) + dispatch_configuration = {} + process_index = 0 + for i, rti in enumerate(self.original_run_timing_intervals): + t_start, t_end = rti + duration_original = t_end - t_start + duration_in_process = duration_original / number_of_processes_per_run + t_intermediate = [t_start + (j+1) * duration_in_process for j in range(number_of_processes_per_run-1)] + t_all = [t_start] + t_intermediate + [t_end] + for t_s, t_e in zip(t_all[:-1], t_all[1:]): + dispatch_configuration[process_index] = { + 'run_timing_intervals': [[t_s, t_e]], + 'lut_original_rti': [i], + 'process_id': None + } + process_index += 1 + self.dispatch_configuration = dispatch_configuration + return dispatch_configuration + From 6973f13e6ac44f4a3b747cef4a0c1fee7fc10f5f Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 11 Oct 2024 12:13:21 +0200 Subject: [PATCH 058/174] create test080_multiprocessing_handler.py --- .../src/test080_multiprocessing_handler.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100755 opengate/tests/src/test080_multiprocessing_handler.py diff --git a/opengate/tests/src/test080_multiprocessing_handler.py b/opengate/tests/src/test080_multiprocessing_handler.py new file mode 100755 index 000000000..daabbff61 --- /dev/null +++ b/opengate/tests/src/test080_multiprocessing_handler.py @@ -0,0 
+1,29 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +from opengate.utility import g4_units +import opengate as gate +from opengate.tests.utility import get_default_test_paths +from opengate.processing import MultiProcessingHandlerEqualPerRunTimingInterval + + + +if __name__ == "__main__": + paths = get_default_test_paths( + __file__, output_folder="test080" + ) + + s = g4_units.s + + sim = gate.Simulation() + sim.run_timing_intervals = [[0 * s, 1 * s], [1 * s, 3 * s], [10 * s, 15 * s]] + sim.output_dir = paths.output + + box1 = sim.add_volume('BoxVolume', 'box1') + box1.add_dynamic_parametrisation(translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))]) + + n_proc = 4 * len(sim.run_timing_intervals) + + multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval(name='multi_proc_handler', + simulation=sim, + number_of_processes=n_proc) + From 026052239a203e2171d543fa9884f4b10cac73de Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:13:00 +0200 Subject: [PATCH 059/174] Implement import_data_from_actor_output() --- opengate/actors/actoroutput.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index ceba7d140..073d9723d 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -367,6 +367,9 @@ def load_data(self, which): f"but it should be implemented in the specific derived class" ) + def import_data_from_actor_output(self, *actor_output, **kwargs): + raise NotImplementedError("This is the base class. ") + class MergeableActorOutput(ActorOutputBase): @@ -416,6 +419,22 @@ def end_of_simulation(self, **kwargs): f"A developer needs to fix this. " ) + def import_data_from_actor_output(self, *actor_output, discard_existing_data=True): + run_indices_to_import = set() + for ao in actor_output: + run_indices_to_import.union(ao.data_per_run.keys()) + which_output_per_run_index = dict([(r, [ao for ao in actor_output if r in ao.data_per_run]) for r in run_indices_to_import]) + for r in run_indices_to_import: + data_to_import = [ao.data_per_run[r] for ao in which_output_per_run_index[r]] + if discard_existing_data is False and r in self.data_per_run: + data_to_import.append(self.data_per_run[r]) + self.data_per_run[r] = merge_data(data_to_import) + merged_data_to_import = [ao.merged_data for ao in actor_output if ao.merged_data is not None] + if discard_existing_data is False and self.merged_data is not None: + merged_data_to_import.append(self.merged_data) + if len(merged_data_to_import) > 0: + self.merged_data = merge_data(merged_data_to_import) + class ActorOutputUsingDataItemContainer(MergeableActorOutput): From 3b93451f52b5a06e0468964460082ccae0bb767c Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:14:40 +0200 Subject: [PATCH 060/174] Implement import_user_output_from_actor() --- opengate/actors/base.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/opengate/actors/base.py b/opengate/actors/base.py index 12e6a1217..4806505fb 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -429,6 +429,19 @@ def recover_user_output(self, actor): for v in self.interfaces_to_user_output.values(): v.belongs_to_actor = self + def import_user_output_from_actor(self, *actor): + if not all([self.type_name == a.type_name for a in actor]): + fatal("An actor can only import user output from the same type of actor.") + if len(actor) == 1: + self.recover_user_output(actor[0]) + else: + for k in self.user_output: + 
try: + self.user_output[k].import_data_from_actor_output(*[a.user_output[k] for a in actor]) + except NotImplementedError: + self.warn_user(f"User output {k} in {self.type_name} cannot be imported " + f"because the function is not yet implemented for this type of output.") + def store_output_data(self, output_name, run_index, *data): self._assert_output_exists(output_name) self.user_output[output_name].store_data(run_index, *data) From 4d141c3f4777b16b57b355e0b9f303d2e017012f Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:14:56 +0200 Subject: [PATCH 061/174] Implement import_user_output_from_actor in ActorBase --- opengate/actors/base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/opengate/actors/base.py b/opengate/actors/base.py index 4806505fb..942488dd3 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -471,5 +471,8 @@ def EndSimulationAction(self): """Default virtual method for inheritance""" pass + def FinalizeSimulation(self): + pass + process_cls(ActorBase) From f06e627d7ac77b173c97cd16fdc88d3e5a4ad1d2 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:16:11 +0200 Subject: [PATCH 062/174] Change local variable name in reassign_dynamic_params_for_process() --- opengate/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/opengate/base.py b/opengate/base.py index a5f50acae..208e5d2b2 100644 --- a/opengate/base.py +++ b/opengate/base.py @@ -763,13 +763,13 @@ def add_dynamic_parametrisation(self, name=None, **params): s += f"{k}: {v}\n" log.debug(s) - def reassign_subset_of_dynamic_params(self, subset): + def reassign_dynamic_params_for_process(self, run_indices): # loop over all dynamic parametrisations of this object, for param in self.user_info["dynamic_params"].values(): for k, v in param.items(): # extract the subset of entries to the list that are relevant to this process if k in self.dynamic_user_info: - param[k] = [v[i] for i in subset] + param[k] = [v[i] for i in run_indices] def create_changers(self): # this base class implementation is here to keep inheritance intact. 
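The renamed reassign_dynamic_params_for_process() above keeps, for each dynamic parametrisation of the object, only the entries that belong to the run indices assigned to the current sub-process. A minimal standalone sketch of that indexing pattern, with made-up translation values used purely for illustration (they are not taken from the patches):

    # illustration only: one dynamic parametrisation with one entry
    # per original run timing interval
    dynamic_params = {"translation": [[0, 0, 0], [1, 1, 1], [2, 2, 2]]}

    # run indices assigned to this sub-process (here: the second and third interval)
    run_indices = [1, 2]

    # same selection pattern as reassign_dynamic_params_for_process()
    for key, values in dynamic_params.items():
        dynamic_params[key] = [values[i] for i in run_indices]

    print(dynamic_params)  # {'translation': [[1, 1, 1], [2, 2, 2]]}

With run_indices taken from the dispatch configuration, each sub-process ends up with parametrisation lists whose length matches its shortened list of run timing intervals.
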
From 3ea4bc7418668cd44a1516051c0981ab4f4ba27b Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:17:09 +0200 Subject: [PATCH 063/174] Implement FinalizeSimulation() in VoxelDepositActor --- opengate/actors/doseactors.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/opengate/actors/doseactors.py b/opengate/actors/doseactors.py index 6ffae3ea3..a777273c3 100644 --- a/opengate/actors/doseactors.py +++ b/opengate/actors/doseactors.py @@ -246,12 +246,17 @@ def EndOfRunActionMasterThread(self, run_index): u.end_of_run(run_index) return 0 - def EndSimulationAction(self): - # inform actor output that this simulation is over and write data + def inform_user_output_about_end(self): for u in self.user_output.values(): if u.get_active(item="all"): u.end_of_simulation() + def EndSimulationAction(self): + self.inform_user_output_about_end() + + def FinalizeSimulation(self): + self.inform_user_output_about_end() + def compute_std_from_sample( number_of_samples, value_array, squared_value_array, correct_bias=False From 504673f68ec50eeb7348cc8dcb32ef139c44b00c Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:17:40 +0200 Subject: [PATCH 064/174] Add simulation_id to SimulationOutput --- opengate/engines.py | 1 + 1 file changed, 1 insertion(+) diff --git a/opengate/engines.py b/opengate/engines.py index 16fd96f06..db8c5508d 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -922,6 +922,7 @@ def __init__(self): self.sources_by_thread = {} self.pid = os.getpid() self.ppid = os.getppid() + self.simulation_id = None self.current_random_seed = None self.user_hook_log = [] self.warnings = None From 76b0f8655ae8f8dbf209b592c9ce55ec90f4523e Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:18:23 +0200 Subject: [PATCH 065/174] Implement SimulationOutput.store_output_from_simulation_engine() --- opengate/engines.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/opengate/engines.py b/opengate/engines.py index db8c5508d..bf58ee9d3 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -927,6 +927,15 @@ def __init__(self): self.user_hook_log = [] self.warnings = None + def store_output_from_simulation_engine(self, simulation_engine): + self.store_actors(simulation_engine) + self.store_sources(simulation_engine) + self.store_hook_log(simulation_engine) + self.current_random_seed = simulation_engine.current_random_seed + self.expected_number_of_events = simulation_engine.source_engine.expected_number_of_events + self.warnings = simulation_engine.simulation.warnings + self.simulation_id = id(simulation_engine.simulation) + def store_actors(self, simulation_engine): self.actors = simulation_engine.simulation.actor_manager.actors for actor in self.actors.values(): @@ -1177,12 +1186,14 @@ def run_engine(self): self.user_hook_after_run(self) # prepare the output - output.store_actors(self) - output.store_sources(self) - output.store_hook_log(self) - output.current_random_seed = self.current_random_seed - output.expected_number_of_events = self.source_engine.expected_number_of_events - output.warnings = self.simulation.warnings + output.store_output_from_simulation_engine(self) + # output.store_actors(self) + # output.store_sources(self) + # output.store_hook_log(self) + # output.current_random_seed = self.current_random_seed + # output.expected_number_of_events = self.source_engine.expected_number_of_events + # output.warnings = self.simulation.warnings + # output.simulation_id = 
id(self.simulation) return output From fd799dd191aa2b882bccf72bf4a6fc8853a2cb46 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:20:44 +0200 Subject: [PATCH 066/174] Simplify code in run_engine() --- opengate/engines.py | 36 +++++++++--------------------------- 1 file changed, 9 insertions(+), 27 deletions(-) diff --git a/opengate/engines.py b/opengate/engines.py index bf58ee9d3..d2b451683 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -1165,36 +1165,18 @@ def run_engine(self): else: self.user_hook_after_init(self) - # if init only, we stop - if self.simulation.init_only: - output.store_actors(self) - output.store_sources(self) - output.store_hook_log(self) - output.current_random_seed = self.current_random_seed - output.expected_number_of_events = ( - self.source_engine.expected_number_of_events - ) - return output - - # go - self.start_and_stop() - - # start visualization if vrml or gdml - self.visu_engine.start_visualisation() - if self.user_hook_after_run: - log.info("Simulation: User hook after run") - self.user_hook_after_run(self) + # if init only, we skip the actual run + if not self.simulation.init_only: + # go + self.start_and_stop() + # start visualization if vrml or gdml + self.visu_engine.start_visualisation() + if self.user_hook_after_run: + log.info("Simulation: User hook after run") + self.user_hook_after_run(self) # prepare the output output.store_output_from_simulation_engine(self) - # output.store_actors(self) - # output.store_sources(self) - # output.store_hook_log(self) - # output.current_random_seed = self.current_random_seed - # output.expected_number_of_events = self.source_engine.expected_number_of_events - # output.warnings = self.simulation.warnings - # output.simulation_id = id(self.simulation) - return output def start_and_stop(self): From 3b6d94d7c02882ef46b29855d2de22409a0dcfa4 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:21:13 +0200 Subject: [PATCH 067/174] Implement SimulationMetaData class --- opengate/managers.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index 5ddea34ff..36591ef8a 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1200,6 +1200,39 @@ def print_material_database_names(self): print(self.dump_material_database_names()) +class SimulationMetaData(Box): + + def __init__(self, *args, simulation_output=None, **kwargs): + super().__init__(*args, **kwargs) + self.warnings = [] + self.expected_number_of_events = 0 # FIXME workaround + self.user_hook_log = [] + self.current_random_seed = None + self.number_of_sub_processes = None + self.start_new_process = None + if simulation_output is not None: + self.import_from_simulation_output(simulation_output) + + def reset_warnings(self): + self.warnings = [] + + def import_from_simulation_meta_data(self, *meta_data): + for m in meta_data: + self.warnings.extend(m.warnings) + self.expected_number_of_events += m.expected_number_of_events + self.user_hook_log.extend(m.user_hook_log) + if self.current_random_seed is None: + self.current_random_seed = m.current_random_seed + + def import_from_simulation_output(self, *sim_output): + for so in sim_output: + self.warnings.extend(so.warnings) + self.expected_number_of_events += so.expected_number_of_events + self.user_hook_log.extend(so.user_hook_log) + if self.current_random_seed is None: + self.current_random_seed = so.current_random_seed + + def setter_hook_verbose_level(self, verbose_level): try: level = 
int(verbose_level) From c4cf0f0a52c2772287b80d73e544c1269c3445cd Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:23:03 +0200 Subject: [PATCH 068/174] Use SimulationMetaData in Simulation --- opengate/managers.py | 30 +++++++++--------------------- 1 file changed, 9 insertions(+), 21 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 36591ef8a..94f9642b1 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1509,13 +1509,13 @@ def __init__(self, name="simulation", **kwargs): kwargs.pop("simulation", None) super().__init__(name=name, **kwargs) - # list to store warning messages issued somewhere in the simulation - self._user_warnings = [] - # for debug only self.verbose_getstate = False self.verbose_close = False + self.meta_data = SimulationMetaData() + self.meta_data_per_process = {} + # main managers self.volume_manager = VolumeManager(self) self.source_manager = SourceManager(self) @@ -1527,13 +1527,12 @@ def __init__(self, name="simulation", **kwargs): self.user_hook_after_init = None self.user_hook_after_init_arg = None self.user_hook_after_run = None - self.user_hook_log = None - - # read-only info - self._current_random_seed = None - self.expected_number_of_events = None - self.mapping_run_timing_intervals = {} + def __getattr__(self, item): + try: + return self.meta_data[item] + except KeyError: + raise AttributeError(f"Item {item} not found in {type(self)}, nor in the simulation meta data. ") def __str__(self): s = ( @@ -1561,21 +1560,10 @@ def use_multithread(self): def world(self): return self.volume_manager.world_volume - @property - def current_random_seed(self): - return self._current_random_seed - - @property - def warnings(self): - return self._user_warnings - - def reset_warnings(self): - self._user_warnings = [] - def warn_user(self, message): # We need this specific implementation because the Simulation does not hold a reference 'simulation', # as required by the base class implementation of warn_user() - self._user_warnings.append(message) + self.warnings.append(message) super().warn_user(message) def to_dictionary(self): From 8b28b15ab67923c87ae4dd96806b39f8cc2bf5c2 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:24:15 +0200 Subject: [PATCH 069/174] Update MultiProcessingHandlerBase class --- opengate/processing.py | 34 +++++++++++++++++++++------------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/opengate/processing.py b/opengate/processing.py index fa3d63c7a..27ac14901 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -68,37 +68,45 @@ def dispatch_configuration(self): @dispatch_configuration.setter def dispatch_configuration(self, config): self._dispatch_configuration = config - self.update_process_to_run_index_map() - self.update_inverse_process_to_run_index_map() + self.update_process_to_run_index_maps() - @property - def original_run_timing_indices(self): - return [i for i in range(len(self.original_run_timing_intervals))] + def assert_dispatch_configuration(self): + if self.dispatch_configuration is None or len(self.dispatch_configuration) == 0: + fatal("No dispatch configuration is available. 
") def initialize(self): self.generate_dispatch_configuration() + def get_original_run_timing_indices_for_process(self, process_index): + return self.dispatch_configuration[process_index]['lut_original_rti'] + + def get_run_timing_intervals_for_process(self, process_index): + return self.dispatch_configuration[process_index]['run_timing_intervals'] + def generate_dispatch_configuration(self): raise NotImplementedError - def update_process_to_run_index_map(self): + def update_process_to_run_index_maps(self): """Creates a mapping (process index, local run index) -> (original run index) """ - if self.dispatch_configuration is None or len(self.dispatch_configuration) == 0: - fatal("Unable to update the mapping 'process to original run index' " - "because no dispatch configuration is available. ") + self.assert_dispatch_configuration() + p_r_map = {} for k, v in self.dispatch_configuration.items(): for lri, ori in enumerate(v['lut_original_rti']): p_r_map[(k, lri)] = ori - self.process_run_index_map = p_r_map - def update_inverse_process_to_run_index_map(self): - p_r_map_inv = dict([(i, []) for i in set(self.process_run_index_map.values())]) - for k, v in self.process_run_index_map.items(): + # and the inverse + p_r_map_inv = dict([(i, []) for i in set(p_r_map.values())]) + for k, v in p_r_map.items(): p_r_map_inv[v].append(k) + + self.process_run_index_map = p_r_map self.inverse_process_to_run_index_map = p_r_map_inv + def dispatch_to_processes(self, dispatch_function, *args): + return [dispatch_function(i, *args) for i in range(len(self.dispatch_configuration))] + class MultiProcessingHandlerEqualPerRunTimingInterval(MultiProcessingHandlerBase): From b1f2d214e7237aa9164572ea0da7d7f1026c49bf Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:25:50 +0200 Subject: [PATCH 070/174] Remove obsolete generate_run_timing_interval_map() method --- opengate/managers.py | 57 +++++++------------------------------------- 1 file changed, 8 insertions(+), 49 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 94f9642b1..e518f63da 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1734,42 +1734,13 @@ def _run_simulation_engine(self, start_new_process, process_index=None): output = se.run_engine() return output - def generate_run_timing_interval_map(self, number_of_processes): - if number_of_processes % len(self.run_timing_intervals) != 0: - fatal( - "number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" - f"but I received {number_of_processes}, while there are {len(self.run_timing_intervals)}." - ) - - number_of_processes_per_run = int( - number_of_processes / len(self.run_timing_intervals) - ) - run_timing_interval_map = {} - process_index = 0 - for i, rti in enumerate(self.run_timing_intervals): - t_start, t_end = rti - duration_original = t_end - t_start - duration_in_process = duration_original / number_of_processes_per_run - t_intermediate = [ - t_start + (j + 1) * duration_in_process - for j in range(number_of_processes_per_run - 1) - ] - t_all = [t_start] + t_intermediate + [t_end] - for t_s, t_e in zip(t_all[:-1], t_all[1:]): - run_timing_interval_map[process_index] = { - "run_timing_intervals": [[t_s, t_e]], - "lut_original_rti": [i], - } - process_index += 1 - return run_timing_interval_map - def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): # Important: this method is intended to run in a processes spawned off the main process. 
# Therefore, self is actually a separate instance from the original simulation # and we can safely adapt it in this process. # adapt the output_dir - self.output_dir = str(Path(self.output_dir) / f"process_{process_index}") + self.output_dir = str(Path(self.output_dir) / f'process_{process_index}') print("self.output_dir = ", self.output_dir) # adapt the run timing intervals in @@ -1778,13 +1749,11 @@ def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): for vol in self.volume_manager.dynamic_volumes: vol.reassign_subset_of_dynamic_params(lut_original_rti) print(process_index) - print(f"Volume {vol.name}:") + print(f'Volume {vol.name}:') print(vol.user_info["dynamic_params"]) output = self._run_simulation_engine(False, process_index=process_index) - print( - process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti - ) + print(process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti) return output def run(self, start_new_process=False, number_of_sub_processes=0): @@ -1825,9 +1794,7 @@ def run(self, start_new_process=False, number_of_sub_processes=0): source.fTotalZeroEvents = s.user_info.fTotalZeroEvents elif number_of_sub_processes > 1: - run_timing_interval_map = self.generate_run_timing_interval_map( - number_of_sub_processes - ) + run_timing_interval_map = self.generate_run_timing_interval_map(number_of_sub_processes) try: multiprocessing.set_start_method("spawn") except RuntimeError: @@ -1836,21 +1803,13 @@ def run(self, start_new_process=False, number_of_sub_processes=0): # q = multiprocessing.Queue() with multiprocessing.Pool(len(run_timing_interval_map)) as pool: print("pool._outqueue: ", pool._outqueue) # DEMO - results = [ - pool.apply_async( - self.run_in_process, - ( - k, - v["run_timing_intervals"], - v["lut_original_rti"], - ), - ) - for k, v in run_timing_interval_map.items() - ] + results = [pool.apply_async(self.run_in_process, + (k, v['run_timing_intervals'], v['lut_original_rti'],)) + for k, v in run_timing_interval_map.items()] # `.apply_async()` immediately returns AsyncResult (ApplyResult) object print(results[0]) # DEMO list_of_output = [res.get() for res in results] - print(f"list_of_output: {list_of_output}") + print(f'list_of_output: {list_of_output}') return list_of_output # processes = [] # for k, v in run_timing_interval_map.items(): From 2a033b7c4c6742e1a729d5d57eaf8ed77109ec44 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:26:06 +0200 Subject: [PATCH 071/174] Update imports in managers.py --- opengate/managers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index e518f63da..934e63fc2 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -47,7 +47,7 @@ ) from .userinfo import UserInfo from .serialization import dump_json, dumps_json, loads_json, load_json -from .processing import dispatch_to_subprocess +from .processing import dispatch_to_subprocess, MultiProcessingHandlerEqualPerRunTimingInterval from .geometry.volumes import ( VolumeBase, @@ -1734,7 +1734,7 @@ def _run_simulation_engine(self, start_new_process, process_index=None): output = se.run_engine() return output - def run_in_process(self, process_index, run_timing_intervals, lut_original_rti): + def run_in_process(self, multi_process_handler, process_index, avoid_write_to_disk_in_subprocess): # Important: this method is intended to run in a processes spawned off the main process. 
# Therefore, self is actually a separate instance from the original simulation # and we can safely adapt it in this process. From 9a6f871004a606ab2e8c4bedcfd959d5594b7162 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:26:37 +0200 Subject: [PATCH 072/174] Update run_in_process() --- opengate/managers.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 934e63fc2..b10031c2f 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1744,16 +1744,26 @@ def run_in_process(self, multi_process_handler, process_index, avoid_write_to_di print("self.output_dir = ", self.output_dir) # adapt the run timing intervals in - self.run_timing_intervals = run_timing_intervals + self.run_timing_intervals = multi_process_handler.get_run_timing_intervals_for_process(process_index) # adapt all dynamic volumes for vol in self.volume_manager.dynamic_volumes: - vol.reassign_subset_of_dynamic_params(lut_original_rti) + vol.reassign_dynamic_params_for_process( + multi_process_handler.get_original_run_timing_indices_for_process(process_index) + ) print(process_index) print(f'Volume {vol.name}:') print(vol.user_info["dynamic_params"]) - output = self._run_simulation_engine(False, process_index=process_index) - print(process_index, os.getpid(), id(self), run_timing_intervals, lut_original_rti) + if avoid_write_to_disk_in_subprocess is True: + for actor in self.actor_manager.actors.values(): + actor.write_to_disk = False + + output = self._run_simulation_engine(True, process_index=process_index) + print(process_index, + os.getpid(), + id(self), + multi_process_handler.get_run_timing_intervals_for_process(process_index), + multi_process_handler.get_original_run_timing_indices_for_process(process_index)) return output def run(self, start_new_process=False, number_of_sub_processes=0): From 18b49a0309b0522380355aa16a7560dfab1bf8ef Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:27:06 +0200 Subject: [PATCH 073/174] Store number_of_sub_processes and start_new_process in simulation_meta_data --- opengate/managers.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index b10031c2f..b6ef4ea83 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1777,6 +1777,9 @@ def run(self, start_new_process=False, number_of_sub_processes=0): if number_of_sub_processes == 1: start_new_process = True + self.meta_data.number_of_sub_processes = number_of_sub_processes + self.meta_data.start_new_process = start_new_process + # prepare sub process if start_new_process is True: """Important: put: From 0a4b23770ac59b3ece665ce37e3ad8e1ca6b879a Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:27:54 +0200 Subject: [PATCH 074/174] Introduce avoid_write_to_disk_in_subprocess kwarg in Simulation.run() --- opengate/managers.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index b6ef4ea83..c94c7826d 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1766,7 +1766,7 @@ def run_in_process(self, multi_process_handler, process_index, avoid_write_to_di multi_process_handler.get_original_run_timing_indices_for_process(process_index)) return output - def run(self, start_new_process=False, number_of_sub_processes=0): + def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to_disk_in_subprocess=True): # if windows and MT -> fail if os.name == "nt" and 
self.multithreaded: fatal( @@ -1817,8 +1817,7 @@ def run(self, start_new_process=False, number_of_sub_processes=0): with multiprocessing.Pool(len(run_timing_interval_map)) as pool: print("pool._outqueue: ", pool._outqueue) # DEMO results = [pool.apply_async(self.run_in_process, - (k, v['run_timing_intervals'], v['lut_original_rti'],)) - for k, v in run_timing_interval_map.items()] + (multi_proc_handler, i, avoid_write_to_disk_in_subprocess)) for i in range(number_of_sub_processes)] # `.apply_async()` immediately returns AsyncResult (ApplyResult) object print(results[0]) # DEMO list_of_output = [res.get() for res in results] From 805468ae2e72d6665654278409bae7c68b47e3d9 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:29:09 +0200 Subject: [PATCH 075/174] Use multi_proc_handler in Simulation.run() --- opengate/managers.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index c94c7826d..cbbfd923a 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1807,14 +1807,17 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to source.fTotalZeroEvents = s.user_info.fTotalZeroEvents elif number_of_sub_processes > 1: - run_timing_interval_map = self.generate_run_timing_interval_map(number_of_sub_processes) + multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval(name='multi_proc_handler', + simulation=self, + number_of_processes=number_of_sub_processes) + multi_proc_handler.initialize() try: multiprocessing.set_start_method("spawn") except RuntimeError: print("Could not set start method 'spawn'.") pass # q = multiprocessing.Queue() - with multiprocessing.Pool(len(run_timing_interval_map)) as pool: + with multiprocessing.Pool(number_of_sub_processes) as pool: print("pool._outqueue: ", pool._outqueue) # DEMO results = [pool.apply_async(self.run_in_process, (multi_proc_handler, i, avoid_write_to_disk_in_subprocess)) for i in range(number_of_sub_processes)] From fa5ece3ffeecf675d415414b69accb270199bee9 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:29:59 +0200 Subject: [PATCH 076/174] Trigger import_user_output_from_actor() after multi_proc run --- opengate/managers.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index cbbfd923a..2f6bb844c 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1826,6 +1826,9 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to list_of_output = [res.get() for res in results] print(f'list_of_output: {list_of_output}') return list_of_output + + for actor in self.actor_manager.actors.values(): + actor.import_user_output_from_actor(*[o.get_actor(actor.name) for o in list_of_output]) # processes = [] # for k, v in run_timing_interval_map.items(): # p = multiprocessing.Process( From af063354dbbf057d1c63c92dd173788481315fb7 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:31:14 +0200 Subject: [PATCH 077/174] store meta_data after run --- opengate/managers.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index 2f6bb844c..8393bd6d7 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1806,6 +1806,8 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to source.fTotalSkippedEvents = s.user_info.fTotalSkippedEvents source.fTotalZeroEvents = s.user_info.fTotalZeroEvents + self.meta_data.import_from_simulation_output(output) + 
elif number_of_sub_processes > 1: multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval(name='multi_proc_handler', simulation=self, @@ -1829,6 +1831,10 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to for actor in self.actor_manager.actors.values(): actor.import_user_output_from_actor(*[o.get_actor(actor.name) for o in list_of_output]) + + self.meta_data.import_from_simulation_output(*list_of_output) + for i, o in enumerate(list_of_output): + self.meta_data_per_process[i] = SimulationMetaData(simulation_output=o) # processes = [] # for k, v in run_timing_interval_map.items(): # p = multiprocessing.Process( @@ -1849,6 +1855,7 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to # Nothing special to do if the simulation engine ran in the native python process # because everything is already in place. output = self._run_simulation_engine(False) + self.meta_data.import_from_simulation_output(output) self._user_warnings.extend(output.warnings) From 002c00056c6bf995623c8cbd6b24de88bb1960aa Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:32:45 +0200 Subject: [PATCH 078/174] Get parameters from sources after multiproc run --- opengate/managers.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index 8393bd6d7..0de76ee62 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1851,6 +1851,21 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to # except queue.Empty: # fatal("The queue is empty. The spawned process probably died.") # return output + + # FIXME: temporary workaround to collect extra info from output + # will be implemented similar to actor.import_user_output_from_actor after source refactoring + for source in self.source_manager.user_info_sources.values(): + for o in list_of_output: + try: + s = o.get_source(source.name) + except: + continue + if "fTotalSkippedEvents" in s.user_info.__dict__: + if not hasattr(source, "fTotalSkippedEvents"): + source.fTotalSkippedEvents = 0 + source.fTotalZeroEvents = 0 + source.fTotalSkippedEvents += s.user_info.fTotalSkippedEvents + source.fTotalZeroEvents += s.user_info.fTotalZeroEvents else: # Nothing special to do if the simulation engine ran in the native python process # because everything is already in place. 
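The dispatch to sub-processes wired into Simulation.run() above follows the standard multiprocessing Pool.apply_async()/get() pattern under the "spawn" start method: one task per sub-process, AsyncResult handles returned immediately, and the per-process outputs collected in submission order before being merged back into the main simulation. A minimal self-contained sketch of that pattern, where the worker function and interval values are placeholders rather than opengate code:

    import multiprocessing
    import os


    def run_one_interval(process_index, t_start, t_end):
        # stand-in for Simulation.run_in_process(): each worker operates on its
        # own copy of the dispatched state and returns a picklable result
        return (process_index, os.getpid(), [t_start, t_end])


    if __name__ == "__main__":
        intervals = [(0.0, 0.5), (0.5, 1.0), (1.0, 1.5), (1.5, 2.0)]
        multiprocessing.set_start_method("spawn", force=True)
        with multiprocessing.Pool(len(intervals)) as pool:
            # apply_async() returns immediately with AsyncResult objects ...
            async_results = [
                pool.apply_async(run_one_interval, (i, t0, t1))
                for i, (t0, t1) in enumerate(intervals)
            ]
            # ... and get() blocks until the corresponding worker has finished
            outputs = [res.get() for res in async_results]
        print(outputs)

Because "spawn" pickles the arguments, every worker receives its own copy of the dispatched object; this is why run_in_process() can freely adapt self (output_dir, run_timing_intervals, dynamic parametrisations) without affecting the simulation in the parent process.
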
From 43ae7d25746dd7551b0666d6b6eaf292e633f3b5 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:33:00 +0200 Subject: [PATCH 079/174] Remove obsolete code --- opengate/managers.py | 29 ++++++----------------------- 1 file changed, 6 insertions(+), 23 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 0de76ee62..e7221d8c7 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1827,7 +1827,6 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to print(results[0]) # DEMO list_of_output = [res.get() for res in results] print(f'list_of_output: {list_of_output}') - return list_of_output for actor in self.actor_manager.actors.values(): actor.import_user_output_from_actor(*[o.get_actor(actor.name) for o in list_of_output]) @@ -1835,22 +1834,6 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to self.meta_data.import_from_simulation_output(*list_of_output) for i, o in enumerate(list_of_output): self.meta_data_per_process[i] = SimulationMetaData(simulation_output=o) - # processes = [] - # for k, v in run_timing_interval_map.items(): - # p = multiprocessing.Process( - # target=target_func, - # args=(q, self.run_in_process, k, v['run_timing_intervals'], v['lut_original_rti']) - # ) - # p.start() - # processes.append(p) - # for p in processes: - # p.join() # (timeout=10) # timeout might be needed - # - # try: - # output = q.get(block=False) - # except queue.Empty: - # fatal("The queue is empty. The spawned process probably died.") - # return output # FIXME: temporary workaround to collect extra info from output # will be implemented similar to actor.import_user_output_from_actor after source refactoring @@ -1872,14 +1855,14 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to output = self._run_simulation_engine(False) self.meta_data.import_from_simulation_output(output) - self._user_warnings.extend(output.warnings) + # self._user_warnings.extend(output.warnings) - # FIXME workaround - self.expected_number_of_events = output.expected_number_of_events + # # FIXME workaround + # self.expected_number_of_events = output.expected_number_of_events + + # self.user_hook_log = output.user_hook_log + # self._current_random_seed = output.current_random_seed - # store the hook log - self.user_hook_log = output.user_hook_log - self._current_random_seed = output.current_random_seed if self.store_json_archive is True: self.to_json_file() From 78eecbb166bca963385c7d5c7d18f4c87587e4a7 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:34:31 +0200 Subject: [PATCH 080/174] Trigger FinalizeSimulation() at the end of a run --- opengate/managers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index e7221d8c7..d08748c8b 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1863,6 +1863,8 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to # self.user_hook_log = output.user_hook_log # self._current_random_seed = output.current_random_seed + for actor in self.actor_manager.actors.values(): + actor.FinalizeSimulation() if self.store_json_archive is True: self.to_json_file() From c6b9ae126edcdaac5c1c6bcb83d1bf1c747ba07d Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:35:20 +0200 Subject: [PATCH 081/174] remove obsolete code --- opengate/managers.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 
d08748c8b..440f6ae02 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1855,14 +1855,6 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to output = self._run_simulation_engine(False) self.meta_data.import_from_simulation_output(output) - # self._user_warnings.extend(output.warnings) - - # # FIXME workaround - # self.expected_number_of_events = output.expected_number_of_events - - # self.user_hook_log = output.user_hook_log - # self._current_random_seed = output.current_random_seed - for actor in self.actor_manager.actors.values(): actor.FinalizeSimulation() From d6d5b37fd87572e53168793073ba2c91a5bb3f3a Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:35:33 +0200 Subject: [PATCH 082/174] Add test008_dose_actor_multiproc.py --- .../tests/src/test008_dose_actor_multiproc.py | 126 ++++++++++++++++++ 1 file changed, 126 insertions(+) create mode 100755 opengate/tests/src/test008_dose_actor_multiproc.py diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py new file mode 100755 index 000000000..5cf6099b6 --- /dev/null +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import opengate as gate +from opengate.tests import utility +from scipy.spatial.transform import Rotation +from pathlib import Path + +if __name__ == "__main__": + paths = utility.get_default_test_paths(__file__, "gate_test008_dose_actor") + ref_path = paths.gate_output + + # create the simulation + sim = gate.Simulation() + + # main options + sim.g4_verbose = False + sim.g4_verbose_level = 1 + sim.visu = False + sim.random_seed = 12345678 + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem + + # shortcuts for units + m = gate.g4_units.m + cm = gate.g4_units.cm + + # change world size + world = sim.world + world.size = [1 * m, 1 * m, 1 * m] + + # add a simple fake volume to test hierarchy + # translation and rotation like in the Gate macro + fake = sim.add_volume("Box", "fake") + fake.size = [40 * cm, 40 * cm, 40 * cm] + fake.translation = [1 * cm, 2 * cm, 3 * cm] + fake.rotation = Rotation.from_euler("x", 10, degrees=True).as_matrix() + fake.material = "G4_AIR" + fake.color = [1, 0, 1, 1] + + # waterbox + waterbox = sim.add_volume("Box", "waterbox") + waterbox.mother = "fake" + waterbox.size = [10 * cm, 10 * cm, 10 * cm] + waterbox.translation = [-3 * cm, -2 * cm, -1 * cm] + waterbox.rotation = Rotation.from_euler("y", 20, degrees=True).as_matrix() + waterbox.material = "G4_WATER" + waterbox.color = [0, 0, 1, 1] + + # physics + sim.physics_manager.physics_list_name = "QGSP_BERT_EMV" + sim.physics_manager.enable_decay = False + sim.physics_manager.apply_cuts = True # default + um = gate.g4_units.um + global_cut = 700 * um + sim.physics_manager.global_production_cuts.gamma = global_cut + sim.physics_manager.global_production_cuts.electron = global_cut + sim.physics_manager.global_production_cuts.positron = global_cut + sim.physics_manager.global_production_cuts.proton = global_cut + + # default source for tests + source = sim.add_source("GenericSource", "mysource") + MeV = gate.g4_units.MeV + Bq = gate.g4_units.Bq + source.energy.mono = 150 * MeV + nm = gate.g4_units.nm + source.particle = "proton" + source.position.type = "disc" + source.position.radius = 1 * nm + source.direction.type = "momentum" + source.direction.momentum = [0, 0, 1] + source.activity = 50000 * Bq + + # add dose actor + dose = 
sim.add_actor("DoseActor", "dose") + dose.attached_to = "waterbox" + dose.size = [99, 99, 99] + mm = gate.g4_units.mm + dose.spacing = [2 * mm, 2 * mm, 2 * mm] + dose.translation = [2 * mm, 3 * mm, -2 * mm] + dose.edep_uncertainty.active = True + dose.hit_type = "random" + dose.output_coordinate_system = "local" + dose.output_filename = "test.nii.gz" + + # add stat actor + stat = sim.add_actor("SimulationStatisticsActor", "Stats") + stat.track_types_flag = True + + # start simulation + sim.run(number_of_sub_processes=4 ) + + # # print results at the end + # print(stat) + # print(dose) + # + # # tests + # stats_ref = utility.read_stat_file(ref_path / "stat.txt") + # is_ok = utility.assert_stats(stat, stats_ref, 0.11) + # + # print("\nDifference for EDEP") + # is_ok = ( + # utility.assert_images( + # ref_path / "output-Edep.mhd", + # dose.edep.get_output_path(), + # stat, + # tolerance=13, + # ignore_value=0, + # sum_tolerance=1, + # ) + # and is_ok + # ) + # + # print("\nDifference for uncertainty") + # is_ok = ( + # utility.assert_images( + # ref_path / "output-Edep-Uncertainty.mhd", + # dose.edep_uncertainty.get_output_path(), + # stat, + # tolerance=30, + # ignore_value=1, + # sum_tolerance=1, + # ) + # and is_ok + # ) + # + # utility.test_ok(is_ok) From 53ceb61e3896a7d0d47ed29379c909fe015c6627 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:35:51 +0200 Subject: [PATCH 083/174] Update test080_multiprocessing_1.py --- opengate/tests/src/test080_multiprocessing_1.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/opengate/tests/src/test080_multiprocessing_1.py b/opengate/tests/src/test080_multiprocessing_1.py index d79819b2e..90bf61400 100755 --- a/opengate/tests/src/test080_multiprocessing_1.py +++ b/opengate/tests/src/test080_multiprocessing_1.py @@ -13,6 +13,7 @@ sim = gate.Simulation() sim.run_timing_intervals = [[0 * s, 1 * s], [1 * s, 3 * s], [10 * s, 15 * s]] sim.output_dir = paths.output + sim.store_json_archive = True box1 = sim.add_volume("BoxVolume", "box1") box1.add_dynamic_parametrisation( @@ -20,8 +21,6 @@ ) n_proc = 4 * len(sim.run_timing_intervals) - run_timing_interval_map = sim.generate_run_timing_interval_map(n_proc) - print(run_timing_interval_map) output = sim.run(number_of_sub_processes=n_proc) @@ -31,5 +30,5 @@ print(f"ID of the main sim: {id(sim)}") - ids = [e[2] for e in output] + ids = [o.simulation_id for o in output] assert id(sim) not in ids From 38896f053f659fafb52d9ad8b88b6e061ee47e29 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:36:49 +0200 Subject: [PATCH 084/174] Update GateObject to rename property 'warnings' in Simulation --- opengate/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengate/base.py b/opengate/base.py index 208e5d2b2..91000ebd2 100644 --- a/opengate/base.py +++ b/opengate/base.py @@ -656,7 +656,7 @@ def warn_user(self, message): self._temporary_warning_cache.append(message) # if possible, register the warning directly else: - self.simulation._user_warnings.append(message) + self.simulation.warnings.append(message) warning(message) From 0e9928de77b86a9dd098a3538eb2d853656f247d Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:37:22 +0200 Subject: [PATCH 085/174] Update run_engine to use Simulation.meta_data --- opengate/engines.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengate/engines.py b/opengate/engines.py index d2b451683..41d644d4c 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -1151,7 
+1151,7 @@ def run_engine(self): # because everything else has already been executed in the main process # and potential warnings have already been registered. if self.new_process is True: - self.simulation.reset_warnings() + self.simulation.meta_data.reset_warnings() # initialization self.initialize() From 00203077f5e88fb9834d40b8f1ca7ef8ec12bf68 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 13 Oct 2024 00:39:23 +0000 Subject: [PATCH 086/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/actors/actoroutput.py | 15 +++- opengate/actors/base.py | 10 ++- opengate/engines.py | 4 +- opengate/managers.py | 69 +++++++++++++------ opengate/processing.py | 42 ++++++----- .../tests/src/test008_dose_actor_multiproc.py | 2 +- .../src/test080_multiprocessing_handler.py | 18 +++-- 7 files changed, 105 insertions(+), 55 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 073d9723d..fe8f473eb 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -423,13 +423,22 @@ def import_data_from_actor_output(self, *actor_output, discard_existing_data=Tru run_indices_to_import = set() for ao in actor_output: run_indices_to_import.union(ao.data_per_run.keys()) - which_output_per_run_index = dict([(r, [ao for ao in actor_output if r in ao.data_per_run]) for r in run_indices_to_import]) + which_output_per_run_index = dict( + [ + (r, [ao for ao in actor_output if r in ao.data_per_run]) + for r in run_indices_to_import + ] + ) for r in run_indices_to_import: - data_to_import = [ao.data_per_run[r] for ao in which_output_per_run_index[r]] + data_to_import = [ + ao.data_per_run[r] for ao in which_output_per_run_index[r] + ] if discard_existing_data is False and r in self.data_per_run: data_to_import.append(self.data_per_run[r]) self.data_per_run[r] = merge_data(data_to_import) - merged_data_to_import = [ao.merged_data for ao in actor_output if ao.merged_data is not None] + merged_data_to_import = [ + ao.merged_data for ao in actor_output if ao.merged_data is not None + ] if discard_existing_data is False and self.merged_data is not None: merged_data_to_import.append(self.merged_data) if len(merged_data_to_import) > 0: diff --git a/opengate/actors/base.py b/opengate/actors/base.py index 942488dd3..df9396f0e 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -437,10 +437,14 @@ def import_user_output_from_actor(self, *actor): else: for k in self.user_output: try: - self.user_output[k].import_data_from_actor_output(*[a.user_output[k] for a in actor]) + self.user_output[k].import_data_from_actor_output( + *[a.user_output[k] for a in actor] + ) except NotImplementedError: - self.warn_user(f"User output {k} in {self.type_name} cannot be imported " - f"because the function is not yet implemented for this type of output.") + self.warn_user( + f"User output {k} in {self.type_name} cannot be imported " + f"because the function is not yet implemented for this type of output." 
+ ) def store_output_data(self, output_name, run_index, *data): self._assert_output_exists(output_name) diff --git a/opengate/engines.py b/opengate/engines.py index 41d644d4c..afdf9eeae 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -932,7 +932,9 @@ def store_output_from_simulation_engine(self, simulation_engine): self.store_sources(simulation_engine) self.store_hook_log(simulation_engine) self.current_random_seed = simulation_engine.current_random_seed - self.expected_number_of_events = simulation_engine.source_engine.expected_number_of_events + self.expected_number_of_events = ( + simulation_engine.source_engine.expected_number_of_events + ) self.warnings = simulation_engine.simulation.warnings self.simulation_id = id(simulation_engine.simulation) diff --git a/opengate/managers.py b/opengate/managers.py index 440f6ae02..d27c9ca52 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -47,7 +47,10 @@ ) from .userinfo import UserInfo from .serialization import dump_json, dumps_json, loads_json, load_json -from .processing import dispatch_to_subprocess, MultiProcessingHandlerEqualPerRunTimingInterval +from .processing import ( + dispatch_to_subprocess, + MultiProcessingHandlerEqualPerRunTimingInterval, +) from .geometry.volumes import ( VolumeBase, @@ -1532,7 +1535,9 @@ def __getattr__(self, item): try: return self.meta_data[item] except KeyError: - raise AttributeError(f"Item {item} not found in {type(self)}, nor in the simulation meta data. ") + raise AttributeError( + f"Item {item} not found in {type(self)}, nor in the simulation meta data. " + ) def __str__(self): s = ( @@ -1734,24 +1739,30 @@ def _run_simulation_engine(self, start_new_process, process_index=None): output = se.run_engine() return output - def run_in_process(self, multi_process_handler, process_index, avoid_write_to_disk_in_subprocess): + def run_in_process( + self, multi_process_handler, process_index, avoid_write_to_disk_in_subprocess + ): # Important: this method is intended to run in a processes spawned off the main process. # Therefore, self is actually a separate instance from the original simulation # and we can safely adapt it in this process. 
# adapt the output_dir - self.output_dir = str(Path(self.output_dir) / f'process_{process_index}') + self.output_dir = str(Path(self.output_dir) / f"process_{process_index}") print("self.output_dir = ", self.output_dir) # adapt the run timing intervals in - self.run_timing_intervals = multi_process_handler.get_run_timing_intervals_for_process(process_index) + self.run_timing_intervals = ( + multi_process_handler.get_run_timing_intervals_for_process(process_index) + ) # adapt all dynamic volumes for vol in self.volume_manager.dynamic_volumes: vol.reassign_dynamic_params_for_process( - multi_process_handler.get_original_run_timing_indices_for_process(process_index) + multi_process_handler.get_original_run_timing_indices_for_process( + process_index + ) ) print(process_index) - print(f'Volume {vol.name}:') + print(f"Volume {vol.name}:") print(vol.user_info["dynamic_params"]) if avoid_write_to_disk_in_subprocess is True: @@ -1759,14 +1770,23 @@ def run_in_process(self, multi_process_handler, process_index, avoid_write_to_di actor.write_to_disk = False output = self._run_simulation_engine(True, process_index=process_index) - print(process_index, - os.getpid(), - id(self), - multi_process_handler.get_run_timing_intervals_for_process(process_index), - multi_process_handler.get_original_run_timing_indices_for_process(process_index)) + print( + process_index, + os.getpid(), + id(self), + multi_process_handler.get_run_timing_intervals_for_process(process_index), + multi_process_handler.get_original_run_timing_indices_for_process( + process_index + ), + ) return output - def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to_disk_in_subprocess=True): + def run( + self, + start_new_process=False, + number_of_sub_processes=0, + avoid_write_to_disk_in_subprocess=True, + ): # if windows and MT -> fail if os.name == "nt" and self.multithreaded: fatal( @@ -1809,9 +1829,11 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to self.meta_data.import_from_simulation_output(output) elif number_of_sub_processes > 1: - multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval(name='multi_proc_handler', - simulation=self, - number_of_processes=number_of_sub_processes) + multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval( + name="multi_proc_handler", + simulation=self, + number_of_processes=number_of_sub_processes, + ) multi_proc_handler.initialize() try: multiprocessing.set_start_method("spawn") @@ -1821,15 +1843,22 @@ def run(self, start_new_process=False, number_of_sub_processes=0, avoid_write_to # q = multiprocessing.Queue() with multiprocessing.Pool(number_of_sub_processes) as pool: print("pool._outqueue: ", pool._outqueue) # DEMO - results = [pool.apply_async(self.run_in_process, - (multi_proc_handler, i, avoid_write_to_disk_in_subprocess)) for i in range(number_of_sub_processes)] + results = [ + pool.apply_async( + self.run_in_process, + (multi_proc_handler, i, avoid_write_to_disk_in_subprocess), + ) + for i in range(number_of_sub_processes) + ] # `.apply_async()` immediately returns AsyncResult (ApplyResult) object print(results[0]) # DEMO list_of_output = [res.get() for res in results] - print(f'list_of_output: {list_of_output}') + print(f"list_of_output: {list_of_output}") for actor in self.actor_manager.actors.values(): - actor.import_user_output_from_actor(*[o.get_actor(actor.name) for o in list_of_output]) + actor.import_user_output_from_actor( + *[o.get_actor(actor.name) for o in list_of_output] + ) 
self.meta_data.import_from_simulation_output(*list_of_output) for i, o in enumerate(list_of_output): diff --git a/opengate/processing.py b/opengate/processing.py index 27ac14901..44b8e64a2 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -4,6 +4,7 @@ from .exception import fatal from .base import GateObject + # define thin wrapper function to handle the queue def target_func(q, f, *args, **kwargs): q.put(f(*args, **kwargs)) @@ -41,13 +42,13 @@ def _setter_hook_number_of_processes(self, number_of_processes): class MultiProcessingHandlerBase(GateObject): user_info_defaults = { - 'number_of_processes': ( + "number_of_processes": ( 1, { "doc": "In how many parallel process should the simulation be run? " - "Must be a multiple of the number of run timing intervals. ", + "Must be a multiple of the number of run timing intervals. ", "setter_hook": _setter_hook_number_of_processes, - } + }, ) } @@ -78,22 +79,21 @@ def initialize(self): self.generate_dispatch_configuration() def get_original_run_timing_indices_for_process(self, process_index): - return self.dispatch_configuration[process_index]['lut_original_rti'] + return self.dispatch_configuration[process_index]["lut_original_rti"] def get_run_timing_intervals_for_process(self, process_index): - return self.dispatch_configuration[process_index]['run_timing_intervals'] + return self.dispatch_configuration[process_index]["run_timing_intervals"] def generate_dispatch_configuration(self): raise NotImplementedError def update_process_to_run_index_maps(self): - """Creates a mapping (process index, local run index) -> (original run index) - """ + """Creates a mapping (process index, local run index) -> (original run index)""" self.assert_dispatch_configuration() p_r_map = {} for k, v in self.dispatch_configuration.items(): - for lri, ori in enumerate(v['lut_original_rti']): + for lri, ori in enumerate(v["lut_original_rti"]): p_r_map[(k, lri)] = ori # and the inverse @@ -105,32 +105,40 @@ def update_process_to_run_index_maps(self): self.inverse_process_to_run_index_map = p_r_map_inv def dispatch_to_processes(self, dispatch_function, *args): - return [dispatch_function(i, *args) for i in range(len(self.dispatch_configuration))] + return [ + dispatch_function(i, *args) for i in range(len(self.dispatch_configuration)) + ] class MultiProcessingHandlerEqualPerRunTimingInterval(MultiProcessingHandlerBase): def generate_dispatch_configuration(self): if self.number_of_processes % len(self.original_run_timing_intervals) != 0: - fatal("number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" - f"but I received {self.number_of_processes}, while there are {len(self.original_run_timing_intervals)}.") + fatal( + "number_of_sub_processes must be a multiple of the number of run_timing_intervals, \n" + f"but I received {self.number_of_processes}, while there are {len(self.original_run_timing_intervals)}." 
+ ) - number_of_processes_per_run = int(self.number_of_processes / len(self.original_run_timing_intervals)) + number_of_processes_per_run = int( + self.number_of_processes / len(self.original_run_timing_intervals) + ) dispatch_configuration = {} process_index = 0 for i, rti in enumerate(self.original_run_timing_intervals): t_start, t_end = rti duration_original = t_end - t_start duration_in_process = duration_original / number_of_processes_per_run - t_intermediate = [t_start + (j+1) * duration_in_process for j in range(number_of_processes_per_run-1)] + t_intermediate = [ + t_start + (j + 1) * duration_in_process + for j in range(number_of_processes_per_run - 1) + ] t_all = [t_start] + t_intermediate + [t_end] for t_s, t_e in zip(t_all[:-1], t_all[1:]): dispatch_configuration[process_index] = { - 'run_timing_intervals': [[t_s, t_e]], - 'lut_original_rti': [i], - 'process_id': None + "run_timing_intervals": [[t_s, t_e]], + "lut_original_rti": [i], + "process_id": None, } process_index += 1 self.dispatch_configuration = dispatch_configuration return dispatch_configuration - diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index 5cf6099b6..e166c4bbc 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -87,7 +87,7 @@ stat.track_types_flag = True # start simulation - sim.run(number_of_sub_processes=4 ) + sim.run(number_of_sub_processes=4) # # print results at the end # print(stat) diff --git a/opengate/tests/src/test080_multiprocessing_handler.py b/opengate/tests/src/test080_multiprocessing_handler.py index daabbff61..db1d18e9f 100755 --- a/opengate/tests/src/test080_multiprocessing_handler.py +++ b/opengate/tests/src/test080_multiprocessing_handler.py @@ -6,11 +6,8 @@ from opengate.processing import MultiProcessingHandlerEqualPerRunTimingInterval - if __name__ == "__main__": - paths = get_default_test_paths( - __file__, output_folder="test080" - ) + paths = get_default_test_paths(__file__, output_folder="test080") s = g4_units.s @@ -18,12 +15,13 @@ sim.run_timing_intervals = [[0 * s, 1 * s], [1 * s, 3 * s], [10 * s, 15 * s]] sim.output_dir = paths.output - box1 = sim.add_volume('BoxVolume', 'box1') - box1.add_dynamic_parametrisation(translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))]) + box1 = sim.add_volume("BoxVolume", "box1") + box1.add_dynamic_parametrisation( + translation=[[i, i, i] for i in range(len(sim.run_timing_intervals))] + ) n_proc = 4 * len(sim.run_timing_intervals) - multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval(name='multi_proc_handler', - simulation=sim, - number_of_processes=n_proc) - + multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval( + name="multi_proc_handler", simulation=sim, number_of_processes=n_proc + ) From 26128571440ebd4d7b766bd03d54d25b711638a4 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 23:33:44 +0200 Subject: [PATCH 087/174] Rename FinalizeSimulation() to EndOfMultiProcessAction() --- opengate/actors/base.py | 2 +- opengate/actors/doseactors.py | 2 +- opengate/managers.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/opengate/actors/base.py b/opengate/actors/base.py index df9396f0e..83155de8d 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -475,7 +475,7 @@ def EndSimulationAction(self): """Default virtual method for inheritance""" pass - def FinalizeSimulation(self): + def 
EndOfMultiProcessAction(self): pass diff --git a/opengate/actors/doseactors.py b/opengate/actors/doseactors.py index a777273c3..e71e153f8 100644 --- a/opengate/actors/doseactors.py +++ b/opengate/actors/doseactors.py @@ -254,7 +254,7 @@ def inform_user_output_about_end(self): def EndSimulationAction(self): self.inform_user_output_about_end() - def FinalizeSimulation(self): + def EndOfMultiProcessAction(self): self.inform_user_output_about_end() diff --git a/opengate/managers.py b/opengate/managers.py index d27c9ca52..891a73787 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1860,6 +1860,9 @@ def run( *[o.get_actor(actor.name) for o in list_of_output] ) + for actor in self.actor_manager.actors.values(): + actor.EndOfMultiProcessAction() + self.meta_data.import_from_simulation_output(*list_of_output) for i, o in enumerate(list_of_output): self.meta_data_per_process[i] = SimulationMetaData(simulation_output=o) @@ -1884,9 +1887,6 @@ def run( output = self._run_simulation_engine(False) self.meta_data.import_from_simulation_output(output) - for actor in self.actor_manager.actors.values(): - actor.FinalizeSimulation() - if self.store_json_archive is True: self.to_json_file() From 956be1cbf90b9a0165365ce6ab4fc1486276ca4c Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 14 Oct 2024 08:47:05 +0200 Subject: [PATCH 088/174] Implement explicit inplace_merge_with in ItkImageDataItem --- opengate/actors/dataitems.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index 119b79698..f9e8fc645 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -268,6 +268,12 @@ def __itruediv__(self, other): self.set_data(divide_itk_images(self.data, other.data)) return self + def inplace_merge_with(self, *other): + data_to_merge = [self.data] + [o.data for o in other] + if self.data is not None: + data_to_merge += [self.data] + self.data = sum_itk_images(data_to_merge) + def set_image_properties(self, **properties): if not self.data_is_none: if "spacing" in properties and properties["spacing"] is not None: From 7bb0000491e1635b2ec2a40bdf2ecc1bbfd86340 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 14 Oct 2024 08:47:58 +0200 Subject: [PATCH 089/174] Adapt merge_data to try to accelerate it (WIP) --- opengate/actors/dataitems.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index f9e8fc645..a86336ea1 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -754,8 +754,13 @@ class QuotientMeanItkImage(QuotientItkImage): def merge_data(list_of_data): merged_data = list_of_data[0] - for d in list_of_data[1:]: - merged_data.inplace_merge_with(d) + try: + print(f"DEBUG type(merged_data) = '{type(merged_data)}'") + merged_data.inplace_merge_with(*list_of_data[1:]) + except: + print("DEBUG went into except block") + for d in list_of_data[1:]: + merged_data.inplace_merge_with(d) return merged_data From d5c7c402c84a72a1f03d2ba515a0c05dd884c2cf Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 14 Oct 2024 22:34:32 +0200 Subject: [PATCH 090/174] Implement sum_itk_images based on SimpleITK (workaround) --- opengate/image.py | 55 +++++++++++++++++++++++++++++++++++++---------- 1 file changed, 44 insertions(+), 11 deletions(-) diff --git a/opengate/image.py b/opengate/image.py index 010aadb24..82cbfda59 100644 --- a/opengate/image.py +++ b/opengate/image.py @@ -12,6 +12,8 @@ ) from .definitions import 
__gate_list_objects__ +import SimpleITK as sitk + def update_image_py_to_cpp(py_img, cpp_img, copy_data=False): cpp_img.set_size(py_img.GetLargestPossibleRegion().GetSize()) @@ -354,17 +356,48 @@ def divide_itk_images( imgarrOut.CopyInformation(img1_numerator) return imgarrOut - -def sum_itk_images(images): - image_type = type(images[0]) - add_image_filter = itk.AddImageFilter[image_type, image_type, image_type].New() - output = images[0] - for img in images[1:]: - add_image_filter.SetInput1(output) - add_image_filter.SetInput2(img) - add_image_filter.Update() - output = add_image_filter.GetOutput() - return output +# IMPLEMENTATION BASED ON ITK +# def sum_itk_images(images): +# image_type = type(images[0]) +# add_image_filter = itk.AddImageFilter[image_type, image_type, image_type].New() +# output = images[0] +# for img in images[1:]: +# add_image_filter.SetInput1(output) +# add_image_filter.SetInput2(img) +# add_image_filter.Update() +# output = add_image_filter.GetOutput() +# return output + + +def itk_to_sitk(itk_image): + array = itk.GetArrayFromImage(itk_image) + sitk_image = sitk.GetImageFromArray(array) + sitk_image.SetOrigin(np.array(itk_image.GetOrigin())) + sitk_image.SetSpacing(np.array(itk_image.GetSpacing())) + sitk_image.SetDirection(np.array(itk_image.GetDirection()).flatten()) + return sitk_image + + +def sitk_to_itk(sitk_image): + array = sitk.GetArrayFromImage(sitk_image) # Convert SimpleITK image to NumPy array + itk_image = itk.GetImageFromArray(array) # Convert NumPy array to ITK image + + # Set the metadata from SimpleITK to ITK image + itk_image.SetOrigin(np.array(sitk_image.GetOrigin())) + itk_image.SetSpacing(np.array(sitk_image.GetSpacing())) + itk_image.SetDirection(np.array(sitk_image.GetDirection()).reshape(3, 3)) + + return itk_image + + +def sum_itk_images(itk_image_list): + if not itk_image_list: + raise ValueError("The image list is empty.") + summed_image = itk_to_sitk(itk_image_list[0]) + for itk_image in itk_image_list[1:]: + sitk_image = itk_to_sitk(itk_image) + summed_image = sitk.Add(summed_image, sitk_image) + return sitk_to_itk(summed_image) def multiply_itk_images(images): From 481dbc4eb8d2bc3cc88358c4ef54681c249426fa Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 14 Oct 2024 22:35:26 +0200 Subject: [PATCH 091/174] remove debug prints and clean code --- opengate/managers.py | 21 ++++----------------- 1 file changed, 4 insertions(+), 17 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 891a73787..4268abf64 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1748,7 +1748,6 @@ def run_in_process( # adapt the output_dir self.output_dir = str(Path(self.output_dir) / f"process_{process_index}") - print("self.output_dir = ", self.output_dir) # adapt the run timing intervals in self.run_timing_intervals = ( @@ -1761,24 +1760,13 @@ def run_in_process( process_index ) ) - print(process_index) - print(f"Volume {vol.name}:") - print(vol.user_info["dynamic_params"]) if avoid_write_to_disk_in_subprocess is True: for actor in self.actor_manager.actors.values(): actor.write_to_disk = False output = self._run_simulation_engine(True, process_index=process_index) - print( - process_index, - os.getpid(), - id(self), - multi_process_handler.get_run_timing_intervals_for_process(process_index), - multi_process_handler.get_original_run_timing_indices_for_process( - process_index - ), - ) + print(f"run_in_process finished in process {process_index}") return output def run( @@ -1842,7 +1830,6 @@ def run( pass # q = 
multiprocessing.Queue() with multiprocessing.Pool(number_of_sub_processes) as pool: - print("pool._outqueue: ", pool._outqueue) # DEMO results = [ pool.apply_async( self.run_in_process, @@ -1851,13 +1838,13 @@ def run( for i in range(number_of_sub_processes) ] # `.apply_async()` immediately returns AsyncResult (ApplyResult) object - print(results[0]) # DEMO list_of_output = [res.get() for res in results] - print(f"list_of_output: {list_of_output}") + log.info("End of multiprocessing") + # loop over actors in original simulation for actor in self.actor_manager.actors.values(): actor.import_user_output_from_actor( - *[o.get_actor(actor.name) for o in list_of_output] + *[o.get_actor(actor.name) for o in list_of_output] # these are the actors from the process ) for actor in self.actor_manager.actors.values(): From bffb06cbb25af9f669bd1db4aaf65685e2e34a0c Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 14 Oct 2024 23:58:11 +0200 Subject: [PATCH 092/174] In BaseUserInterfaceToActorOutput.__getstate__: always use return_dict = self.__dict__.copy() --- opengate/actors/actoroutput.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index fe8f473eb..6e3746b8c 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -46,11 +46,12 @@ def __getstate__(self): For earlier python version (<3.11), __getstate__ may not be defined. We provide a simple workaround here to return a copy of the internal dict. """ - try: - return_dict = super().__getstate__() - except AttributeError: - # If there is no superclass with __getstate__, use self.__dict__ - return_dict = self.__dict__.copy() + # try: + # return_dict = super().__getstate__() + # except AttributeError: + # # If there is no superclass with __getstate__, use self.__dict__ + # return_dict = self.__dict__.copy() + return_dict = self.__dict__.copy() # Safely remove 'belongs_to_actor' if it exists return_dict.pop("belongs_to_actor", None) return return_dict From fecd1e15cba6c5a91cbb25955c3f79f033fed561 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:00:02 +0200 Subject: [PATCH 093/174] Implement reset_user_output() and reset_data() --- opengate/actors/actoroutput.py | 4 ++++ opengate/actors/base.py | 4 ++++ opengate/actors/miscactors.py | 2 ++ opengate/managers.py | 6 ++++++ 4 files changed, 16 insertions(+) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 6e3746b8c..2b47f0e6e 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -344,6 +344,10 @@ def get_output_path(self, which="merged", **kwargs): def get_output_path_as_string(self, **kwargs): return ensure_filename_is_str(self.get_output_path(**kwargs)) + def reset_data(self): + self.merged_data = None + self.data_per_run = {} + def close(self): if self.keep_data_in_memory is False: self.data_per_run = {} diff --git a/opengate/actors/base.py b/opengate/actors/base.py index 83155de8d..153d2f6a3 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -200,6 +200,10 @@ def get_data(self, name=None, **kwargs): f" Example: my_actor.{list(self.interfaces_to_user_output.keys())[0]}.get_data(). 
" ) + def reset_user_output(self): + for v in self.user_output.values(): + v.reset_data() + # *** shortcut properties *** @property @shortcut_for_single_output_actor diff --git a/opengate/actors/miscactors.py b/opengate/actors/miscactors.py index f580f6abc..ee56c59d8 100644 --- a/opengate/actors/miscactors.py +++ b/opengate/actors/miscactors.py @@ -59,6 +59,8 @@ def __init__(self, *args, **kwargs): # predefine the merged_data self.merged_data = Box() + + def reset_data(self): self.merged_data.runs = 0 self.merged_data.events = 0 self.merged_data.tracks = 0 diff --git a/opengate/managers.py b/opengate/managers.py index 4268abf64..1e5f51ba1 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1796,6 +1796,9 @@ def run( https://britishgeologicalsurvey.github.io/science/python-forking-vs-spawn/ """ + for actor in self.actor_manager.actors.values(): + actor.reset_user_output() + log.info("Dispatching simulation to subprocess ...") output = dispatch_to_subprocess(self._run_simulation_engine, True) @@ -1829,6 +1832,9 @@ def run( print("Could not set start method 'spawn'.") pass # q = multiprocessing.Queue() + for actor in self.actor_manager.actors.values(): + actor.reset_user_output() + with multiprocessing.Pool(number_of_sub_processes) as pool: results = [ pool.apply_async( From 845544e13fd00851d2ee3c87dc7d64b8e7911171 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:00:36 +0200 Subject: [PATCH 094/174] remove debug print --- opengate/actors/dataitems.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index a86336ea1..40532b7a4 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -755,10 +755,8 @@ class QuotientMeanItkImage(QuotientItkImage): def merge_data(list_of_data): merged_data = list_of_data[0] try: - print(f"DEBUG type(merged_data) = '{type(merged_data)}'") merged_data.inplace_merge_with(*list_of_data[1:]) except: - print("DEBUG went into except block") for d in list_of_data[1:]: merged_data.inplace_merge_with(d) return merged_data From 2450ef572bfb4a1aa282c497df52c45a3c2130bd Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:00:56 +0200 Subject: [PATCH 095/174] Update comment --- opengate/actors/base.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/opengate/actors/base.py b/opengate/actors/base.py index 153d2f6a3..f7c0c56f3 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -129,8 +129,7 @@ class ActorBase(GateObject): def __init__(self, *args, **kwargs): GateObject.__init__(self, *args, **kwargs) - # this is set by the actor engine during initialization - self.actor_engine = None + self.actor_engine = None # set by the actor engine during initializatio self.user_output = Box() self.interfaces_to_user_output = Box() From ee5b1df53e46f7d04258f2ef9bfdcc51b29e5068 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:01:28 +0200 Subject: [PATCH 096/174] Update test008_dose_actor_multiproc.py --- .../tests/src/test008_dose_actor_multiproc.py | 56 ++++++++++++------- 1 file changed, 35 insertions(+), 21 deletions(-) diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index e166c4bbc..5f115e34a 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -5,6 +5,7 @@ from opengate.tests import utility from scipy.spatial.transform import Rotation from pathlib import Path +import 
time if __name__ == "__main__": paths = utility.get_default_test_paths(__file__, "gate_test008_dose_actor") @@ -18,7 +19,6 @@ sim.g4_verbose_level = 1 sim.visu = False sim.random_seed = 12345678 - sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem # shortcuts for units m = gate.g4_units.m @@ -87,28 +87,42 @@ stat.track_types_flag = True # start simulation + t1 = time.time() + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_4' sim.run(number_of_sub_processes=4) + t2 = time.time() + delta_t_nproc4 = t2 - t1 + + path_edep_nproc4 = dose.edep.get_output_path() + + t1 = time.time() + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_1' + sim.run(number_of_sub_processes=1) + t2 = time.time() + delta_t_nproc1 = t2 - t1 + + path_edep_nproc1 = dose.edep.get_output_path() + + # t1 = time.time() + # sim.run(number_of_sub_processes=0) + # t2 = time.time() + # delta_t_no_subproc = t2 - t1 + + print("Simulation times: ") + print(f"One subprocess: {delta_t_nproc1}") + print(f"Four subprocesses: {delta_t_nproc4}, speed-up: {delta_t_nproc1 / delta_t_nproc4}") + # print(f"No subprocess: {delta_t_no_subproc}") - # # print results at the end - # print(stat) - # print(dose) - # # # tests - # stats_ref = utility.read_stat_file(ref_path / "stat.txt") - # is_ok = utility.assert_stats(stat, stats_ref, 0.11) - # - # print("\nDifference for EDEP") - # is_ok = ( - # utility.assert_images( - # ref_path / "output-Edep.mhd", - # dose.edep.get_output_path(), - # stat, - # tolerance=13, - # ignore_value=0, - # sum_tolerance=1, - # ) - # and is_ok - # ) + print("\nDifference for EDEP") + is_ok = utility.assert_images( + path_edep_nproc1, + path_edep_nproc4, + stat, + tolerance=13, + ignore_value=0, + sum_tolerance=1, + ) # # print("\nDifference for uncertainty") # is_ok = ( @@ -123,4 +137,4 @@ # and is_ok # ) # - # utility.test_ok(is_ok) + utility.test_ok(is_ok) From 2f49d873eb59efc8477151ce8a2e505cdf5c44db Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:01:56 +0200 Subject: [PATCH 097/174] Update test009_voxels_dynamic.py (not relevant for test result) --- opengate/tests/src/test009_voxels_dynamic.py | 1 + 1 file changed, 1 insertion(+) diff --git a/opengate/tests/src/test009_voxels_dynamic.py b/opengate/tests/src/test009_voxels_dynamic.py index 459b83d12..4a27a6b17 100755 --- a/opengate/tests/src/test009_voxels_dynamic.py +++ b/opengate/tests/src/test009_voxels_dynamic.py @@ -93,6 +93,7 @@ # add dose actor dose = sim.add_actor("DoseActor", "dose") dose.output_filename = "test009-edep.mhd" + dose.edep.keep_data_per_run = True dose.attached_to = "patient" dose.size = [99, 99, 99] dose.spacing = [2 * mm, 2 * mm, 2 * mm] From 5ccba4e68de7834f4233659b5a8164c44387218f Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:07:43 +0200 Subject: [PATCH 098/174] Increment random_seed in run_in_process --- opengate/managers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index 1e5f51ba1..9188c7d3f 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1748,6 +1748,8 @@ def run_in_process( # adapt the output_dir self.output_dir = str(Path(self.output_dir) / f"process_{process_index}") + if self.random_seed != 'auto': + self.random_seed += process_index # adapt the run timing intervals in self.run_timing_intervals = ( From 775ddb280db834970f1c5b4a38dd1bff0c52d5c7 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 13:09:34 +0200 Subject: [PATCH 099/174] Extend 
inplace_merge_with() to accept *other, i.e. an unpackaged list --- opengate/actors/dataitems.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index 40532b7a4..d2bf76f01 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -92,12 +92,13 @@ def merge_with(self, other): f"because the following ValueError was encountered: \n{e}" ) - def inplace_merge_with(self, other): + def inplace_merge_with(self, *other): """The base class implements merging as summation. Specific classes can override this, e.g. to merge mean values. """ try: - self += other + for o in other: + self.__iadd__(o) except ValueError as e: raise NotImplementedError( f"method 'inplace_merge_with' probably not implemented for data item class {type(self)} " From ccc141d0a83863bfb085f33a2cd4bd61392e6bb5 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 13:09:59 +0200 Subject: [PATCH 100/174] Fix ItkImageDataItem.inplace_merge_with() --- opengate/actors/dataitems.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index d2bf76f01..6265b471b 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -270,10 +270,8 @@ def __itruediv__(self, other): return self def inplace_merge_with(self, *other): - data_to_merge = [self.data] + [o.data for o in other] - if self.data is not None: - data_to_merge += [self.data] - self.data = sum_itk_images(data_to_merge) + for o in other: + self.__iadd__(o) def set_image_properties(self, **properties): if not self.data_is_none: From 2df161a86668972818ea0b98eed021352c39234d Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 13:11:23 +0200 Subject: [PATCH 101/174] Fix typo in error message --- opengate/actors/dataitems.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index 6265b471b..e06f35255 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -530,12 +530,12 @@ def inplace_merge_with(self, other): if (self.data[i] is None or self.data[i].data is None) is not ( other.data[i] is None or other.data[i].data is None ): - s_not = {True: "", False: "not_"} + s_not = {True: "", False: "not "} fatal( "Cannot apply inplace merge data to container " "with unset (None) data items. " - f"In this case, the inplace item {i} is {s_not[self.data[i] is None]} None, " - f"and the other item {i} is {s_not[other.data[i] is None]} None. " + f"In this case, the inplace item {i} is {s_not[self.data[i] is None]}None, " + f"and the other item {i} is {s_not[other.data[i] is None]}None. " f"This is likely an implementation error in GATE. 
" ) return self From 5dcb80b19d90e2090d0a568a6f8c29e652a9634f Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 13:13:08 +0200 Subject: [PATCH 102/174] Update test008_dose_actor_multiproc.py --- .../tests/src/test008_dose_actor_multiproc.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index 5f115e34a..dc3caf324 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -86,23 +86,21 @@ stat = sim.add_actor("SimulationStatisticsActor", "Stats") stat.track_types_flag = True - # start simulation + # # start simulation t1 = time.time() sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_4' - sim.run(number_of_sub_processes=4) + path_edep_nproc4 = dose.edep.get_output_path() + sim.run(number_of_sub_processes=4, avoid_write_to_disk_in_subprocess=False) t2 = time.time() delta_t_nproc4 = t2 - t1 - path_edep_nproc4 = dose.edep.get_output_path() - - t1 = time.time() sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_1' + path_edep_nproc1 = dose.edep.get_output_path() + t1 = time.time() sim.run(number_of_sub_processes=1) t2 = time.time() delta_t_nproc1 = t2 - t1 - path_edep_nproc1 = dose.edep.get_output_path() - # t1 = time.time() # sim.run(number_of_sub_processes=0) # t2 = time.time() @@ -111,16 +109,15 @@ print("Simulation times: ") print(f"One subprocess: {delta_t_nproc1}") print(f"Four subprocesses: {delta_t_nproc4}, speed-up: {delta_t_nproc1 / delta_t_nproc4}") - # print(f"No subprocess: {delta_t_no_subproc}") + # # print(f"No subprocess: {delta_t_no_subproc}") # # tests print("\nDifference for EDEP") is_ok = utility.assert_images( path_edep_nproc1, path_edep_nproc4, - stat, tolerance=13, - ignore_value=0, + ignore_value=None, sum_tolerance=1, ) # From 79788b84496d5f7c68e2a5ae3058ea76c0eb6d72 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 13:14:04 +0200 Subject: [PATCH 103/174] Deprecate unused kwarg 'stat' in assert_images() --- opengate/tests/utility.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/opengate/tests/utility.py b/opengate/tests/utility.py index d1e119bb6..86e14820d 100644 --- a/opengate/tests/utility.py +++ b/opengate/tests/utility.py @@ -322,6 +322,10 @@ def assert_images( scaleImageValuesFactor=None, sad_profile_tolerance=None, ): + + if stats is not None: + DeprecationWarning("kwarg 'stats' in function assert_images is deprecated.") + # read image and info (size, spacing, etc.) 
ref_filename1 = ensure_filename_is_str(ref_filename1) filename2 = ensure_filename_is_str(filename2) @@ -377,11 +381,6 @@ def assert_images( print(f"Image1: {info1.size} {info1.spacing} {info1.origin} {ref_filename1}") print(f"Image2: {info2.size} {info2.spacing} {info2.origin} {filename2}") - # normalise by event - if stats is not None: - d1 = d1 / stats.counts.events - d2 = d2 / stats.counts.events - # normalize by sum of d1 s = np.sum(d2) d1 = d1 / s From 22411d4bcc6028e64398d10060027f7a7c6d373c Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 13:56:56 +0200 Subject: [PATCH 104/174] Update multiproc logic in Simulation class --- opengate/managers.py | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 9188c7d3f..7a553f034 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1214,7 +1214,15 @@ def __init__(self, *args, simulation_output=None, **kwargs): self.number_of_sub_processes = None self.start_new_process = None if simulation_output is not None: - self.import_from_simulation_output(simulation_output) + self.extract_from_simulation_output(simulation_output) + + def reset(self): + self.reset_warnings() + self.expected_number_of_events = 0 + self.user_hook_log = [] + self.current_random_seed = None + self.number_of_sub_processes = None + self.start_new_process = None def reset_warnings(self): self.warnings = [] @@ -1227,7 +1235,7 @@ def import_from_simulation_meta_data(self, *meta_data): if self.current_random_seed is None: self.current_random_seed = m.current_random_seed - def import_from_simulation_output(self, *sim_output): + def extract_from_simulation_output(self, *sim_output): for so in sim_output: self.warnings.extend(so.warnings) self.expected_number_of_events += so.expected_number_of_events @@ -1787,9 +1795,13 @@ def run( if number_of_sub_processes == 1: start_new_process = True + self.meta_data.reset() self.meta_data.number_of_sub_processes = number_of_sub_processes self.meta_data.start_new_process = start_new_process + for actor in self.actor_manager.actors.values(): + actor.reset_user_output() + # prepare sub process if start_new_process is True: """Important: put: @@ -1798,9 +1810,6 @@ def run( https://britishgeologicalsurvey.github.io/science/python-forking-vs-spawn/ """ - for actor in self.actor_manager.actors.values(): - actor.reset_user_output() - log.info("Dispatching simulation to subprocess ...") output = dispatch_to_subprocess(self._run_simulation_engine, True) @@ -1819,7 +1828,7 @@ def run( source.fTotalSkippedEvents = s.user_info.fTotalSkippedEvents source.fTotalZeroEvents = s.user_info.fTotalZeroEvents - self.meta_data.import_from_simulation_output(output) + self.meta_data.extract_from_simulation_output(output) elif number_of_sub_processes > 1: multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval( @@ -1834,8 +1843,6 @@ def run( print("Could not set start method 'spawn'.") pass # q = multiprocessing.Queue() - for actor in self.actor_manager.actors.values(): - actor.reset_user_output() with multiprocessing.Pool(number_of_sub_processes) as pool: results = [ @@ -1849,6 +1856,9 @@ def run( list_of_output = [res.get() for res in results] log.info("End of multiprocessing") + # FOR DEBUGGING. 
remove when ready + self.multi_proc_handler = multi_proc_handler + # loop over actors in original simulation for actor in self.actor_manager.actors.values(): actor.import_user_output_from_actor( @@ -1858,7 +1868,7 @@ def run( for actor in self.actor_manager.actors.values(): actor.EndOfMultiProcessAction() - self.meta_data.import_from_simulation_output(*list_of_output) + self.meta_data.extract_from_simulation_output(*list_of_output) for i, o in enumerate(list_of_output): self.meta_data_per_process[i] = SimulationMetaData(simulation_output=o) @@ -1880,7 +1890,7 @@ def run( # Nothing special to do if the simulation engine ran in the native python process # because everything is already in place. output = self._run_simulation_engine(False) - self.meta_data.import_from_simulation_output(output) + self.meta_data.extract_from_simulation_output(output) if self.store_json_archive is True: self.to_json_file() @@ -1893,7 +1903,7 @@ def run( print("*" * 20) print(f"{len(self.warnings)} warnings occurred in this simulation: \n") for i, w in enumerate(self.warnings): - print(f"{i+1}) " + "-" * 10) + print(f"{i + 1}) " + "-" * 10) print(w) print() print("*" * 20) From fb3628665dfe09a1836c9c75ee6df2f981edd749 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 24 Oct 2024 12:27:01 +0000 Subject: [PATCH 105/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/image.py | 1 + opengate/managers.py | 6 ++++-- .../tests/src/test008_dose_actor_multiproc.py | 18 ++++++++++-------- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/opengate/image.py b/opengate/image.py index 82cbfda59..a9407063e 100644 --- a/opengate/image.py +++ b/opengate/image.py @@ -356,6 +356,7 @@ def divide_itk_images( imgarrOut.CopyInformation(img1_numerator) return imgarrOut + # IMPLEMENTATION BASED ON ITK # def sum_itk_images(images): # image_type = type(images[0]) diff --git a/opengate/managers.py b/opengate/managers.py index 7a553f034..3a4ad7fee 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1756,7 +1756,7 @@ def run_in_process( # adapt the output_dir self.output_dir = str(Path(self.output_dir) / f"process_{process_index}") - if self.random_seed != 'auto': + if self.random_seed != "auto": self.random_seed += process_index # adapt the run timing intervals in @@ -1862,7 +1862,9 @@ def run( # loop over actors in original simulation for actor in self.actor_manager.actors.values(): actor.import_user_output_from_actor( - *[o.get_actor(actor.name) for o in list_of_output] # these are the actors from the process + *[ + o.get_actor(actor.name) for o in list_of_output + ] # these are the actors from the process ) for actor in self.actor_manager.actors.values(): diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index dc3caf324..7bc63ec3f 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -88,13 +88,13 @@ # # start simulation t1 = time.time() - sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_4' + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / "nproc_4" path_edep_nproc4 = dose.edep.get_output_path() sim.run(number_of_sub_processes=4, avoid_write_to_disk_in_subprocess=False) t2 = time.time() delta_t_nproc4 = t2 - t1 - sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_1' + sim.output_dir = paths.output / 
Path(__file__.rstrip(".py")).stem / "nproc_1" path_edep_nproc1 = dose.edep.get_output_path() t1 = time.time() sim.run(number_of_sub_processes=1) @@ -108,17 +108,19 @@ print("Simulation times: ") print(f"One subprocess: {delta_t_nproc1}") - print(f"Four subprocesses: {delta_t_nproc4}, speed-up: {delta_t_nproc1 / delta_t_nproc4}") + print( + f"Four subprocesses: {delta_t_nproc4}, speed-up: {delta_t_nproc1 / delta_t_nproc4}" + ) # # print(f"No subprocess: {delta_t_no_subproc}") # # tests print("\nDifference for EDEP") is_ok = utility.assert_images( - path_edep_nproc1, - path_edep_nproc4, - tolerance=13, - ignore_value=None, - sum_tolerance=1, + path_edep_nproc1, + path_edep_nproc4, + tolerance=13, + ignore_value=None, + sum_tolerance=1, ) # # print("\nDifference for uncertainty") From 27f42041b4a8064e45ed9c210f890617bffb2f2b Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 24 Oct 2024 19:31:56 +0200 Subject: [PATCH 106/174] Implement reset_data() in DataItem base class --- opengate/actors/dataitems.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index e06f35255..a0a948996 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -41,6 +41,9 @@ def set_data(self, data, **kwargs): def data_is_none(self): return self.data is None + def reset_data(self): + raise NotImplementedError + def _assert_data_is_not_none(self): if self.data_is_none: raise ValueError( From 994fded1c0aac68d28d0ada2fd7b3d1d52b9806a Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 24 Oct 2024 19:32:15 +0200 Subject: [PATCH 107/174] update imports in dataitems.py --- opengate/actors/dataitems.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index a0a948996..83adeeff3 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -2,9 +2,10 @@ import numpy as np import json from box import Box +import platform from ..exception import fatal, warning, GateImplementationError -from ..utility import ensure_filename_is_str, calculate_variance +from ..utility import ensure_filename_is_str, calculate_variance, g4_units, g4_best_unit_tuple from ..image import ( sum_itk_images, divide_itk_images, @@ -14,6 +15,7 @@ write_itk_image, get_info_from_image, ) +from ..serialization import dump_json # base classes From 1e706b42a05dd609792732ade5e2e6d6210713ff Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 24 Oct 2024 19:33:44 +0200 Subject: [PATCH 108/174] Implement StatisticsDataItem --- opengate/actors/dataitems.py | 149 +++++++++++++++++++++++++++++++++++ 1 file changed, 149 insertions(+) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index 83adeeff3..e61720ffc 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -129,6 +129,155 @@ def number_of_samples(self, value): self.meta_data["number_of_samples"] = int(value) +class StatisticsDataItem(DataItem): + + # def __init__(self, *args, **kwargs): + # super().__init__(*args, **kwargs) + # # super leaves data=None if no data is passed as kwarg, + # # but we want to initialize with a pre-filled Box + # if self.data is None: + # self.reset_data() + + def __str__(self): + s = "" + for k, v in self.get_processed_output().items(): + if k == "track_types": + if len(v["value"]) > 0: + s += "track_types\n" + for t, n in v["value"].items(): + s += f"{' ' * 24}{t}: {n}\n" + else: + if v["unit"] is None: + unit = "" + else: + unit = str(v["unit"]) + s += 
f"{k}{' ' * (20 - len(k))}{v['value']} {unit}\n" + # remove last line break + return s.rstrip("\n") + + def set_data(self, data, **kwargs): + """The input data must behave like a dictionary. + """ + self.reset_data() + self.data.update(data) + + def reset_data(self): + self.data = Box() + self.data.runs = 0 + self.data.events = 0 + self.data.tracks = 0 + self.data.steps = 0 + self.data.duration = 0 + self.data.start_time = 0 + self.data.stop_time = 0 + self.data.sim_start_time = 0 + self.data.sim_stop_time = 0 + self.data.init = 0 + self.data.track_types = {} + self.data.nb_threads = 1 + + def inplace_merge_with(self, *other): + if self.data is None: + self.reset_data() + for o in other: + self.data.runs += o.data.runs + self.data.events += o.data.events + self.data.steps += o.data.steps + self.data.tracks += o.data.tracks + self.data.duration += o.data.duration + self.data.init += o.data.init + + common_entries = set(self.data.track_types.keys()).intersection(o.data.track_types.keys()) + new_entries = set(o.data.track_types.keys()).difference(self.data.track_types.keys()) + for k in common_entries: + self.data.track_types[k] += o.data.track_types[k] + for k in new_entries: + self.data.track_types[k] = o.data.track_types[k] + + # self.data.start_time = 0 + # self.data.stop_time = 0 + # self.data.sim_start_time = 0 + # self.data.sim_stop_time = 0 + + @property + def pps(self): + if self.data.duration != 0: + return int( + self.data.events / (self.data.duration / g4_units.s) + ) + else: + return 0 + + @property + def tps(self): + if self.data.duration != 0: + return int( + self.data.tracks / (self.data.duration / g4_units.s) + ) + else: + return 0 + + @property + def sps(self): + if self.data.duration != 0: + return int( + self.data.steps / (self.data.duration / g4_units.s) + ) + else: + return 0 + + def get_processed_output(self): + d = {} + d["runs"] = {"value": self.data.runs, "unit": None} + d["events"] = {"value": self.data.events, "unit": None} + d["tracks"] = {"value": self.data.tracks, "unit": None} + d["steps"] = {"value": self.data.steps, "unit": None} + val, unit = g4_best_unit_tuple(self.data.init, "Time") + d["init"] = { + "value": val, + "unit": unit, + } + val, unit = g4_best_unit_tuple(self.data.duration, "Time") + d["duration"] = { + "value": val, + "unit": unit, + } + d["pps"] = {"value": self.pps, "unit": None} + d["tps"] = {"value": self.tps, "unit": None} + d["sps"] = {"value": self.sps, "unit": None} + d["start_time"] = { + "value": self.data.start_time, + "unit": None, + } + d["stop_time"] = { + "value": self.data.stop_time, + "unit": None, + } + val, unit = g4_best_unit_tuple(self.data.sim_start_time, "Time") + d["sim_start_time"] = { + "value": val, + "unit": unit, + } + val, unit = g4_best_unit_tuple(self.data.sim_stop_time, "Time") + d["sim_stop_time"] = { + "value": val, + "unit": unit, + } + d["threads"] = {"value": self.data.nb_threads, "unit": None} + d["arch"] = {"value": platform.system(), "unit": None} + d["python"] = {"value": platform.python_version(), "unit": None} + d["track_types"] = {"value": self.data.track_types, "unit": None} + return d + + def write(self, path, encoder="json", **kwargs): + """Override virtual method from base class.""" + with open(path, "w+") as f: + if encoder == "json": + dump_json(self.get_processed_output(), f, indent=4) + else: + f.write(self.__str__()) + + class MeanValueDataItemMixin: """This class cannot be instantiated on its own. It is solely meant to be mixed into a class that inherits from DataItem (or daughters). 
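
The merge rule implemented above in StatisticsDataItem.inplace_merge_with() can be illustrated
with a small standalone sketch: scalar counters coming from the sub-processes are accumulated,
and the per-particle track_types dictionaries are combined key by key. The sketch below uses
plain dicts instead of the Box-based data items and hypothetical names; it only illustrates the
intended behaviour, it is not the actual class.

    def merge_stats(a, b):
        # scalar counters are simply accumulated
        for key in ("runs", "events", "tracks", "steps", "duration", "init"):
            a[key] += b[key]
        # particle counts per track type are combined key by key
        for t, n in b["track_types"].items():
            a["track_types"][t] = a["track_types"].get(t, 0) + n
        return a

    stats_proc0 = {"runs": 1, "events": 500, "tracks": 1200, "steps": 9000,
                   "duration": 2.0, "init": 0.1, "track_types": {"gamma": 800, "e-": 400}}
    stats_proc1 = {"runs": 1, "events": 500, "tracks": 1100, "steps": 8700,
                   "duration": 2.1, "init": 0.1, "track_types": {"gamma": 750, "e-": 350}}
    merged = merge_stats(stats_proc0, stats_proc1)
    # merged["events"] == 1000 and merged["track_types"]["gamma"] == 1550
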
From b17a98c185dc91e85a5e2f5969b2c808ec8accbd Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 24 Oct 2024 19:33:57 +0200 Subject: [PATCH 109/174] Implement StatisticsItemContainer --- opengate/actors/dataitems.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index e61720ffc..155381869 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -905,6 +905,16 @@ class QuotientMeanItkImage(QuotientItkImage): ) +class StatisticsItemContainer(DataItemContainer): + + _data_item_classes = (StatisticsDataItem,) + default_data_item_config = Box( + { + 0: Box({"output_filename": "auto", "write_to_disk": False, "active": True}), + } + ) + + def merge_data(list_of_data): merged_data = list_of_data[0] try: From 09a95c40257a13e50ea0b9b2befbfe5b4709c442 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 24 Oct 2024 19:35:39 +0200 Subject: [PATCH 110/174] Let SimulationStatisticsActor use new ActorOutputStatisticsActor --- opengate/actors/miscactors.py | 220 ++++------------------------------ 1 file changed, 21 insertions(+), 199 deletions(-) diff --git a/opengate/actors/miscactors.py b/opengate/actors/miscactors.py index ee56c59d8..a198e09da 100644 --- a/opengate/actors/miscactors.py +++ b/opengate/actors/miscactors.py @@ -2,191 +2,11 @@ import platform import opengate_core as g4 from .base import ActorBase -from ..utility import g4_units, g4_best_unit_tuple -from .actoroutput import ActorOutputBase -from ..serialization import dump_json -from ..exception import warning +from ..utility import g4_units +from .actoroutput import ActorOutputStatisticsActor from ..base import process_cls -def _setter_hook_stats_actor_output_filename(self, output_filename): - # By default, write_to_disk is False. - # However, if user actively sets the output_filename - # s/he most likely wants to write to disk also - if output_filename != "" and output_filename is not None: - self.write_to_disk = True - return output_filename - - -class ActorOutputStatisticsActor(ActorOutputBase): - """This is a hand-crafted ActorOutput specifically for the SimulationStatisticsActor.""" - - # hints for IDE - encoder: str - output_filename: str - write_to_disk: bool - - user_info_defaults = { - "encoder": ( - "json", - { - "doc": "How should the output be encoded?", - "allowed_values": ("json", "legacy"), - }, - ), - "output_filename": ( - "auto", - { - "doc": "Filename for the data represented by this actor output. " - "Relative paths and filenames are taken " - "relative to the global simulation output folder " - "set via the Simulation.output_dir option. ", - "setter_hook": _setter_hook_stats_actor_output_filename, - }, - ), - "write_to_disk": ( - False, - { - "doc": "Should the output be written to disk, or only kept in memory? 
", - }, - ), - } - - default_suffix = "json" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - # predefine the merged_data - self.merged_data = Box() - - def reset_data(self): - self.merged_data.runs = 0 - self.merged_data.events = 0 - self.merged_data.tracks = 0 - self.merged_data.steps = 0 - self.merged_data.duration = 0 - self.merged_data.start_time = 0 - self.merged_data.stop_time = 0 - self.merged_data.sim_start_time = 0 - self.merged_data.sim_stop_time = 0 - self.merged_data.init = 0 - self.merged_data.track_types = {} - self.merged_data.nb_threads = 1 - - @property - def pps(self): - if self.merged_data.duration != 0: - return int( - self.merged_data.events / (self.merged_data.duration / g4_units.s) - ) - else: - return 0 - - @property - def tps(self): - if self.merged_data.duration != 0: - return int( - self.merged_data.tracks / (self.merged_data.duration / g4_units.s) - ) - else: - return 0 - - @property - def sps(self): - if self.merged_data.duration != 0: - return int( - self.merged_data.steps / (self.merged_data.duration / g4_units.s) - ) - else: - return 0 - - def store_data(self, data, **kwargs): - self.merged_data.update(data) - - def get_data(self, **kwargs): - if "which" in kwargs and kwargs["which"] != "merged": - warning( - f"The statistics actor output only stores merged data currently. " - f"The which={kwargs['which']} you provided will be ignored. " - ) - # the statistics actor currently only handles merged data, so we return it - # no input variable 'which' as in other output classes - return self.merged_data - - def get_processed_output(self): - d = {} - d["runs"] = {"value": self.merged_data.runs, "unit": None} - d["events"] = {"value": self.merged_data.events, "unit": None} - d["tracks"] = {"value": self.merged_data.tracks, "unit": None} - d["steps"] = {"value": self.merged_data.steps, "unit": None} - val, unit = g4_best_unit_tuple(self.merged_data.init, "Time") - d["init"] = { - "value": val, - "unit": unit, - } - val, unit = g4_best_unit_tuple(self.merged_data.duration, "Time") - d["duration"] = { - "value": val, - "unit": unit, - } - d["pps"] = {"value": self.pps, "unit": None} - d["tps"] = {"value": self.tps, "unit": None} - d["sps"] = {"value": self.sps, "unit": None} - d["start_time"] = { - "value": self.merged_data.start_time, - "unit": None, - } - d["stop_time"] = { - "value": self.merged_data.stop_time, - "unit": None, - } - val, unit = g4_best_unit_tuple(self.merged_data.sim_start_time, "Time") - d["sim_start_time"] = { - "value": val, - "unit": unit, - } - val, unit = g4_best_unit_tuple(self.merged_data.sim_stop_time, "Time") - d["sim_stop_time"] = { - "value": val, - "unit": unit, - } - d["threads"] = {"value": self.merged_data.nb_threads, "unit": None} - d["arch"] = {"value": platform.system(), "unit": None} - d["python"] = {"value": platform.python_version(), "unit": None} - d["track_types"] = {"value": self.merged_data.track_types, "unit": None} - return d - - def __str__(self): - s = "" - for k, v in self.get_processed_output().items(): - if k == "track_types": - if len(v["value"]) > 0: - s += "track_types\n" - for t, n in v["value"].items(): - s += f"{' ' * 24}{t}: {n}\n" - else: - if v["unit"] is None: - unit = "" - else: - unit = str(v["unit"]) - s += f"{k}{' ' * (20 - len(k))}{v['value']} {unit}\n" - # remove last line break - return s.rstrip("\n") - - def write_data(self, **kwargs): - """Override virtual method from base class.""" - with open(self.get_output_path(which="merged"), "w+") as f: - if self.encoder == 
"json": - dump_json(self.get_processed_output(), f, indent=4) - else: - f.write(self.__str__()) - - def write_data_if_requested(self, **kwargs): - if self.write_to_disk is True: - self.write_data(**kwargs) - - class SimulationStatisticsActor(ActorBase, g4.GateSimulationStatisticsActor): """ Store statistics about a simulation run. @@ -207,6 +27,7 @@ class SimulationStatisticsActor(ActorBase, g4.GateSimulationStatisticsActor): def __init__(self, *args, **kwargs): ActorBase.__init__(self, *args, **kwargs) self._add_user_output(ActorOutputStatisticsActor, "stats") + self.user_output.stats.set_write_to_disk(False) self.__initcpp__() def __initcpp__(self): @@ -217,13 +38,6 @@ def __str__(self): s = self.user_output["stats"].__str__() return s - @property - def counts(self): - return self.user_output.stats.merged_data - - def store_output_data(self, output_name, run_index, *data): - raise NotImplementedError - def initialize(self): ActorBase.initialize(self) self.InitializeUserInput(self.user_info) @@ -231,13 +45,17 @@ def initialize(self): def StartSimulationAction(self): g4.GateSimulationStatisticsActor.StartSimulationAction(self) - self.user_output.stats.merged_data.nb_threads = ( - self.simulation.number_of_threads - ) + # self.user_output.stats.merged_data.nb_threads = ( + # self.simulation.number_of_threads + # ) + + # def EndOfRunActionMasterThread(self, run_index): + # self.user_output.stats.store_data() def EndSimulationAction(self): g4.GateSimulationStatisticsActor.EndSimulationAction(self) - self.user_output.stats.store_data(self.GetCounts()) + + data = dict([(k, v) for k, v in self.GetCounts().items()]) if self.simulation is not None: sim_start = self.simulation.run_timing_intervals[0][0] @@ -249,18 +67,22 @@ def EndSimulationAction(self): else: sim_stop = 0 - self.user_output.stats.store_data( - {"sim_start": sim_start, "sim_stop": sim_stop} - ) - self.user_output.stats.merged_data.sim_start_time = ( + data["sim_start"] = sim_start + data["sim_stop"] = sim_stop + data["sim_start_time"] = ( self.simulation.run_timing_intervals[0][0] ) - self.user_output.stats.merged_data.sim_stop_time = ( + data["sim_stop_time"] = ( self.simulation.run_timing_intervals[-1][1] ) - self.user_output.stats.merged_data.nb_threads = ( + data["nb_threads"] = ( self.simulation.number_of_threads ) + self.user_output.stats.store_data("merged", data) + + self.user_output.stats.write_data_if_requested() + + def EndOfMultiProcessAction(self): self.user_output.stats.write_data_if_requested() From a2cd755aaab6504037e2c5979b67ff64dab3379b Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 24 Oct 2024 19:36:53 +0200 Subject: [PATCH 111/174] Implement ActorOutputStatisticsActor --- opengate/actors/actoroutput.py | 64 ++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 2b47f0e6e..cd4030be4 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -10,6 +10,7 @@ SingleItkImageWithVariance, QuotientItkImage, QuotientMeanItkImage, + StatisticsItemContainer, merge_data, ) @@ -804,6 +805,69 @@ class ActorOutputQuotientMeanImage(ActorOutputImage): data_container_class = QuotientMeanItkImage +def _setter_hook_encoder(self, value): + if value == "encoder": + self.default_suffix = "json" + else: + self.default_suffix = "txt" + return value + + +class ActorOutputStatisticsActor(ActorOutputUsingDataItemContainer): + """This is a hand-crafted ActorOutput specifically for the SimulationStatisticsActor.""" + 
+ _default_interface_class = UserInterfaceToActorOutputStatisticsActor + data_container_class = StatisticsItemContainer + + # hints for IDE + encoder: str + + user_info_defaults = { + "encoder": ( + "json", + { + "doc": "How should the output be encoded?", + "allowed_values": ("json", "legacy"), + }, + ), + # "output_filename": ( + # "auto", + # { + # "doc": "Filename for the data represented by this actor output. " + # "Relative paths and filenames are taken " + # "relative to the global simulation output folder " + # "set via the Simulation.output_dir option. ", + # "setter_hook": _setter_hook_stats_actor_output_filename, + # }, + # ), + # "write_to_disk": ( + # False, + # { + # "doc": "Should the output be written to disk, or only kept in memory? ", + # }, + # ), + } + + def __init__(self, *args, **kwargs): + self.default_suffix = 'json' + super().__init__(*args, **kwargs) + + def initialize(self): + output_filename = self.get_output_filename() + if output_filename != "" and output_filename is not None: + self.set_write_to_disk(True) + super().initialize() + + # def store_data(self, data, **kwargs): + # self.merged_data.update(data) + # + def __str__(self): + if self.merged_data is not None: + return self.merged_data.__str__() + else: + return "No data found. " + + class ActorOutputRoot(ActorOutputBase): # hints for IDE From abdaacd40a89753f41ae30aa1061318371c95516 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 24 Oct 2024 19:37:40 +0200 Subject: [PATCH 112/174] Implement UserInterfaceToActorOutputStatisticsActor --- opengate/actors/actoroutput.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index cd4030be4..5fde6ca63 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -176,6 +176,13 @@ def image(self): return self._user_output.get_data(**self._kwargs_for_interface_calls) +class UserInterfaceToActorOutputStatisticsActor(UserInterfaceToActorOutputUsingDataItemContainer): + + @property + def counts(self): + return self._user_output.get_data(which="merged", **self._kwargs_for_interface_calls) + + def _setter_hook_belongs_to(self, belongs_to): if belongs_to is None: fatal("The belongs_to attribute of an ActorOutput cannot be None.") From 37b6bbab8509cef57a55ed14fcec8d820a545d80 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 25 Oct 2024 16:23:59 +0200 Subject: [PATCH 113/174] let default_suffix be an instance attribute of the ActorOutput classes rather than a class attribute. 
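
The motivation (as suggested by _setter_hook_encoder in the previous commit) is that a
per-instance default suffix can depend on per-instance configuration, e.g. the statistics
output picking "json" or "txt" depending on its encoder, whereas a class attribute provides a
single default per class. A simplified illustration with hypothetical minimal classes, not the
real ActorOutput hierarchy:

    class OutputWithClassDefault:
        default_suffix = "mhd"  # one shared default for every instance of the class

    class OutputWithInstanceDefault:
        def __init__(self, encoder="json"):
            # per-instance default, derived from per-instance configuration
            self.default_suffix = "json" if encoder == "json" else "txt"

    a = OutputWithInstanceDefault(encoder="json")
    b = OutputWithInstanceDefault(encoder="legacy")
    # a.default_suffix == "json", b.default_suffix == "txt"
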
--- opengate/actors/actoroutput.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 5fde6ca63..d76664e7f 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -211,7 +211,6 @@ class ActorOutputBase(GateObject): keep_data_in_memory: bool _default_interface_class = BaseUserInterfaceToActorOutput - default_suffix = None user_info_defaults = { "belongs_to": ( @@ -245,6 +244,8 @@ def __init__(self, *args, **kwargs): self.data_per_run = {} # holds the data per run in memory self.merged_data = None # holds the data merged from multiple runs in memory + self.default_suffix = "" + # internal flag which can set by the actor when it creating an actor output # via _add_actor_output # __can_be_deactivated = False forces the "active" user info to True From 3b0e49937af81abdb17e7d9c50977218878ed955 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 25 Oct 2024 16:24:51 +0200 Subject: [PATCH 114/174] Implement ActorOutputUsingDataItemContainer.reset_data() --- opengate/actors/actoroutput.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index d76664e7f..aecd074d0 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -646,6 +646,19 @@ def get_data_container(self, which): f"Allowed values are: 'merged' or a valid run_index. " ) + def reset_data(self): + # try to delegate the reset to the data item container (and the data items in them) + try: + if self.merged_data is not None: + self.merged_data.reset_data() + for v in self.data_per_run.values(): + if v is not None: + v.reset_data() + # if they do not implement the reset_data() method, + # fallback to the simple reset from the super class + except NotImplementedError: + super().reset_data() + def get_data(self, which="merged", item=0): container = self.get_data_container(which) if container is None: From 6ce548c2318f5382bdd8d91ca01e5060474cf8e6 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 25 Oct 2024 16:25:10 +0200 Subject: [PATCH 115/174] Add process_cls(ActorOutputStatisticsActor) in actoroutput.py --- opengate/actors/actoroutput.py | 1 + 1 file changed, 1 insertion(+) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index aecd074d0..1eec00e9c 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -963,4 +963,5 @@ def initialize_cpp_parameters(self): process_cls(ActorOutputSingleImageWithVariance) process_cls(ActorOutputQuotientImage) process_cls(ActorOutputQuotientMeanImage) +process_cls(ActorOutputStatisticsActor) process_cls(ActorOutputRoot) From fd4d7fc80af7421c7e0dc49c524f99bcde12654b Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 25 Oct 2024 16:25:34 +0200 Subject: [PATCH 116/174] Remove unused store_output_data() method --- opengate/actors/base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/opengate/actors/base.py b/opengate/actors/base.py index f7c0c56f3..e10f923a5 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -449,9 +449,9 @@ def import_user_output_from_actor(self, *actor): f"because the function is not yet implemented for this type of output." 
) - def store_output_data(self, output_name, run_index, *data): - self._assert_output_exists(output_name) - self.user_output[output_name].store_data(run_index, *data) + # def store_output_data(self, output_name, run_index, *data): + # self._assert_output_exists(output_name) + # self.user_output[output_name].store_data(run_index, *data) def write_output_to_disk_if_requested(self, output_name): self._assert_output_exists(output_name) From e1a6713b22a036e1287df034ed58826d325319c7 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 25 Oct 2024 16:25:55 +0200 Subject: [PATCH 117/174] Update test008_dose_actor_multiproc.py --- opengate/tests/src/test008_dose_actor_multiproc.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index 7bc63ec3f..cd2e8bde5 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -83,8 +83,10 @@ dose.output_filename = "test.nii.gz" # add stat actor - stat = sim.add_actor("SimulationStatisticsActor", "Stats") - stat.track_types_flag = True + stat_actor = sim.add_actor("SimulationStatisticsActor", "Stats") + stat_actor.write_to_disk = True + stat_actor.output_filename = "stats.json" + stat_actor.track_types_flag = True # # start simulation t1 = time.time() @@ -119,7 +121,6 @@ path_edep_nproc1, path_edep_nproc4, tolerance=13, - ignore_value=None, sum_tolerance=1, ) # From 3c4e7b52468b7f00ae66f0b57f1838c667563b93 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 25 Oct 2024 14:27:24 +0000 Subject: [PATCH 118/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/actors/actoroutput.py | 10 +++++++--- opengate/actors/dataitems.py | 30 ++++++++++++++++-------------- opengate/actors/miscactors.py | 14 ++++---------- 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 1eec00e9c..44e5a2476 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -176,11 +176,15 @@ def image(self): return self._user_output.get_data(**self._kwargs_for_interface_calls) -class UserInterfaceToActorOutputStatisticsActor(UserInterfaceToActorOutputUsingDataItemContainer): +class UserInterfaceToActorOutputStatisticsActor( + UserInterfaceToActorOutputUsingDataItemContainer +): @property def counts(self): - return self._user_output.get_data(which="merged", **self._kwargs_for_interface_calls) + return self._user_output.get_data( + which="merged", **self._kwargs_for_interface_calls + ) def _setter_hook_belongs_to(self, belongs_to): @@ -870,7 +874,7 @@ class ActorOutputStatisticsActor(ActorOutputUsingDataItemContainer): } def __init__(self, *args, **kwargs): - self.default_suffix = 'json' + self.default_suffix = "json" super().__init__(*args, **kwargs) def initialize(self): diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index 155381869..12e5d898e 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -5,7 +5,12 @@ import platform from ..exception import fatal, warning, GateImplementationError -from ..utility import ensure_filename_is_str, calculate_variance, g4_units, g4_best_unit_tuple +from ..utility import ( + ensure_filename_is_str, + calculate_variance, + g4_units, + g4_best_unit_tuple, +) from ..image import ( sum_itk_images, divide_itk_images, @@ -156,8 +161,7 @@ def 
__str__(self): return s.rstrip("\n") def set_data(self, data, **kwargs): - """The input data must behave like a dictionary. - """ + """The input data must behave like a dictionary.""" self.reset_data() self.data.update(data) @@ -187,8 +191,12 @@ def inplace_merge_with(self, *other): self.data.duration += o.data.duration self.data.init += o.data.init - common_entries = set(self.data.track_types.keys()).intersection(o.data.track_types.keys()) - new_entries = set(o.data.track_types.keys()).difference(self.data.track_types.keys()) + common_entries = set(self.data.track_types.keys()).intersection( + o.data.track_types.keys() + ) + new_entries = set(o.data.track_types.keys()).difference( + self.data.track_types.keys() + ) for k in common_entries: self.data.track_types[k] += o.data.track_types[k] for k in new_entries: @@ -202,27 +210,21 @@ def inplace_merge_with(self, *other): @property def pps(self): if self.data.duration != 0: - return int( - self.data.events / (self.data.duration / g4_units.s) - ) + return int(self.data.events / (self.data.duration / g4_units.s)) else: return 0 @property def tps(self): if self.data.duration != 0: - return int( - self.data.tracks / (self.data.duration / g4_units.s) - ) + return int(self.data.tracks / (self.data.duration / g4_units.s)) else: return 0 @property def sps(self): if self.data.duration != 0: - return int( - self.data.steps / (self.data.duration / g4_units.s) - ) + return int(self.data.steps / (self.data.duration / g4_units.s)) else: return 0 diff --git a/opengate/actors/miscactors.py b/opengate/actors/miscactors.py index a198e09da..dbab1679a 100644 --- a/opengate/actors/miscactors.py +++ b/opengate/actors/miscactors.py @@ -50,7 +50,7 @@ def StartSimulationAction(self): # ) # def EndOfRunActionMasterThread(self, run_index): - # self.user_output.stats.store_data() + # self.user_output.stats.store_data() def EndSimulationAction(self): g4.GateSimulationStatisticsActor.EndSimulationAction(self) @@ -69,15 +69,9 @@ def EndSimulationAction(self): data["sim_start"] = sim_start data["sim_stop"] = sim_stop - data["sim_start_time"] = ( - self.simulation.run_timing_intervals[0][0] - ) - data["sim_stop_time"] = ( - self.simulation.run_timing_intervals[-1][1] - ) - data["nb_threads"] = ( - self.simulation.number_of_threads - ) + data["sim_start_time"] = self.simulation.run_timing_intervals[0][0] + data["sim_stop_time"] = self.simulation.run_timing_intervals[-1][1] + data["nb_threads"] = self.simulation.number_of_threads self.user_output.stats.store_data("merged", data) self.user_output.stats.write_data_if_requested() From 51733c1e8b8cf56540fde4b7748b8af43f00cf5e Mon Sep 17 00:00:00 2001 From: David Sarrut Date: Fri, 25 Oct 2024 10:14:47 +0200 Subject: [PATCH 119/174] Update physics and sources documentation Updated the user guide for physics and sources to use new method names for accessing physics settings and organizing content. Introduced subheadings for better structure and improved clarity. Corrected minor grammatical issues. 
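The `inplace_merge_with()` hunk in dataitems.py above defines how statistics from several runs (or sub-processes) are combined: scalar counters such as `duration` and `init` are summed, while the `track_types` dictionaries are merged key-wise. A small stand-alone sketch of that dictionary merge, with purely illustrative values:

```python
# Key-wise merge of two track_types dictionaries, mirroring the rule in
# inplace_merge_with(): common particle types are summed, new types are copied over.
track_types_a = {"gamma": 100, "e-": 40}
track_types_b = {"gamma": 60, "proton": 5}

merged = dict(track_types_a)
for particle, count in track_types_b.items():
    merged[particle] = merged.get(particle, 0) + count

print(merged)  # {'gamma': 160, 'e-': 40, 'proton': 5}
```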
--- docs/source/user_guide/user_guide_actors.md | 2 +- docs/source/user_guide/user_guide_physics.md | 71 ++++++++++++-------- docs/source/user_guide/user_guide_sources.md | 16 ++++- 3 files changed, 60 insertions(+), 29 deletions(-) diff --git a/docs/source/user_guide/user_guide_actors.md b/docs/source/user_guide/user_guide_actors.md index 77d5cc8bc..4e36d5146 100644 --- a/docs/source/user_guide/user_guide_actors.md +++ b/docs/source/user_guide/user_guide_actors.md @@ -1,3 +1,3 @@ ## Actors and Filters -The "Actors" are scorers can store information during simulation such as dose map or phase-space (like a "tally" in MCNPX). They can also be used to modify the behavior of a simulation, such as the `MotionActor` that allows to move volumes, this is why they are called "actor". +The "Actors" are scorers that can store information during simulation, such as dose map or phase-space (like a "tally" in MCNPX). They can also be used to modify the behavior of a simulation, such as the `KillActor` that allows to stop tracking particles when they reach some defined regions, this is why they are called "actors" rather than "scorers". diff --git a/docs/source/user_guide/user_guide_physics.md b/docs/source/user_guide/user_guide_physics.md index 17ee17439..75d01329b 100644 --- a/docs/source/user_guide/user_guide_physics.md +++ b/docs/source/user_guide/user_guide_physics.md @@ -9,8 +9,7 @@ detailed explanation. The user can select the physics list with the following: ```python # Assume that sim is a simulation -phys = sim.get_physics_info() -phys.name = 'QGSP_BERT_EMZ' +sim.physics_manager.physics_list_name = 'QGSP_BERT_EMZ' ``` The default physics list is QGSP_BERT_EMV. The Geant4 standard physics list are composed of a first part: @@ -83,9 +82,7 @@ Note that EMV, EMX, EMY, EMZ corresponds to option1, 2, 3, 4 (don't ask us why). The decay process, if needed, must be added explicitly. This is done with: ```python -sim.enable_decay(True) -# or -sim.physics_manager = True # zxc This behavior is never used in any of the tests and seems like a bug since printing the physics_manager of a test result in a fancy dictionnary. +sim.physics_manager.enable_decay(True) ``` Under the hood, this will add two processed to the Geant4 list of processes, G4DecayPhysics and G4RadioactiveDecayPhysics. Those processes are required in particular if decaying generic ion (such as F18) is used as source. Additional information can be found in the following: @@ -143,7 +140,7 @@ sim.physics_manager.mean_energy_per_ion_pair[material_of_interest] = 5.0 * eV ``` #### Back-to-back source -Currently, simulating this behavior cannot be reproduced with back-to-back source +Currently, simulating this behavior cannot be reproduced (yet!) with back-to-back source. #### Further considerations The property needed to simulate acollinearity, as expected in PET imaging, is defined at the level of materials, not at the volume level. @@ -282,27 +279,35 @@ Users of Gate need to specify four properties to define the fluorescent material #### Simulation of the Fluorescein -```xml We define the refractive index of the fluorophore’s environment (water or alcohol): + +```xml - - - - + + + + -``` + -```xml + + - - - + + + -We describe the fluorescein Emission spectrum taken from measurements or literature as function of the photon energy: + + @@ -497,21 +502,33 @@ The hits generated by the detection of the optical photons are generated in the ## Electromagnetic parameters -WARNING : this part is work in progress. DO NOT USE YET. 
- Electromagnetic parameters are managed by a specific Geant4 object called G4EmParameters. It is available with the following: ```python -phys = sim.get_physics_info() -em = phys.g4_em_parameters -em.SetFluo(True) -em.SetAuger(True) -em.SetAugerCascade(True) -em.SetPixe(True) -em.SetDeexActiveRegion('world', True, True, True) +sim.physics_manager.em_parameters.fluo = True +sim.physics_manager.em_parameters.auger = True +sim.physics_manager.em_parameters.auger_cascade = True +sim.physics_manager.em_parameters.pixe = True +sim.physics_manager.em_parameters.deexcitation_ignore_cut = True +``` + +It is possible to enable/disable some physics options within a given region that need to be defined before by associating the region with a volume: + +```python +region_b1 = sim.physics_manager.add_region("region_b1") +region_b1.em_switches.deex = True +region_b1.em_switches.auger = False +region_b1.associate_volume(b1) +``` + +Or for the world: +```python +sim.physics_manager.em_switches_world.deex = True +sim.physics_manager.em_switches_world.auger = True +sim.physics_manager.em_switches_world.pixe = True ``` -WARNING: it must be set **after** the initialization (after `sim.initialize()` and before `output = sim.start()`). +See test063. The complete description is available in this page: diff --git a/docs/source/user_guide/user_guide_sources.md b/docs/source/user_guide/user_guide_sources.md index 13a6c392d..51e85c9dd 100644 --- a/docs/source/user_guide/user_guide_sources.md +++ b/docs/source/user_guide/user_guide_sources.md @@ -48,7 +48,12 @@ source.energy.mono = 80 * MeV source.energy.sigma_gauss = 1 * MeV ``` -All parameters are stored into a dict-like structure (a Box). Particle can be 'gamma', 'e+', 'e-', 'proton' (all Geant4 names). The number of particles that will be generated by the source can be described by an activity `source.activity = 10 * MBq` or by a number of particle `source.n = 100`. The activity may be automatically decreased according to an exponential decay by setting the half-life `source.half_life = 60 * sec`. Alternatively, user can provide a TAC (Time Activity Curve) by means of two vectors (times and activities) : +All parameters are stored into a dict-like structure (a Box). Particle can be 'gamma', 'e+', 'e-', 'proton' (all Geant4 names). The number of particles that will be generated by the source can be described by an activity `source.activity = 10 * MBq` or by a number of particle `source.n = 100`. + +#### Half-life and Time Activity Curves (TAC) + +The activity may be automatically decreased according to an exponential decay by setting the half-life `source.half_life = 60 * sec`. Alternatively, user can provide a TAC (Time Activity Curve) by means of two vectors (times and activities) : + ```python starting_activity = 1000 * Bq half_life = 2 * sec @@ -67,6 +72,8 @@ The energy can be defined by a single value ('mono') or Gaussian ('gauss'). The `mother` option indicate the coordinate system of the source. By default, it is the world, but it is possible to attach a source to any volume. In that case, the coordinate system of all emitted particles will follow the given volume. +#### Direction and Acceptance Angle + It is possible to indicate a `angle_acceptance_volume` to the direction of a source. In that case, the particle will be created only if their position & direction make them intersect the given volume. This is for example useful for SPECT imaging in order to limit the particle creation to the ones that will have a chance to reach the detector. 
Note that the particles that will not intersect the volume will be created anyway but with a zero energy (so not tracked). This mechanism ensures to remain consistent with the required activity and timestamps of the particles, there is no need to scale with the solid angle. See for example `test028` test files for more details. Using `direction.type = 'iso'`, the directions given to primary particles depends on 𝜃 and 𝜙 angles in a [spherical coordinate system](https://en.wikipedia.org/wiki/Spherical_coordinate_system). @@ -87,6 +94,8 @@ So 𝜃 is the angle in XOZ plane, from -Z to -X; and 𝜙 is the angle in XOY p ![](../figures/thetaphi.png) +#### Ion source + Source of ion can be set with the following (see `test013`) ```python @@ -96,6 +105,8 @@ source2 = sim.add_source('Generic', 'ion2') source2.particle = 'ion 53 124' # Iodine 124 ``` +#### Predefined energy spectrum for beta+ + There is some predefined energy spectrum of positron (e+): ```python @@ -106,6 +117,9 @@ source.energy.type = 'F18' # F18 or Ga68 or C11 ... It means the positrons will be generated following the (approximated) energy spectrum of the F18 ion. Source code is `GateSPSEneDistribution.cpp`. Energy spectrum for beta+ emitters are available : F18, Ga68, Zr89, Na22, C11, N13, O15, Rb82. See [http://www.lnhb.fr/nuclear-data/module-lara](http://www.lnhb.fr/nuclear-data/module-lara). One example is available in `test031`. + +#### Confined source + There is a `confine` option that allows to generate particles only if their starting position is within a given volume. See `phantom_nema_iec_body` in the contrib folder. Note that the source volume MUST be larger than the volume it is confined in. Also, note that no particle source will be generated in the daughters of the confine volume. All options have a default values and can be printed with `print(source)`. 
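The `confine` option described just above has no code snippet in the guide; a hedged sketch follows. The `position.confine` attribute name and the volume name are assumptions for illustration only; the other options follow the snippets of this section (an `import opengate as gate` and an existing `sim` containing the confine volume are assumed).

```python
# Illustrative sketch of a confined beta+ source; 'iec_interior' is a hypothetical
# volume name and 'position.confine' is assumed to be the relevant option.
Bq = gate.g4_units.Bq
mm = gate.g4_units.mm

source = sim.add_source("GenericSource", "confined_f18")
source.particle = "e+"
source.energy.type = "F18"        # predefined beta+ spectrum, see above
source.activity = 1000 * Bq
source.position.type = "box"
source.position.size = [100 * mm, 100 * mm, 100 * mm]  # must be larger than the confine volume
source.position.confine = "iec_interior"               # particles are only generated inside this volume
```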
From d76b64f533336048e2d7463fe910408a77c9d180 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 10 Oct 2024 01:49:36 +0200 Subject: [PATCH 120/174] Add test080_multiprocessing_1.py --- opengate/tests/src/test080_multiprocessing_1.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/opengate/tests/src/test080_multiprocessing_1.py b/opengate/tests/src/test080_multiprocessing_1.py index 90bf61400..d5f93aa94 100755 --- a/opengate/tests/src/test080_multiprocessing_1.py +++ b/opengate/tests/src/test080_multiprocessing_1.py @@ -5,8 +5,11 @@ from opengate.tests.utility import get_default_test_paths + if __name__ == "__main__": - paths = get_default_test_paths(__file__, output_folder="test080") + paths = get_default_test_paths( + __file__, output_folder="test080" + ) s = g4_units.s From 98752bf1b53f1328854a0d9cc44a258b30bc7137 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 23:54:15 +0000 Subject: [PATCH 121/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/managers.py | 8 ++++++-- opengate/tests/src/test080_multiprocessing_1.py | 5 +---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 3a4ad7fee..b8d9d93be 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1776,7 +1776,9 @@ def run_in_process( actor.write_to_disk = False output = self._run_simulation_engine(True, process_index=process_index) - print(f"run_in_process finished in process {process_index}") + print( + f"run_in_process finished in process {process_index}" + ) return output def run( @@ -1834,7 +1836,9 @@ def run( multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval( name="multi_proc_handler", simulation=self, - number_of_processes=number_of_sub_processes, + number_of_processes= + number_of_sub_processes + , ) multi_proc_handler.initialize() try: diff --git a/opengate/tests/src/test080_multiprocessing_1.py b/opengate/tests/src/test080_multiprocessing_1.py index d5f93aa94..90bf61400 100755 --- a/opengate/tests/src/test080_multiprocessing_1.py +++ b/opengate/tests/src/test080_multiprocessing_1.py @@ -5,11 +5,8 @@ from opengate.tests.utility import get_default_test_paths - if __name__ == "__main__": - paths = get_default_test_paths( - __file__, output_folder="test080" - ) + paths = get_default_test_paths(__file__, output_folder="test080") s = g4_units.s From 8217a0b246f204162a1acc9256cebe2532c57854 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 11 Oct 2024 12:13:08 +0200 Subject: [PATCH 122/174] Implement MultiProcessingHandler classes --- opengate/processing.py | 1 - 1 file changed, 1 deletion(-) diff --git a/opengate/processing.py b/opengate/processing.py index 44b8e64a2..c8fd0f6fd 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -4,7 +4,6 @@ from .exception import fatal from .base import GateObject - # define thin wrapper function to handle the queue def target_func(q, f, *args, **kwargs): q.put(f(*args, **kwargs)) From 31938906896ca12c3710ba2f986dffa46a8900f6 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 11 Oct 2024 12:13:21 +0200 Subject: [PATCH 123/174] create test080_multiprocessing_handler.py --- opengate/tests/src/test080_multiprocessing_handler.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/opengate/tests/src/test080_multiprocessing_handler.py b/opengate/tests/src/test080_multiprocessing_handler.py index db1d18e9f..26780a69f 100755 --- 
a/opengate/tests/src/test080_multiprocessing_handler.py +++ b/opengate/tests/src/test080_multiprocessing_handler.py @@ -6,8 +6,11 @@ from opengate.processing import MultiProcessingHandlerEqualPerRunTimingInterval + if __name__ == "__main__": - paths = get_default_test_paths(__file__, output_folder="test080") + paths = get_default_test_paths( + __file__, output_folder="test080" + ) s = g4_units.s From 552fae401ca72312665e7fd3efe4e28abdae1008 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:17:09 +0200 Subject: [PATCH 124/174] Implement FinalizeSimulation() in VoxelDepositActor --- opengate/actors/doseactors.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/opengate/actors/doseactors.py b/opengate/actors/doseactors.py index e71e153f8..dcbdaede4 100644 --- a/opengate/actors/doseactors.py +++ b/opengate/actors/doseactors.py @@ -405,20 +405,6 @@ class DoseActor(VoxelDepositActor, g4.GateDoseActor): ), }, ), - # "calculate_density_from": ( - # "auto", - # { - # "doc": "How should density be calculated?\n" - # "'simulation': via scoring along with the rest of the quantities.\n" - # "'image': from the CT image, if the actor is attached to an ImageVolume.\n" - # "'auto' (default): Let GATE pick the right one for you. ", - # "allowed_values": ( - # "auto", - # "simulation", - # "image" - # ), - # }, - # ), "ste_of_mean": ( False, { From 47bfa0689c4bed0442513ad1a0fef11f5950266b Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 02:35:33 +0200 Subject: [PATCH 125/174] Add test008_dose_actor_multiproc.py --- .../tests/src/test008_dose_actor_multiproc.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index cd2e8bde5..1884e64a0 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -123,6 +123,29 @@ tolerance=13, sum_tolerance=1, ) + # start simulation + sim.run(number_of_sub_processes=4 ) + + # # print results at the end + # print(stat) + # print(dose) + # + # # tests + # stats_ref = utility.read_stat_file(ref_path / "stat.txt") + # is_ok = utility.assert_stats(stat, stats_ref, 0.11) + # + # print("\nDifference for EDEP") + # is_ok = ( + # utility.assert_images( + # ref_path / "output-Edep.mhd", + # dose.edep.get_output_path(), + # stat, + # tolerance=13, + # ignore_value=0, + # sum_tolerance=1, + # ) + # and is_ok + # ) # # print("\nDifference for uncertainty") # is_ok = ( From 352ac37db2cc52f920473ab96a4932fe9fffaaf9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 13 Oct 2024 00:39:23 +0000 Subject: [PATCH 126/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/managers.py | 1 - opengate/processing.py | 1 + opengate/tests/src/test008_dose_actor_multiproc.py | 2 +- opengate/tests/src/test080_multiprocessing_handler.py | 5 +---- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index b8d9d93be..651adf7c3 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1838,7 +1838,6 @@ def run( simulation=self, number_of_processes= number_of_sub_processes - , ) multi_proc_handler.initialize() try: diff --git a/opengate/processing.py b/opengate/processing.py index c8fd0f6fd..44b8e64a2 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -4,6 +4,7 @@ from .exception import fatal from .base import GateObject 
+ # define thin wrapper function to handle the queue def target_func(q, f, *args, **kwargs): q.put(f(*args, **kwargs)) diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index 1884e64a0..8dbf677dc 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -124,7 +124,7 @@ sum_tolerance=1, ) # start simulation - sim.run(number_of_sub_processes=4 ) + sim.run(number_of_sub_processes=4) # # print results at the end # print(stat) diff --git a/opengate/tests/src/test080_multiprocessing_handler.py b/opengate/tests/src/test080_multiprocessing_handler.py index 26780a69f..db1d18e9f 100755 --- a/opengate/tests/src/test080_multiprocessing_handler.py +++ b/opengate/tests/src/test080_multiprocessing_handler.py @@ -6,11 +6,8 @@ from opengate.processing import MultiProcessingHandlerEqualPerRunTimingInterval - if __name__ == "__main__": - paths = get_default_test_paths( - __file__, output_folder="test080" - ) + paths = get_default_test_paths(__file__, output_folder="test080") s = g4_units.s From cb90f2974223767739e93e9ff8f5662c1d7a5fa1 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sun, 13 Oct 2024 23:33:44 +0200 Subject: [PATCH 127/174] Rename FinalizeSimulation() to EndOfMultiProcessAction() --- opengate/managers.py | 1 - 1 file changed, 1 deletion(-) diff --git a/opengate/managers.py b/opengate/managers.py index 651adf7c3..dbec2fa7a 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1896,7 +1896,6 @@ def run( # because everything is already in place. output = self._run_simulation_engine(False) self.meta_data.extract_from_simulation_output(output) - if self.store_json_archive is True: self.to_json_file() From d4d0f4ba8fb8cc94fb237bb5d3f7ff02ce493d14 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 15 Oct 2024 00:01:28 +0200 Subject: [PATCH 128/174] Update test008_dose_actor_multiproc.py --- .../tests/src/test008_dose_actor_multiproc.py | 52 ++++++++++++------- 1 file changed, 33 insertions(+), 19 deletions(-) diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index 8dbf677dc..aae65b36e 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -124,28 +124,42 @@ sum_tolerance=1, ) # start simulation + t1 = time.time() + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_4' sim.run(number_of_sub_processes=4) + t2 = time.time() + delta_t_nproc4 = t2 - t1 + + path_edep_nproc4 = dose.edep.get_output_path() + + t1 = time.time() + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_1' + sim.run(number_of_sub_processes=1) + t2 = time.time() + delta_t_nproc1 = t2 - t1 + + path_edep_nproc1 = dose.edep.get_output_path() + + # t1 = time.time() + # sim.run(number_of_sub_processes=0) + # t2 = time.time() + # delta_t_no_subproc = t2 - t1 + + print("Simulation times: ") + print(f"One subprocess: {delta_t_nproc1}") + print(f"Four subprocesses: {delta_t_nproc4}, speed-up: {delta_t_nproc1 / delta_t_nproc4}") + # print(f"No subprocess: {delta_t_no_subproc}") - # # print results at the end - # print(stat) - # print(dose) - # # # tests - # stats_ref = utility.read_stat_file(ref_path / "stat.txt") - # is_ok = utility.assert_stats(stat, stats_ref, 0.11) - # - # print("\nDifference for EDEP") - # is_ok = ( - # utility.assert_images( - # ref_path / "output-Edep.mhd", - # dose.edep.get_output_path(), - # stat, 
- # tolerance=13, - # ignore_value=0, - # sum_tolerance=1, - # ) - # and is_ok - # ) + print("\nDifference for EDEP") + is_ok = utility.assert_images( + path_edep_nproc1, + path_edep_nproc4, + stat, + tolerance=13, + ignore_value=0, + sum_tolerance=1, + ) # # print("\nDifference for uncertainty") # is_ok = ( From 8cb80b86ef838d0460c32d0d2082fb595d75113e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 14 Oct 2024 22:02:29 +0000 Subject: [PATCH 129/174] [pre-commit.ci] Automatic python and c++ formatting --- .../tests/src/test008_dose_actor_multiproc.py | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index aae65b36e..8682ee4af 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -125,7 +125,7 @@ ) # start simulation t1 = time.time() - sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_4' + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / "nproc_4" sim.run(number_of_sub_processes=4) t2 = time.time() delta_t_nproc4 = t2 - t1 @@ -133,7 +133,7 @@ path_edep_nproc4 = dose.edep.get_output_path() t1 = time.time() - sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / 'nproc_1' + sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / "nproc_1" sim.run(number_of_sub_processes=1) t2 = time.time() delta_t_nproc1 = t2 - t1 @@ -147,18 +147,20 @@ print("Simulation times: ") print(f"One subprocess: {delta_t_nproc1}") - print(f"Four subprocesses: {delta_t_nproc4}, speed-up: {delta_t_nproc1 / delta_t_nproc4}") + print( + f"Four subprocesses: {delta_t_nproc4}, speed-up: {delta_t_nproc1 / delta_t_nproc4}" + ) # print(f"No subprocess: {delta_t_no_subproc}") # # tests print("\nDifference for EDEP") is_ok = utility.assert_images( - path_edep_nproc1, - path_edep_nproc4, - stat, - tolerance=13, - ignore_value=0, - sum_tolerance=1, + path_edep_nproc1, + path_edep_nproc4, + stat, + tolerance=13, + ignore_value=0, + sum_tolerance=1, ) # # print("\nDifference for uncertainty") From ad36e564047827041837b65f0f788587abdfeb46 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 11:01:09 +0000 Subject: [PATCH 130/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/managers.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 445a6cbe7..1f0eac9fc 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1780,9 +1780,7 @@ def run_in_process( actor.write_to_disk = False output = self._run_simulation_engine(True, process_index=process_index) - print( - f"run_in_process finished in process {process_index}" - ) + print(f"run_in_process finished in process {process_index}") return output def run( @@ -1840,8 +1838,7 @@ def run( multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval( name="multi_proc_handler", simulation=self, - number_of_processes= - number_of_sub_processes + number_of_processes=number_of_sub_processes, ) multi_proc_handler.initialize() try: From 216209ff043977e7b33f094a96e8d3b2af6d776c Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 12:16:07 +0100 Subject: [PATCH 131/174] rename test080 on multi processing to test082_xxx --- ...{test080_multiprocessing_1.py => 
test082_multiprocessing_1.py} | 0 ...tiprocessing_handler.py => test082_multiprocessing_handler.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename opengate/tests/src/{test080_multiprocessing_1.py => test082_multiprocessing_1.py} (100%) rename opengate/tests/src/{test080_multiprocessing_handler.py => test082_multiprocessing_handler.py} (100%) diff --git a/opengate/tests/src/test080_multiprocessing_1.py b/opengate/tests/src/test082_multiprocessing_1.py similarity index 100% rename from opengate/tests/src/test080_multiprocessing_1.py rename to opengate/tests/src/test082_multiprocessing_1.py diff --git a/opengate/tests/src/test080_multiprocessing_handler.py b/opengate/tests/src/test082_multiprocessing_handler.py similarity index 100% rename from opengate/tests/src/test080_multiprocessing_handler.py rename to opengate/tests/src/test082_multiprocessing_handler.py From 8063a4dc8ad93929ec6656572d22f1fa4a743d83 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 12:16:26 +0100 Subject: [PATCH 132/174] Add merge_root to processing.py --- opengate/processing.py | 87 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 87 insertions(+) diff --git a/opengate/processing.py b/opengate/processing.py index 44b8e64a2..edc42a45d 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -1,5 +1,8 @@ import multiprocessing import queue +import numpy as np +import tqdm +import uproot from .exception import fatal from .base import GateObject @@ -142,3 +145,87 @@ def generate_dispatch_configuration(self): process_index += 1 self.dispatch_configuration = dispatch_configuration return dispatch_configuration + +def unicity(root_keys): + """ + Return an array containing the keys of the root file only one (without the version number) + """ + root_array = [] + for key in root_keys: + name = key.split(";") + if len(name) > 2: + name = ";".join(name) + else: + name = name[0] + if name not in root_array: + root_array.append(name) + return root_array + + +def merge_root(rootfiles, outputfile, increment_run_id=False): + """ + Merge root files in output files + """ + + uproot.default_library = "np" + + out = uproot.recreate(outputfile) + + # Previous ID values to be able to increment runIn or EventId + previous_id = {} + + # create the dict reading all input root files + trees = {} # TTree with TBranch + hists = {} # Directory with THist + pbar = tqdm.tqdm(total=len(rootfiles)) + for rf in rootfiles: + root = uproot.open(rf) + tree_names = unicity(root.keys()) + for tree_name in tree_names: + if hasattr(root[tree_name], 'keys'): + if tree_name not in trees: + trees[tree_name] = {"rootDictType": {}, "rootDictValue": {}} + hists[tree_name] = {"rootDictType": {}, "rootDictValue": {}} + previous_id[tree_name] = {} + for branch in root[tree_name].keys(): + if isinstance(root[tree_name], uproot.reading.ReadOnlyDirectory): + print(branch) + array = root[tree_name][branch].values() + if len(array) > 0: + branch_name = tree_name + "/" + branch + if isinstance(array[0], str): + array = np.zeros(len(array)) + if branch_name not in hists[tree_name]["rootDictType"]: + hists[tree_name]["rootDictType"][branch_name] = root[tree_name][branch].to_numpy() + hists[tree_name]["rootDictValue"][branch_name] = np.zeros(len(array)) + hists[tree_name]["rootDictValue"][branch_name] += array + else: + array = root[tree_name][branch].array(library="np") + if len(array) > 0 and not isinstance(array[0], np.ndarray): + if isinstance(array[0], str): + array = np.zeros(len(array)) + if branch not in 
trees[tree_name]["rootDictType"]: + trees[tree_name]["rootDictType"][branch] = type(array[0]) + trees[tree_name]["rootDictValue"][branch] = np.array([]) + if (not increment_run_id and branch.startswith('eventID')) or ( + increment_run_id and branch.startswith('runID')): + if branch not in previous_id[tree_name]: + previous_id[tree_name][branch] = 0 + array += previous_id[tree_name][branch] + previous_id[tree_name][branch] = max(array) + 1 + trees[tree_name]["rootDictValue"][branch] = np.append( + trees[tree_name]["rootDictValue"][branch], array) + pbar.update(1) + pbar.close() + + # Set the dict in the output root file + for tree_name in trees: + if not trees[tree_name]["rootDictValue"] == {} or not trees[tree_name]["rootDictType"] == {}: + # out.mktree(tree, trees[tree]["rootDictType"]) + out[tree_name] = trees[tree_name]["rootDictValue"] + for hist in hists.values(): + if len(hist["rootDictValue"]) > 0 and len(hist["rootDictType"]) > 0: + for branch in hist["rootDictValue"]: + for i in range(len(hist["rootDictValue"][branch])): + hist["rootDictType"][branch][0][i] = hist["rootDictValue"][branch][i] + out[branch[:-2]] = hist["rootDictType"][branch] From 3729d2580e71507feb1d8d301000d088d9248eb9 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:03:00 +0100 Subject: [PATCH 133/174] rename import_data_from_actor_output to merge_data_from_actor_output --- opengate/actors/actoroutput.py | 4 ++-- opengate/actors/base.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 44e5a2476..fc486c70d 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -385,7 +385,7 @@ def load_data(self, which): f"but it should be implemented in the specific derived class" ) - def import_data_from_actor_output(self, *actor_output, **kwargs): + def merge_data_from_actor_output(self, *actor_output, **kwargs): raise NotImplementedError("This is the base class. ") @@ -437,7 +437,7 @@ def end_of_simulation(self, **kwargs): f"A developer needs to fix this. 
" ) - def import_data_from_actor_output(self, *actor_output, discard_existing_data=True): + def merge_data_from_actor_output(self, *actor_output, discard_existing_data=True, **kwargs): run_indices_to_import = set() for ao in actor_output: run_indices_to_import.union(ao.data_per_run.keys()) diff --git a/opengate/actors/base.py b/opengate/actors/base.py index 3bda7209c..9973fc8d1 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -439,8 +439,8 @@ def import_user_output_from_actor(self, *actor): else: for k in self.user_output: try: - self.user_output[k].import_data_from_actor_output( - *[a.user_output[k] for a in actor] + self.user_output[k].merge_data_from_actor_output( + *[a.user_output[k] for a in actor], **kwargs ) except NotImplementedError: self.warn_user( From fafbb32ac24a20a93b9b5d16d7a45ee19735cc28 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:03:27 +0100 Subject: [PATCH 134/174] Extend signature of import_user_output_from_actor by **kwargs --- opengate/actors/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengate/actors/base.py b/opengate/actors/base.py index 9973fc8d1..1059b43a7 100644 --- a/opengate/actors/base.py +++ b/opengate/actors/base.py @@ -431,7 +431,7 @@ def recover_user_output(self, actor): for v in self.interfaces_to_user_output.values(): v.belongs_to_actor = self - def import_user_output_from_actor(self, *actor): + def import_user_output_from_actor(self, *actor, **kwargs): if not all([self.type_name == a.type_name for a in actor]): fatal("An actor can only import user output from the same type of actor.") if len(actor) == 1: From 69d964992f7ce89caee8424a3aaf4f573499d617 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:04:09 +0100 Subject: [PATCH 135/174] Add process_index attribute to SimulationEngine and SimulationOutput --- opengate/engines.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/opengate/engines.py b/opengate/engines.py index afdf9eeae..c2e4bfde8 100644 --- a/opengate/engines.py +++ b/opengate/engines.py @@ -926,6 +926,7 @@ def __init__(self): self.current_random_seed = None self.user_hook_log = [] self.warnings = None + self.process_index = None def store_output_from_simulation_engine(self, simulation_engine): self.store_actors(simulation_engine) @@ -937,6 +938,7 @@ def store_output_from_simulation_engine(self, simulation_engine): ) self.warnings = simulation_engine.simulation.warnings self.simulation_id = id(simulation_engine.simulation) + self.process_index = simulation_engine.process_index def store_actors(self, simulation_engine): self.actors = simulation_engine.simulation.actor_manager.actors From 33fdb506200e204d3a0ebf332f68ff0c3aa59271 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:05:07 +0100 Subject: [PATCH 136/174] Implement clear_output_dir_before_run option in sim.run() --- opengate/managers.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index 1f0eac9fc..053e9b2f8 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -36,6 +36,7 @@ ensure_directory_exists, ensure_filename_is_str, insert_suffix_before_extension, + delete_folder_contents ) from . 
import logger from .logger import log @@ -1788,6 +1789,7 @@ def run( start_new_process=False, number_of_sub_processes=0, avoid_write_to_disk_in_subprocess=True, + clear_output_dir_before_run=False ): # if windows and MT -> fail if os.name == "nt" and self.multithreaded: @@ -1803,6 +1805,9 @@ def run( self.meta_data.number_of_sub_processes = number_of_sub_processes self.meta_data.start_new_process = start_new_process + if clear_output_dir_before_run is True: + delete_folder_contents(self.get_output_path()) + for actor in self.actor_manager.actors.values(): actor.reset_user_output() From 26d6c54a8c854e5f85ffdef54192e40b48efb9f3 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:06:59 +0100 Subject: [PATCH 137/174] Implement sub_process_registry --- opengate/managers.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index 053e9b2f8..d8354a95f 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1531,6 +1531,7 @@ def __init__(self, name="simulation", **kwargs): self.meta_data = SimulationMetaData() self.meta_data_per_process = {} + self.sub_process_registry = None # main managers self.volume_manager = VolumeManager(self) @@ -1789,6 +1790,7 @@ def run( start_new_process=False, number_of_sub_processes=0, avoid_write_to_disk_in_subprocess=True, + merge_after_multiprocessing=True, clear_output_dir_before_run=False ): # if windows and MT -> fail @@ -1853,13 +1855,19 @@ def run( pass # q = multiprocessing.Queue() + self.sub_process_registry = dict([(i, {"output_dir": str(Path(self.output_dir) / f"process_{i}")}) + for i in range(number_of_sub_processes)]) + with multiprocessing.Pool(number_of_sub_processes) as pool: results = [ pool.apply_async( self.run_in_process, - (multi_proc_handler, i, avoid_write_to_disk_in_subprocess), + (multi_proc_handler, + k, + v["output_dir"], + avoid_write_to_disk_in_subprocess), ) - for i in range(number_of_sub_processes) + for k, v in self.sub_process_registry.items() ] # `.apply_async()` immediately returns AsyncResult (ApplyResult) object list_of_output = [res.get() for res in results] From 9fe49157c9d507035653222c9bc2fb019b8d96e8 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:08:23 +0100 Subject: [PATCH 138/174] Make multi_proc_handler an instance attribute --- opengate/managers.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index d8354a95f..d13e6c84c 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1532,6 +1532,7 @@ def __init__(self, name="simulation", **kwargs): self.meta_data = SimulationMetaData() self.meta_data_per_process = {} self.sub_process_registry = None + self.multi_proc_handler = None # main managers self.volume_manager = VolumeManager(self) @@ -1842,12 +1843,13 @@ def run( self.meta_data.extract_from_simulation_output(output) elif number_of_sub_processes > 1: - multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval( + self.multi_proc_handler = MultiProcessingHandlerEqualPerRunTimingInterval( name="multi_proc_handler", simulation=self, number_of_processes=number_of_sub_processes, ) multi_proc_handler.initialize() + self.multi_proc_handler.initialize() try: multiprocessing.set_start_method("spawn") except RuntimeError: @@ -1862,7 +1864,7 @@ def run( results = [ pool.apply_async( self.run_in_process, - (multi_proc_handler, + (self.multi_proc_handler, k, v["output_dir"], avoid_write_to_disk_in_subprocess), From 
e1d58bf83dcb5e8cd5882bbea6215e26fac149de Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:09:11 +0100 Subject: [PATCH 139/174] Add output_dir as argument to run_in_process() --- opengate/managers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index d13e6c84c..6a3f4e751 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1755,14 +1755,14 @@ def _run_simulation_engine(self, start_new_process, process_index=None): return output def run_in_process( - self, multi_process_handler, process_index, avoid_write_to_disk_in_subprocess + self, multi_process_handler, process_index, output_dir, avoid_write_to_disk_in_subprocess ): # Important: this method is intended to run in a processes spawned off the main process. # Therefore, self is actually a separate instance from the original simulation # and we can safely adapt it in this process. # adapt the output_dir - self.output_dir = str(Path(self.output_dir) / f"process_{process_index}") + self.output_dir = output_dir if self.random_seed != "auto": self.random_seed += process_index @@ -1848,8 +1848,8 @@ def run( simulation=self, number_of_processes=number_of_sub_processes, ) - multi_proc_handler.initialize() self.multi_proc_handler.initialize() + __spec__ = None try: multiprocessing.set_start_method("spawn") except RuntimeError: From a70ba52817dae1bf29b8d643d3904f1058e3aab1 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:09:35 +0100 Subject: [PATCH 140/174] Let each process store a json in the subfolder --- opengate/managers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/opengate/managers.py b/opengate/managers.py index 6a3f4e751..e911e9d96 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1784,6 +1784,7 @@ def run_in_process( output = self._run_simulation_engine(True, process_index=process_index) print(f"run_in_process finished in process {process_index}") + self.to_json_file() return output def run( From f5f0d44f14f30a673926d2d8039c1a1fab0e1258 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:10:13 +0100 Subject: [PATCH 141/174] Capsulate code into merge_simulations_from_multiprocessing() --- opengate/managers.py | 75 ++++++++++++++++++++++++++------------------ 1 file changed, 44 insertions(+), 31 deletions(-) diff --git a/opengate/managers.py b/opengate/managers.py index e911e9d96..c3d12ada4 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1876,43 +1876,15 @@ def run( list_of_output = [res.get() for res in results] log.info("End of multiprocessing") - # FOR DEBUGGING. 
remove when ready - self.multi_proc_handler = multi_proc_handler + if merge_after_multiprocessing is True: + self.merge_simulations_from_multiprocessing(list_of_output) - # loop over actors in original simulation - for actor in self.actor_manager.actors.values(): - actor.import_user_output_from_actor( - *[ - o.get_actor(actor.name) for o in list_of_output - ] # these are the actors from the process - ) - - for actor in self.actor_manager.actors.values(): - actor.EndOfMultiProcessAction() - - self.meta_data.extract_from_simulation_output(*list_of_output) - for i, o in enumerate(list_of_output): - self.meta_data_per_process[i] = SimulationMetaData(simulation_output=o) - - # FIXME: temporary workaround to collect extra info from output - # will be implemented similar to actor.import_user_output_from_actor after source refactoring - for source in self.source_manager.user_info_sources.values(): - for o in list_of_output: - try: - s = o.get_source(source.name) - except: - continue - if "fTotalSkippedEvents" in s.user_info.__dict__: - if not hasattr(source, "fTotalSkippedEvents"): - source.fTotalSkippedEvents = 0 - source.fTotalZeroEvents = 0 - source.fTotalSkippedEvents += s.user_info.fTotalSkippedEvents - source.fTotalZeroEvents += s.user_info.fTotalZeroEvents else: # Nothing special to do if the simulation engine ran in the native python process # because everything is already in place. output = self._run_simulation_engine(False) self.meta_data.extract_from_simulation_output(output) + if self.store_json_archive is True: self.to_json_file() @@ -1929,6 +1901,47 @@ def run( print() print("*" * 20) + def merge_simulations_from_multiprocessing(self, list_of_output): + """To be run after a simulation has run in a multiple subprocesses. + Currently, the input is a list of SimulationOutput instances, + but in the future the input will be a list of Simulation instances + (returned or recreated from the subprocesses). """ + if self.multi_proc_handler is None: + fatal("Cannot execute merge_simulations_from_multiprocessing without a multi_proc_handler. 
") + + luts_run_index = [self.multi_proc_handler.get_original_run_timing_indices_for_process(o.process_index) + for o in list_of_output] + + # loop over actors in original simulation + for actor in self.actor_manager.actors.values(): + actors_to_merge = [o.get_actor(actor.name) for o in list_of_output] # these are the actors from the process + actor.import_user_output_from_actor( + *actors_to_merge, + luts_run_index=luts_run_index + ) + + for actor in self.actor_manager.actors.values(): + actor.EndOfMultiProcessAction() + + self.meta_data.extract_from_simulation_output(*list_of_output) + for i, o in enumerate(list_of_output): + self.meta_data_per_process[i] = SimulationMetaData(simulation_output=o) + + # FIXME: temporary workaround to collect extra info from output + # will be implemented similar to actor.import_user_output_from_actor after source refactoring + for source in self.source_manager.user_info_sources.values(): + for o in list_of_output: + try: + s = o.get_source(source.name) + except: + continue + if "fTotalSkippedEvents" in s.user_info.__dict__: + if not hasattr(source, "fTotalSkippedEvents"): + source.fTotalSkippedEvents = 0 + source.fTotalZeroEvents = 0 + source.fTotalSkippedEvents += s.user_info.fTotalSkippedEvents + source.fTotalZeroEvents += s.user_info.fTotalZeroEvents + def voxelize_geometry( self, extent="auto", From c9b9b6649aaecf9f912432a9edd12958fa82bb1c Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:12:49 +0100 Subject: [PATCH 142/174] check global variable __spec__ in run() to avoid breaking when running multiproc in interactive session --- opengate/managers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/opengate/managers.py b/opengate/managers.py index c3d12ada4..009f8eb15 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1795,6 +1795,7 @@ def run( merge_after_multiprocessing=True, clear_output_dir_before_run=False ): + global __spec__ # if windows and MT -> fail if os.name == "nt" and self.multithreaded: fatal( @@ -1854,7 +1855,7 @@ def run( try: multiprocessing.set_start_method("spawn") except RuntimeError: - print("Could not set start method 'spawn'.") + print(f"Could not set start method 'spawn'. 
__spec__ = {__spec__}") pass # q = multiprocessing.Queue() From 43a8292daba8b2a0cf918223eef2eb6f9f863d88 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:13:34 +0100 Subject: [PATCH 143/174] Remove merge_root() and unicity(): now in ActorOutputRoot class --- opengate/processing.py | 84 ------------------------------------------ 1 file changed, 84 deletions(-) diff --git a/opengate/processing.py b/opengate/processing.py index edc42a45d..34ef7a40b 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -145,87 +145,3 @@ def generate_dispatch_configuration(self): process_index += 1 self.dispatch_configuration = dispatch_configuration return dispatch_configuration - -def unicity(root_keys): - """ - Return an array containing the keys of the root file only one (without the version number) - """ - root_array = [] - for key in root_keys: - name = key.split(";") - if len(name) > 2: - name = ";".join(name) - else: - name = name[0] - if name not in root_array: - root_array.append(name) - return root_array - - -def merge_root(rootfiles, outputfile, increment_run_id=False): - """ - Merge root files in output files - """ - - uproot.default_library = "np" - - out = uproot.recreate(outputfile) - - # Previous ID values to be able to increment runIn or EventId - previous_id = {} - - # create the dict reading all input root files - trees = {} # TTree with TBranch - hists = {} # Directory with THist - pbar = tqdm.tqdm(total=len(rootfiles)) - for rf in rootfiles: - root = uproot.open(rf) - tree_names = unicity(root.keys()) - for tree_name in tree_names: - if hasattr(root[tree_name], 'keys'): - if tree_name not in trees: - trees[tree_name] = {"rootDictType": {}, "rootDictValue": {}} - hists[tree_name] = {"rootDictType": {}, "rootDictValue": {}} - previous_id[tree_name] = {} - for branch in root[tree_name].keys(): - if isinstance(root[tree_name], uproot.reading.ReadOnlyDirectory): - print(branch) - array = root[tree_name][branch].values() - if len(array) > 0: - branch_name = tree_name + "/" + branch - if isinstance(array[0], str): - array = np.zeros(len(array)) - if branch_name not in hists[tree_name]["rootDictType"]: - hists[tree_name]["rootDictType"][branch_name] = root[tree_name][branch].to_numpy() - hists[tree_name]["rootDictValue"][branch_name] = np.zeros(len(array)) - hists[tree_name]["rootDictValue"][branch_name] += array - else: - array = root[tree_name][branch].array(library="np") - if len(array) > 0 and not isinstance(array[0], np.ndarray): - if isinstance(array[0], str): - array = np.zeros(len(array)) - if branch not in trees[tree_name]["rootDictType"]: - trees[tree_name]["rootDictType"][branch] = type(array[0]) - trees[tree_name]["rootDictValue"][branch] = np.array([]) - if (not increment_run_id and branch.startswith('eventID')) or ( - increment_run_id and branch.startswith('runID')): - if branch not in previous_id[tree_name]: - previous_id[tree_name][branch] = 0 - array += previous_id[tree_name][branch] - previous_id[tree_name][branch] = max(array) + 1 - trees[tree_name]["rootDictValue"][branch] = np.append( - trees[tree_name]["rootDictValue"][branch], array) - pbar.update(1) - pbar.close() - - # Set the dict in the output root file - for tree_name in trees: - if not trees[tree_name]["rootDictValue"] == {} or not trees[tree_name]["rootDictType"] == {}: - # out.mktree(tree, trees[tree]["rootDictType"]) - out[tree_name] = trees[tree_name]["rootDictValue"] - for hist in hists.values(): - if len(hist["rootDictValue"]) > 0 and len(hist["rootDictType"]) > 0: - for branch in 
hist["rootDictValue"]: - for i in range(len(hist["rootDictValue"][branch])): - hist["rootDictType"][branch][0][i] = hist["rootDictValue"][branch][i] - out[branch[:-2]] = hist["rootDictType"][branch] From facc182ce552a0bf5f9493b48b54474ac5e212e6 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:14:04 +0100 Subject: [PATCH 144/174] Implement ActorOutputRootmerge_data_from_actor_output() --- opengate/actors/actoroutput.py | 93 ++++++++++++++++++++++++++++++++++ 1 file changed, 93 insertions(+) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index fc486c70d..8bd07094e 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -1,5 +1,8 @@ from box import Box from typing import Optional +import uproot +import tqdm +import numpy as np from ..base import GateObject, process_cls from ..utility import insert_suffix_before_extension, ensure_filename_is_str @@ -957,6 +960,96 @@ def initialize_cpp_parameters(self): self.name, self.get_output_path_as_string() ) + def merge_data_from_actor_output(self, *actor_output, luts_run_index=None, **kwargs): + """ + luts_run_index: a list of lookup table, one for each root file. + The run index in the root file to be merged serves as array index to the lookup table. + The value recovered from the lookup table is the new run index to be written into the merged file. + """ + + uproot.default_library = "np" + + out = uproot.recreate(self.get_output_path()) + rootfiles = [a.get_output_path() for a in actor_output] + + # Previous ID values to be able to increment runIn or EventId + previous_id = {} + + # create the dict reading all input root files + trees = {} # TTree with TBranch + hists = {} # Directory with THist + pbar = tqdm.tqdm(total=len(rootfiles)) + for rootfile_index, rf in enumerate(rootfiles): + with uproot.open(rf) as root: + tree_names = unicity(root.keys()) + for tree_name in tree_names: + if hasattr(root[tree_name], 'keys'): + if tree_name not in trees: + trees[tree_name] = {"rootDictType": {}, "rootDictValue": {}} + hists[tree_name] = {"rootDictType": {}, "rootDictValue": {}} + previous_id[tree_name] = {} + for branch in root[tree_name].keys(): + # HISTOGRAMS + if isinstance(root[tree_name], uproot.reading.ReadOnlyDirectory): + print(branch) + array = root[tree_name][branch].values() + if len(array) > 0: + branch_name = tree_name + "/" + branch + if isinstance(array[0], str): + array = np.zeros(len(array)) + if branch_name not in hists[tree_name]["rootDictType"]: + hists[tree_name]["rootDictType"][branch_name] = root[tree_name][branch].to_numpy() + hists[tree_name]["rootDictValue"][branch_name] = np.zeros(len(array)) + hists[tree_name]["rootDictValue"][branch_name] += array + else: + # ARRAYS + array = root[tree_name][branch].array(library="np") + if len(array) > 0 and not isinstance(array[0], np.ndarray): + if isinstance(array[0], str): + array = np.zeros(len(array)) + if branch not in trees[tree_name]["rootDictType"]: + trees[tree_name]["rootDictType"][branch] = type(array[0]) + trees[tree_name]["rootDictValue"][branch] = np.array([]) + if branch.startswith('RunID') and luts_run_index is not None: + luts_run_index = np.asarray(luts_run_index) + array = luts_run_index[rootfile_index][array.astype(int)] + if branch.startswith('EventID'): + if branch not in previous_id[tree_name]: + previous_id[tree_name][branch] = 0 + array += previous_id[tree_name][branch] + previous_id[tree_name][branch] = max(array) + 1 + trees[tree_name]["rootDictValue"][branch] = np.append( + 
trees[tree_name]["rootDictValue"][branch], array) + pbar.update(1) + pbar.close() + + # Set the dict in the output root file + for tree_name in trees: + if not trees[tree_name]["rootDictValue"] == {} or not trees[tree_name]["rootDictType"] == {}: + # out.mktree(tree, trees[tree]["rootDictType"]) + out[tree_name] = trees[tree_name]["rootDictValue"] + for hist in hists.values(): + if len(hist["rootDictValue"]) > 0 and len(hist["rootDictType"]) > 0: + for branch in hist["rootDictValue"]: + for i in range(len(hist["rootDictValue"][branch])): + hist["rootDictType"][branch][0][i] = hist["rootDictValue"][branch][i] + out[branch[:-2]] = hist["rootDictType"][branch] + + +def unicity(root_keys): + """ + Return an array containing the keys of the root file only one (without the version number) + """ + root_array = [] + for key in root_keys: + name = key.split(";") + if len(name) > 2: + name = ";".join(name) + else: + name = name[0] + if name not in root_array: + root_array.append(name) + return root_array process_cls(ActorOutputBase) process_cls(MergeableActorOutput) From 5d06d5bdf05ae1bf66960d139a6c3c1552095f10 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:14:18 +0100 Subject: [PATCH 145/174] Add test019_phsp_actor_multiproc.py --- .../tests/src/test019_phsp_actor_multiproc.py | 116 ++++++++++++++++++ 1 file changed, 116 insertions(+) create mode 100755 opengate/tests/src/test019_phsp_actor_multiproc.py diff --git a/opengate/tests/src/test019_phsp_actor_multiproc.py b/opengate/tests/src/test019_phsp_actor_multiproc.py new file mode 100755 index 000000000..36e6d9688 --- /dev/null +++ b/opengate/tests/src/test019_phsp_actor_multiproc.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import opengate as gate +from opengate.tests import utility +import uproot +import numpy as np + + +if __name__ == "__main__": + paths = utility.get_default_test_paths(__file__, "", output_folder="test019_multiproc") + + # create the simulation + sim = gate.Simulation() + + # main options + sim.output_dir = paths.output + sim.g4_verbose = False + sim.visu = False + sim.visu_type = "vrml" + sim.check_volumes_overlap = False + sim.number_of_threads = 1 + sim.random_seed = 321654 + + # units + m = gate.g4_units.m + mm = gate.g4_units.mm + nm = gate.g4_units.nm + Bq = gate.g4_units.Bq + MeV = gate.g4_units.MeV + + # adapt world size + sim.world.size = [1 * m, 1 * m, 1 * m] + sim.world.material = "G4_AIR" + + # virtual plane for phase space + plane = sim.add_volume("Tubs", "phase_space_plane") + plane.mother = sim.world + plane.material = "G4_AIR" + plane.rmin = 0 + plane.rmax = 700 * mm + plane.dz = 1 * nm # half height + plane.translation = [0, 0, -100 * mm] + plane.color = [1, 0, 0, 1] # red + + # e- source + source = sim.add_source("GenericSource", "Default") + source.particle = "gamma" + source.energy.type = "gauss" + source.energy.mono = 1 * MeV + source.energy.sigma_gauss = 0.5 * MeV + source.position.type = "disc" + source.position.radius = 20 * mm + source.position.translation = [0, 0, 0 * mm] + source.direction.type = "momentum" + source.n = 66 + + # add stat actor + stats_actor = sim.add_actor("SimulationStatisticsActor", "Stats") + stats_actor.track_types_flag = True + + # PhaseSpace Actor + phsp_actor = sim.add_actor("PhaseSpaceActor", "PhaseSpace") + phsp_actor.attached_to = plane.name + phsp_actor.attributes = [ + "KineticEnergy", + "PostPosition", + "PrePosition", + "PrePositionLocal", + "ParticleName", + "PreDirection", + "PreDirectionLocal", + "PostDirection", + 
"TimeFromBeginOfEvent", + "GlobalTime", + "LocalTime", + "EventPosition", + "PDGCode", + "EventID", + "RunID" + ] + phsp_actor.debug = False + + # run the simulation once with no particle in the phsp + source.direction.momentum = [0, 0, -1] + phsp_actor.output_filename = "test019_phsp_actor.root" + + sim.run_timing_intervals = [(i, i+1) for i in range(3)] + + # run + nb_proc = 3 + sim.output_dir = paths.output / "multiproc" + sim.run(number_of_sub_processes=nb_proc, + avoid_write_to_disk_in_subprocess=False, + clear_output_dir_before_run=True) + path_phsp_output_single = phsp_actor.get_output_path() + + sim.output_dir = paths.output / "singleproc" + sim.run(number_of_sub_processes=nb_proc, + avoid_write_to_disk_in_subprocess=False, + clear_output_dir_before_run=True) + path_phsp_output_multi = phsp_actor.get_output_path() + + + f_multi = uproot.open(path_phsp_output_multi) + eventid_multi = np.asarray(f_multi['PhaseSpace;1']['EventID']) + runid_multi = np.asarray(f_multi['PhaseSpace;1']['RunID']) + + f_single = uproot.open(path_phsp_output_single) + eventid_single = np.asarray(f_single['PhaseSpace;1']['EventID']) + runid_single = np.asarray(f_single['PhaseSpace;1']['RunID']) + + assert len(set(eventid_multi)) == len(eventid_multi) + assert set(runid_multi) == set([i for i in range(len(sim.run_timing_intervals))]) + assert set(eventid_single) == set(eventid_multi) + assert set(runid_single) == set(runid_multi) From 01d0ea52ab062c3c200f822e4d6ec880a1a1aa36 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:15:48 +0000 Subject: [PATCH 146/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/actors/actoroutput.py | 72 ++++++++++++++----- opengate/managers.py | 49 ++++++++----- .../tests/src/test019_phsp_actor_multiproc.py | 33 +++++---- 3 files changed, 106 insertions(+), 48 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 8bd07094e..233ad0eb0 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -440,7 +440,9 @@ def end_of_simulation(self, **kwargs): f"A developer needs to fix this. " ) - def merge_data_from_actor_output(self, *actor_output, discard_existing_data=True, **kwargs): + def merge_data_from_actor_output( + self, *actor_output, discard_existing_data=True, **kwargs + ): run_indices_to_import = set() for ao in actor_output: run_indices_to_import.union(ao.data_per_run.keys()) @@ -960,7 +962,9 @@ def initialize_cpp_parameters(self): self.name, self.get_output_path_as_string() ) - def merge_data_from_actor_output(self, *actor_output, luts_run_index=None, **kwargs): + def merge_data_from_actor_output( + self, *actor_output, luts_run_index=None, **kwargs + ): """ luts_run_index: a list of lookup table, one for each root file. The run index in the root file to be merged serves as array index to the lookup table. 
@@ -983,56 +987,87 @@ def merge_data_from_actor_output(self, *actor_output, luts_run_index=None, **kwa with uproot.open(rf) as root: tree_names = unicity(root.keys()) for tree_name in tree_names: - if hasattr(root[tree_name], 'keys'): + if hasattr(root[tree_name], "keys"): if tree_name not in trees: trees[tree_name] = {"rootDictType": {}, "rootDictValue": {}} hists[tree_name] = {"rootDictType": {}, "rootDictValue": {}} previous_id[tree_name] = {} for branch in root[tree_name].keys(): # HISTOGRAMS - if isinstance(root[tree_name], uproot.reading.ReadOnlyDirectory): + if isinstance( + root[tree_name], uproot.reading.ReadOnlyDirectory + ): print(branch) array = root[tree_name][branch].values() if len(array) > 0: branch_name = tree_name + "/" + branch if isinstance(array[0], str): array = np.zeros(len(array)) - if branch_name not in hists[tree_name]["rootDictType"]: - hists[tree_name]["rootDictType"][branch_name] = root[tree_name][branch].to_numpy() - hists[tree_name]["rootDictValue"][branch_name] = np.zeros(len(array)) - hists[tree_name]["rootDictValue"][branch_name] += array + if ( + branch_name + not in hists[tree_name]["rootDictType"] + ): + hists[tree_name]["rootDictType"][ + branch_name + ] = root[tree_name][branch].to_numpy() + hists[tree_name]["rootDictValue"][ + branch_name + ] = np.zeros(len(array)) + hists[tree_name]["rootDictValue"][ + branch_name + ] += array else: # ARRAYS array = root[tree_name][branch].array(library="np") - if len(array) > 0 and not isinstance(array[0], np.ndarray): + if len(array) > 0 and not isinstance( + array[0], np.ndarray + ): if isinstance(array[0], str): array = np.zeros(len(array)) if branch not in trees[tree_name]["rootDictType"]: - trees[tree_name]["rootDictType"][branch] = type(array[0]) - trees[tree_name]["rootDictValue"][branch] = np.array([]) - if branch.startswith('RunID') and luts_run_index is not None: + trees[tree_name]["rootDictType"][branch] = type( + array[0] + ) + trees[tree_name]["rootDictValue"][branch] = ( + np.array([]) + ) + if ( + branch.startswith("RunID") + and luts_run_index is not None + ): luts_run_index = np.asarray(luts_run_index) - array = luts_run_index[rootfile_index][array.astype(int)] - if branch.startswith('EventID'): + array = luts_run_index[rootfile_index][ + array.astype(int) + ] + if branch.startswith("EventID"): if branch not in previous_id[tree_name]: previous_id[tree_name][branch] = 0 array += previous_id[tree_name][branch] previous_id[tree_name][branch] = max(array) + 1 - trees[tree_name]["rootDictValue"][branch] = np.append( - trees[tree_name]["rootDictValue"][branch], array) + trees[tree_name]["rootDictValue"][branch] = ( + np.append( + trees[tree_name]["rootDictValue"][branch], + array, + ) + ) pbar.update(1) pbar.close() # Set the dict in the output root file for tree_name in trees: - if not trees[tree_name]["rootDictValue"] == {} or not trees[tree_name]["rootDictType"] == {}: + if ( + not trees[tree_name]["rootDictValue"] == {} + or not trees[tree_name]["rootDictType"] == {} + ): # out.mktree(tree, trees[tree]["rootDictType"]) out[tree_name] = trees[tree_name]["rootDictValue"] for hist in hists.values(): if len(hist["rootDictValue"]) > 0 and len(hist["rootDictType"]) > 0: for branch in hist["rootDictValue"]: for i in range(len(hist["rootDictValue"][branch])): - hist["rootDictType"][branch][0][i] = hist["rootDictValue"][branch][i] + hist["rootDictType"][branch][0][i] = hist["rootDictValue"][ + branch + ][i] out[branch[:-2]] = hist["rootDictType"][branch] @@ -1051,6 +1086,7 @@ def unicity(root_keys): 
root_array.append(name) return root_array + process_cls(ActorOutputBase) process_cls(MergeableActorOutput) process_cls(ActorOutputUsingDataItemContainer) diff --git a/opengate/managers.py b/opengate/managers.py index 009f8eb15..2944d4ad8 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -36,7 +36,7 @@ ensure_directory_exists, ensure_filename_is_str, insert_suffix_before_extension, - delete_folder_contents + delete_folder_contents, ) from . import logger from .logger import log @@ -1755,7 +1755,11 @@ def _run_simulation_engine(self, start_new_process, process_index=None): return output def run_in_process( - self, multi_process_handler, process_index, output_dir, avoid_write_to_disk_in_subprocess + self, + multi_process_handler, + process_index, + output_dir, + avoid_write_to_disk_in_subprocess, ): # Important: this method is intended to run in a processes spawned off the main process. # Therefore, self is actually a separate instance from the original simulation @@ -1793,7 +1797,7 @@ def run( number_of_sub_processes=0, avoid_write_to_disk_in_subprocess=True, merge_after_multiprocessing=True, - clear_output_dir_before_run=False + clear_output_dir_before_run=False, ): global __spec__ # if windows and MT -> fail @@ -1859,17 +1863,23 @@ def run( pass # q = multiprocessing.Queue() - self.sub_process_registry = dict([(i, {"output_dir": str(Path(self.output_dir) / f"process_{i}")}) - for i in range(number_of_sub_processes)]) + self.sub_process_registry = dict( + [ + (i, {"output_dir": str(Path(self.output_dir) / f"process_{i}")}) + for i in range(number_of_sub_processes) + ] + ) with multiprocessing.Pool(number_of_sub_processes) as pool: results = [ pool.apply_async( self.run_in_process, - (self.multi_proc_handler, - k, - v["output_dir"], - avoid_write_to_disk_in_subprocess), + ( + self.multi_proc_handler, + k, + v["output_dir"], + avoid_write_to_disk_in_subprocess, + ), ) for k, v in self.sub_process_registry.items() ] @@ -1906,19 +1916,26 @@ def merge_simulations_from_multiprocessing(self, list_of_output): """To be run after a simulation has run in a multiple subprocesses. Currently, the input is a list of SimulationOutput instances, but in the future the input will be a list of Simulation instances - (returned or recreated from the subprocesses). """ + (returned or recreated from the subprocesses).""" if self.multi_proc_handler is None: - fatal("Cannot execute merge_simulations_from_multiprocessing without a multi_proc_handler. ") + fatal( + "Cannot execute merge_simulations_from_multiprocessing without a multi_proc_handler. 
" + ) - luts_run_index = [self.multi_proc_handler.get_original_run_timing_indices_for_process(o.process_index) - for o in list_of_output] + luts_run_index = [ + self.multi_proc_handler.get_original_run_timing_indices_for_process( + o.process_index + ) + for o in list_of_output + ] # loop over actors in original simulation for actor in self.actor_manager.actors.values(): - actors_to_merge = [o.get_actor(actor.name) for o in list_of_output] # these are the actors from the process + actors_to_merge = [ + o.get_actor(actor.name) for o in list_of_output + ] # these are the actors from the process actor.import_user_output_from_actor( - *actors_to_merge, - luts_run_index=luts_run_index + *actors_to_merge, luts_run_index=luts_run_index ) for actor in self.actor_manager.actors.values(): diff --git a/opengate/tests/src/test019_phsp_actor_multiproc.py b/opengate/tests/src/test019_phsp_actor_multiproc.py index 36e6d9688..f64ffd833 100755 --- a/opengate/tests/src/test019_phsp_actor_multiproc.py +++ b/opengate/tests/src/test019_phsp_actor_multiproc.py @@ -8,7 +8,9 @@ if __name__ == "__main__": - paths = utility.get_default_test_paths(__file__, "", output_folder="test019_multiproc") + paths = utility.get_default_test_paths( + __file__, "", output_folder="test019_multiproc" + ) # create the simulation sim = gate.Simulation() @@ -77,7 +79,7 @@ "EventPosition", "PDGCode", "EventID", - "RunID" + "RunID", ] phsp_actor.debug = False @@ -85,30 +87,33 @@ source.direction.momentum = [0, 0, -1] phsp_actor.output_filename = "test019_phsp_actor.root" - sim.run_timing_intervals = [(i, i+1) for i in range(3)] + sim.run_timing_intervals = [(i, i + 1) for i in range(3)] # run nb_proc = 3 sim.output_dir = paths.output / "multiproc" - sim.run(number_of_sub_processes=nb_proc, - avoid_write_to_disk_in_subprocess=False, - clear_output_dir_before_run=True) + sim.run( + number_of_sub_processes=nb_proc, + avoid_write_to_disk_in_subprocess=False, + clear_output_dir_before_run=True, + ) path_phsp_output_single = phsp_actor.get_output_path() sim.output_dir = paths.output / "singleproc" - sim.run(number_of_sub_processes=nb_proc, - avoid_write_to_disk_in_subprocess=False, - clear_output_dir_before_run=True) + sim.run( + number_of_sub_processes=nb_proc, + avoid_write_to_disk_in_subprocess=False, + clear_output_dir_before_run=True, + ) path_phsp_output_multi = phsp_actor.get_output_path() - f_multi = uproot.open(path_phsp_output_multi) - eventid_multi = np.asarray(f_multi['PhaseSpace;1']['EventID']) - runid_multi = np.asarray(f_multi['PhaseSpace;1']['RunID']) + eventid_multi = np.asarray(f_multi["PhaseSpace;1"]["EventID"]) + runid_multi = np.asarray(f_multi["PhaseSpace;1"]["RunID"]) f_single = uproot.open(path_phsp_output_single) - eventid_single = np.asarray(f_single['PhaseSpace;1']['EventID']) - runid_single = np.asarray(f_single['PhaseSpace;1']['RunID']) + eventid_single = np.asarray(f_single["PhaseSpace;1"]["EventID"]) + runid_single = np.asarray(f_single["PhaseSpace;1"]["RunID"]) assert len(set(eventid_multi)) == len(eventid_multi) assert set(runid_multi) == set([i for i in range(len(sim.run_timing_intervals))]) From 245b73bc9fb668959290c93d20498bb397a1ce85 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Mon, 28 Oct 2024 17:16:39 +0100 Subject: [PATCH 147/174] Remove obsolete imports in processing.py --- opengate/processing.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/opengate/processing.py b/opengate/processing.py index 34ef7a40b..44b8e64a2 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -1,8 +1,5 @@ 
import multiprocessing import queue -import numpy as np -import tqdm -import uproot from .exception import fatal from .base import GateObject From aaed7a5eddbabaa540c50761881d207083f3a485 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 29 Oct 2024 14:25:02 +0100 Subject: [PATCH 148/174] Add property counts to SimulationStatisticsActor --- opengate/actors/miscactors.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/opengate/actors/miscactors.py b/opengate/actors/miscactors.py index dbab1679a..869cf14e8 100644 --- a/opengate/actors/miscactors.py +++ b/opengate/actors/miscactors.py @@ -34,6 +34,10 @@ def __initcpp__(self): g4.GateSimulationStatisticsActor.__init__(self, self.user_info) self.AddActions({"StartSimulationAction", "EndSimulationAction"}) + @property + def counts(self): + return self.user_output.stats.merged_data + def __str__(self): s = self.user_output["stats"].__str__() return s From c70cfef5e07d5a9944d894ad2e626c76ed665932 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 29 Oct 2024 14:25:38 +0100 Subject: [PATCH 149/174] remove obsolete UserInterfaceToActorOutputStatisticsActor --- opengate/actors/actoroutput.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 233ad0eb0..cb59f950b 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -179,17 +179,6 @@ def image(self): return self._user_output.get_data(**self._kwargs_for_interface_calls) -class UserInterfaceToActorOutputStatisticsActor( - UserInterfaceToActorOutputUsingDataItemContainer -): - - @property - def counts(self): - return self._user_output.get_data( - which="merged", **self._kwargs_for_interface_calls - ) - - def _setter_hook_belongs_to(self, belongs_to): if belongs_to is None: fatal("The belongs_to attribute of an ActorOutput cannot be None.") @@ -846,7 +835,7 @@ def _setter_hook_encoder(self, value): class ActorOutputStatisticsActor(ActorOutputUsingDataItemContainer): """This is a hand-crafted ActorOutput specifically for the SimulationStatisticsActor.""" - _default_interface_class = UserInterfaceToActorOutputStatisticsActor + _default_interface_class = UserInterfaceToActorOutputUsingDataItemContainer data_container_class = StatisticsItemContainer # hints for IDE From f16c1ea834dd89009fba921bcc47f31150f6ad82 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 29 Oct 2024 14:26:56 +0100 Subject: [PATCH 150/174] Extend DataItem.__getattr__ to get attributes from self.data if possible --- opengate/actors/dataitems.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index 12e5d898e..82d793a69 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -85,6 +85,8 @@ def hand_down(*args, **kwargs): getattr(self.data, item)(*args, **kwargs) return hand_down + else: + return getattr(self.data, item) else: raise AttributeError(f"No such attribute '{item}'") else: From 342347662924e324dc7b396ff498f1158cce50d6 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 29 Oct 2024 14:28:45 +0100 Subject: [PATCH 151/174] Adapt read_stat_file_legacy() to updated stats actor --- opengate/tests/utility.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengate/tests/utility.py b/opengate/tests/utility.py index d4cdf6db4..3b25052a0 100644 --- a/opengate/tests/utility.py +++ b/opengate/tests/utility.py @@ -112,7 +112,7 @@ def read_stat_file_legacy(filename): counts.nb_threads = int(a) except: 
counts.nb_threads = "?" - stat.user_output.stats.store_data(counts) + stat.user_output.stats.store_data("merged", counts) return stat From fc7322a1e23a2a7303cf3e4d2cc21dcce21e25ef Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Tue, 29 Oct 2024 14:29:29 +0100 Subject: [PATCH 152/174] Adapt assert_stats() and assert_stats_json() to updated stats actor --- opengate/tests/utility.py | 33 ++++++++++++++++----------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/opengate/tests/utility.py b/opengate/tests/utility.py index 3b25052a0..912c2fd71 100644 --- a/opengate/tests/utility.py +++ b/opengate/tests/utility.py @@ -128,21 +128,20 @@ def print_test(b, s): def assert_stats(stats_actor_1, stats_actor_2, tolerance=0): return assert_stats_json( - stats_actor_1.user_output.stats, - stats_actor_2.user_output.stats, + stats_actor_1, + stats_actor_2, tolerance, track_types_flag=stats_actor_1.track_types_flag, ) def assert_stats_json(stats_actor_1, stats_actor_2, tolerance=0, track_types_flag=None): - output1 = stats_actor_1 # .user_output.stats - output2 = stats_actor_2 # .user_output.stats - if track_types_flag is None: - track_types_flag = len(output1.track_types) > 0 - counts1 = output1.merged_data - counts2 = output2.merged_data + counts1 = stats_actor_1.counts + counts2 = stats_actor_2.counts + + if track_types_flag is None: + track_types_flag = len(counts1.track_types) > 0 if counts2.events != 0: event_d = counts1.events / counts2.events * 100 - 100 else: @@ -155,18 +154,18 @@ def assert_stats_json(stats_actor_1, stats_actor_2, tolerance=0, track_types_fla step_d = counts1.steps / counts2.steps * 100 - 100 else: step_d = 100 - if output2.pps != 0: - pps_d = output1.pps / output2.pps * 100 - 100 + if counts2.pps != 0: + pps_d = counts1.pps / counts2.pps * 100 - 100 else: pps_d = 100 - if output2.tps != 0: - tps_d = output1.tps / output2.tps * 100 - 100 + if counts2.tps != 0: + tps_d = counts1.tps / counts2.tps * 100 - 100 else: tps_d = 100 - if output2.sps != 0: - sps_d = output1.sps / output2.sps * 100 - 100 + if counts2.sps != 0: + sps_d = counts1.sps / counts2.sps * 100 - 100 else: sps_d = 100 @@ -198,17 +197,17 @@ def assert_stats_json(stats_actor_1, stats_actor_2, tolerance=0, track_types_fla print_test( True, - f"PPS: {output1.pps:.1f} {output2.pps:.1f} : " + f"PPS: {counts1.pps:.1f} {counts2.pps:.1f} : " f"{pps_d:+.1f}% speedup = x{(pps_d + 100) / 100:.1f}", ) print_test( True, - f"TPS: {output1.tps:.1f} {output2.tps:.1f} : " + f"TPS: {counts1.tps:.1f} {counts2.tps:.1f} : " f"{tps_d:+.1f}% speedup = x{(tps_d + 100) / 100:.1f}", ) print_test( True, - f"SPS: {output1.sps:.1f} {output2.sps:.1f} : " + f"SPS: {counts1.sps:.1f} {counts2.sps:.1f} : " f"{sps_d:+.1f}% speedup = x{(sps_d + 100) / 100:.1f}", ) From a8f9addfc2768ad26eb48d66185973f44dc237c5 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Wed, 30 Oct 2024 10:06:51 +0100 Subject: [PATCH 153/174] Merge sim_start_time and sim_stop_time via min/max --- opengate/actors/dataitems.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index 82d793a69..ac8ad56d8 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -206,8 +206,8 @@ def inplace_merge_with(self, *other): # self.data.start_time = 0 # self.data.stop_time = 0 - # self.data.sim_start_time = 0 - # self.data.sim_stop_time = 0 + self.data.sim_start_time = min([o.counts.sim_start_time for o in other]) + self.data.sim_stop_time = max([o.counts.sim_stop_time for o in 
other]) @property def pps(self): From 2210788197957a54cfea14ca58b433374ec52684 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 00:54:22 +0100 Subject: [PATCH 154/174] Adapt assert_stats_json() to updated StatsActor --- opengate/tests/utility.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengate/tests/utility.py b/opengate/tests/utility.py index 912c2fd71..0915af612 100644 --- a/opengate/tests/utility.py +++ b/opengate/tests/utility.py @@ -236,7 +236,7 @@ def assert_stats_json(stats_actor_1, stats_actor_2, tolerance=0, track_types_fla n += int(t) b = n == counts1.tracks print_test(b, f"Tracks : {counts1.track_types}") - if "track_types" in counts2: + if hasattr(counts2, "track_types"): print_test(b, f"Tracks (ref): {counts2.track_types}") print_test(b, f"Tracks vs track_types : {counts1.tracks} {n}") is_ok = b and is_ok From d482c621d139e94ff60d8aac8fd147aca996e9ee Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 00:56:20 +0100 Subject: [PATCH 155/174] Improve handling of start_time and stop_time in StatisticsDataItem --- opengate/actors/dataitems.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index ac8ad56d8..ab79d1c46 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -3,6 +3,7 @@ import json from box import Box import platform +import datetime from ..exception import fatal, warning, GateImplementationError from ..utility import ( @@ -205,9 +206,19 @@ def inplace_merge_with(self, *other): self.data.track_types[k] = o.data.track_types[k] # self.data.start_time = 0 - # self.data.stop_time = 0 - self.data.sim_start_time = min([o.counts.sim_start_time for o in other]) - self.data.sim_stop_time = max([o.counts.sim_stop_time for o in other]) + self.data.start_time = min([o.data.start_time for o in other]) + self.data.stop_time = max([o.data.stop_time for o in other]) + + self.data.sim_start_time = min([o.data.sim_start_time for o in other]) + self.data.sim_stop_time = max([o.data.sim_stop_time for o in other]) + + @property + def start_date_time(self): + return datetime.datetime.fromtimestamp(int(self.data.start_time)).strftime('%c') + + @property + def stop_date_time(self): + return datetime.datetime.fromtimestamp(int(self.data.stop_time)).strftime('%c') @property def pps(self): From 1473df00fced5d8c54d0f7c8a5aa6f45df462733 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 00:57:01 +0100 Subject: [PATCH 156/174] Update GateSimulationStatisticsActor::GetCounts() --- .../opengate_lib/GateSimulationStatisticsActor.cpp | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp b/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp index 0f5ccfae7..dc03cae71 100644 --- a/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp +++ b/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp @@ -62,11 +62,15 @@ void GateSimulationStatisticsActor::StartSimulationAction() { py::dict GateSimulationStatisticsActor::GetCounts() { auto dd = py::dict( - "runs"_a = fCounts["runs"], "events"_a = fCounts["events"], - "tracks"_a = fCounts["tracks"], "steps"_a = fCounts["steps"], - "duration"_a = fCountsD["duration"], "init"_a = fCountsD["init"], - "start_time"_a = fCountsStr["start_time"], - "stop_time"_a = fCountsStr["stop_time"], "track_types"_a = fTrackTypes); + "runs"_a = fCounts["runs"], + 
"events"_a = fCounts["events"], + "tracks"_a = fCounts["tracks"], + "steps"_a = fCounts["steps"], + "duration"_a = fCountsD["duration"], + "init"_a = fCountsD["init"], + "start_time"_a = fCountsD["start_time"], + "stop_time"_a = fCountsD["stop_time"], + "track_types"_a = fTrackTypes); return dd; } From 014e950be90016f6aca27bc386ff042056fecfdb Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 00:58:26 +0100 Subject: [PATCH 157/174] In GateSimulationStatisticsActor: save start_time and stop_time as epoch time rather than date time str --- .../opengate_lib/GateSimulationStatisticsActor.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp b/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp index dc03cae71..00b43e4d6 100644 --- a/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp +++ b/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp @@ -156,12 +156,18 @@ void GateSimulationStatisticsActor::EndSimulationAction() { std::stringstream ss; auto t_c = std::chrono::system_clock::to_time_t(fStartTime); ss << strtok(std::ctime(&t_c), "\n"); - fCountsStr["start_time"] = ss.str(); + auto startTimeSec = std::chrono::time_point_cast(fStartTime); + long startTimeSecDouble = startTimeSec.time_since_epoch().count(); + fCountsD["start_time"] = startTimeSecDouble; +// fCountsStr["start_time"] = ss.str(); } { std::stringstream ss; auto t_c = std::chrono::system_clock::to_time_t(fStopTime); ss << strtok(std::ctime(&t_c), "\n"); - fCountsStr["stop_time"] = ss.str(); + auto stopTimeSec = std::chrono::time_point_cast(fStopTime); + long stopTimeSecDouble = stopTimeSec.time_since_epoch().count(); + fCountsD["stop_time"] = stopTimeSecDouble; +// fCountsStr["stop_time"] = ss.str(); } } From 451e7da713d193b13453e4e80b49302209038e74 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 01:04:36 +0100 Subject: [PATCH 158/174] adapt read_stat_file_json() to updated StatsActor --- opengate/tests/utility.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengate/tests/utility.py b/opengate/tests/utility.py index 0915af612..70ea3ecc5 100644 --- a/opengate/tests/utility.py +++ b/opengate/tests/utility.py @@ -71,7 +71,7 @@ def read_stat_file_json(filename): for k, d in data.items(): counts[k] = d["value"] stat = SimulationStatisticsActor(name=r) - stat.user_output.stats.store_data(counts) + stat.user_output.stats.store_data("merged", counts) return stat From f6f348b7476d0633bfe21d60149bd24f04ac6f19 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 01:05:57 +0100 Subject: [PATCH 159/174] remove redundant code in test008_dose_actor_multiproc.py --- .../tests/src/test008_dose_actor_multiproc.py | 40 +------------------ 1 file changed, 1 insertion(+), 39 deletions(-) diff --git a/opengate/tests/src/test008_dose_actor_multiproc.py b/opengate/tests/src/test008_dose_actor_multiproc.py index 8682ee4af..d6c57d563 100755 --- a/opengate/tests/src/test008_dose_actor_multiproc.py +++ b/opengate/tests/src/test008_dose_actor_multiproc.py @@ -121,45 +121,7 @@ path_edep_nproc1, path_edep_nproc4, tolerance=13, - sum_tolerance=1, - ) - # start simulation - t1 = time.time() - sim.output_dir = paths.output / Path(__file__.rstrip(".py")).stem / "nproc_4" - sim.run(number_of_sub_processes=4) - t2 = time.time() - delta_t_nproc4 = t2 - t1 - - path_edep_nproc4 = dose.edep.get_output_path() - - t1 = time.time() - sim.output_dir = paths.output / 
Path(__file__.rstrip(".py")).stem / "nproc_1" - sim.run(number_of_sub_processes=1) - t2 = time.time() - delta_t_nproc1 = t2 - t1 - - path_edep_nproc1 = dose.edep.get_output_path() - - # t1 = time.time() - # sim.run(number_of_sub_processes=0) - # t2 = time.time() - # delta_t_no_subproc = t2 - t1 - - print("Simulation times: ") - print(f"One subprocess: {delta_t_nproc1}") - print( - f"Four subprocesses: {delta_t_nproc4}, speed-up: {delta_t_nproc1 / delta_t_nproc4}" - ) - # print(f"No subprocess: {delta_t_no_subproc}") - - # # tests - print("\nDifference for EDEP") - is_ok = utility.assert_images( - path_edep_nproc1, - path_edep_nproc4, - stat, - tolerance=13, - ignore_value=0, + ignore_value_data2=0, sum_tolerance=1, ) # From 9e6942014f6d32efec315e291fb775fb299b008d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 31 Oct 2024 00:08:52 +0000 Subject: [PATCH 160/174] [pre-commit.ci] Automatic python and c++ formatting --- .../GateSimulationStatisticsActor.cpp | 22 +++++++++---------- opengate/actors/dataitems.py | 4 ++-- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp b/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp index 00b43e4d6..1425cba47 100644 --- a/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp +++ b/core/opengate_core/opengate_lib/GateSimulationStatisticsActor.cpp @@ -62,15 +62,11 @@ void GateSimulationStatisticsActor::StartSimulationAction() { py::dict GateSimulationStatisticsActor::GetCounts() { auto dd = py::dict( - "runs"_a = fCounts["runs"], - "events"_a = fCounts["events"], - "tracks"_a = fCounts["tracks"], - "steps"_a = fCounts["steps"], - "duration"_a = fCountsD["duration"], - "init"_a = fCountsD["init"], + "runs"_a = fCounts["runs"], "events"_a = fCounts["events"], + "tracks"_a = fCounts["tracks"], "steps"_a = fCounts["steps"], + "duration"_a = fCountsD["duration"], "init"_a = fCountsD["init"], "start_time"_a = fCountsD["start_time"], - "stop_time"_a = fCountsD["stop_time"], - "track_types"_a = fTrackTypes); + "stop_time"_a = fCountsD["stop_time"], "track_types"_a = fTrackTypes); return dd; } @@ -156,18 +152,20 @@ void GateSimulationStatisticsActor::EndSimulationAction() { std::stringstream ss; auto t_c = std::chrono::system_clock::to_time_t(fStartTime); ss << strtok(std::ctime(&t_c), "\n"); - auto startTimeSec = std::chrono::time_point_cast(fStartTime); + auto startTimeSec = + std::chrono::time_point_cast(fStartTime); long startTimeSecDouble = startTimeSec.time_since_epoch().count(); fCountsD["start_time"] = startTimeSecDouble; -// fCountsStr["start_time"] = ss.str(); + // fCountsStr["start_time"] = ss.str(); } { std::stringstream ss; auto t_c = std::chrono::system_clock::to_time_t(fStopTime); ss << strtok(std::ctime(&t_c), "\n"); - auto stopTimeSec = std::chrono::time_point_cast(fStopTime); + auto stopTimeSec = + std::chrono::time_point_cast(fStopTime); long stopTimeSecDouble = stopTimeSec.time_since_epoch().count(); fCountsD["stop_time"] = stopTimeSecDouble; -// fCountsStr["stop_time"] = ss.str(); + // fCountsStr["stop_time"] = ss.str(); } } diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index ab79d1c46..15f4402bd 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -214,11 +214,11 @@ def inplace_merge_with(self, *other): @property def start_date_time(self): - return 
datetime.datetime.fromtimestamp(int(self.data.start_time)).strftime('%c') + return datetime.datetime.fromtimestamp(int(self.data.start_time)).strftime("%c") @property def stop_date_time(self): - return datetime.datetime.fromtimestamp(int(self.data.stop_time)).strftime('%c') + return datetime.datetime.fromtimestamp(int(self.data.stop_time)).strftime("%c") @property def pps(self): From 0eb6b81a86b3a98be496fde89ba0822b2898b453 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 10:22:29 +0100 Subject: [PATCH 161/174] activate ssh session on github --- .github/workflows/main.yml | 92 +++++++++++++++++++------------------- 1 file changed, 46 insertions(+), 46 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f53480a2d..6d8670bb4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -295,52 +295,52 @@ jobs: packages_dir: dist_opengate/ skip_existing: true -# ssh_session: -# env: -# GEANT4_VERSION: 'v11.2.1' -# ITK_VERSION: 'v5.2.1' -# runs-on: macos-13 -# steps: -# - name: Checkout github repo -# uses: actions/checkout@v4 -# - name: Checkout submodules -# shell: bash -l {0} -# run: | -# export GIT_SSL_NO_VERIFY=1 -# git submodule update --init --recursive -# - name: Set up Python -# uses: actions/setup-python@v5 -# with: -# python-version: 3.9 -# architecture: 'x64' -# - name: Get OS version -# id: get-os-version -# shell: bash -l {0} -# run: | -# varOS=`sw_vers | grep "ProductVersion:"` -# varOS="${varOS#*:}" -# echo "release=${varOS:1}" >> $GITHUB_OUTPUT -# - name: Cache modules -# id: cache_opengate_core_dependencies -# uses: actions/cache@v4 -# with: -# path: ~/software -# key: ${{ runner.os }}-${{ steps.get-os-version.outputs.release }}_geant4_${{ env.GEANT4_VERSION }}_itk_${{ env.ITK_VERSION }}_build2 -# restore-keys: ${{ runner.os }}-${{ steps.get-os-version.outputs.release }}_geant4_${{ env.GEANT4_VERSION }}_itk_${{ env.ITK_VERSION }}_build2 -# - uses: conda-incubator/setup-miniconda@v3 -# with: -# miniconda-version: "latest" -# auto-update-conda: true -# activate-environment: opengate_core -# python-version: 3.9 -# - name: Set up Homebrew -# id: set-up-homebrew -# uses: Homebrew/actions/setup-homebrew@master -# - name: Start SSH session -# uses: luchihoratiu/debug-via-ssh@main -# with: -# NGROK_AUTH_TOKEN: ${{ secrets.NGROK_AUTH_TOKEN }} -# SSH_PASS: ${{ secrets.SSH_PASS }} + ssh_session: + env: + GEANT4_VERSION: 'v11.2.1' + ITK_VERSION: 'v5.2.1' + runs-on: macos-13 + steps: + - name: Checkout github repo + uses: actions/checkout@v4 + - name: Checkout submodules + shell: bash -l {0} + run: | + export GIT_SSL_NO_VERIFY=1 + git submodule update --init --recursive + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.9 + architecture: 'x64' + - name: Get OS version + id: get-os-version + shell: bash -l {0} + run: | + varOS=`sw_vers | grep "ProductVersion:"` + varOS="${varOS#*:}" + echo "release=${varOS:1}" >> $GITHUB_OUTPUT + - name: Cache modules + id: cache_opengate_core_dependencies + uses: actions/cache@v4 + with: + path: ~/software + key: ${{ runner.os }}-${{ steps.get-os-version.outputs.release }}_geant4_${{ env.GEANT4_VERSION }}_itk_${{ env.ITK_VERSION }}_build2 + restore-keys: ${{ runner.os }}-${{ steps.get-os-version.outputs.release }}_geant4_${{ env.GEANT4_VERSION }}_itk_${{ env.ITK_VERSION }}_build2 + - uses: conda-incubator/setup-miniconda@v3 + with: + miniconda-version: "latest" + auto-update-conda: true + activate-environment: opengate_core + python-version: 3.9 + - name: Set 
up Homebrew + id: set-up-homebrew + uses: Homebrew/actions/setup-homebrew@master + - name: Start SSH session + uses: luchihoratiu/debug-via-ssh@main + with: + NGROK_AUTH_TOKEN: ${{ secrets.NGROK_AUTH_TOKEN }} + SSH_PASS: ${{ secrets.SSH_PASS }} test_wheel: runs-on: ${{ matrix.os }} From bbec50013e05deb110884372518fa95338e0992d Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 11:50:15 +0100 Subject: [PATCH 162/174] update test082_multiprocessing_1.py --- .../tests/src/test082_multiprocessing_1.py | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/opengate/tests/src/test082_multiprocessing_1.py b/opengate/tests/src/test082_multiprocessing_1.py index 90bf61400..6ee1c2544 100755 --- a/opengate/tests/src/test082_multiprocessing_1.py +++ b/opengate/tests/src/test082_multiprocessing_1.py @@ -2,8 +2,7 @@ # -*- coding: utf-8 -*- from opengate.utility import g4_units import opengate as gate -from opengate.tests.utility import get_default_test_paths - +from opengate.tests.utility import get_default_test_paths, test_ok if __name__ == "__main__": paths = get_default_test_paths(__file__, output_folder="test080") @@ -22,13 +21,20 @@ n_proc = 4 * len(sim.run_timing_intervals) - output = sim.run(number_of_sub_processes=n_proc) + sim.run(number_of_sub_processes=n_proc) + + ids = [m.simulation_id for m in sim.meta_data_per_process.values()] + print("ID of the main sim:") + print(id(sim)) + print(f"ID of the sims in subprocesses:") + for _id in ids: + print(_id) - print("*** output ***") - for o in output: - print(o) + # check that the ID of the Simulation instance in the main process is + # different from the IDs in the subprocesses + is_ok = id(sim) not in ids - print(f"ID of the main sim: {id(sim)}") + # Check that the IDs in the subprocesses are mutually independent + is_ok = is_ok and len(set(ids)) == len(ids) - ids = [o.simulation_id for o in output] - assert id(sim) not in ids + test_ok(is_ok) From ec56329db4c3b908df2a6aed0bcc7873d77eb2d0 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 11:50:57 +0100 Subject: [PATCH 163/174] pick up simulation_id as meta_data after a simulation --- opengate/managers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/opengate/managers.py b/opengate/managers.py index 2944d4ad8..963ea09f8 100644 --- a/opengate/managers.py +++ b/opengate/managers.py @@ -1218,6 +1218,7 @@ def __init__(self, *args, simulation_output=None, **kwargs): self.current_random_seed = None self.number_of_sub_processes = None self.start_new_process = None + self.simulation_id = None if simulation_output is not None: self.extract_from_simulation_output(simulation_output) @@ -1247,6 +1248,7 @@ def extract_from_simulation_output(self, *sim_output): self.user_hook_log.extend(so.user_hook_log) if self.current_random_seed is None: self.current_random_seed = so.current_random_seed + self.simulation_id = so.simulation_id def setter_hook_verbose_level(self, verbose_level): From 039ef82d276e54594f7424f5584b028f2d487298 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 11:54:05 +0100 Subject: [PATCH 164/174] add explicit process_cls() for MultiProcessHandler classes --- opengate/processing.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/opengate/processing.py b/opengate/processing.py index 44b8e64a2..5967a0892 100644 --- a/opengate/processing.py +++ b/opengate/processing.py @@ -2,7 +2,7 @@ import queue from .exception import fatal -from .base import GateObject +from .base import GateObject, process_cls 
# define thin wrapper function to handle the queue @@ -142,3 +142,7 @@ def generate_dispatch_configuration(self): process_index += 1 self.dispatch_configuration = dispatch_configuration return dispatch_configuration + + +process_cls(MultiProcessingHandlerBase) +process_cls(MultiProcessingHandlerEqualPerRunTimingInterval) From dd3068cdd86577987e654d8ff9c54b7d5612443f Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 12:10:53 +0100 Subject: [PATCH 165/174] Update file names in test081_simulation_optigan_with_random_seed.py --- .../tests/src/test081_simulation_optigan_with_random_seed.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/opengate/tests/src/test081_simulation_optigan_with_random_seed.py b/opengate/tests/src/test081_simulation_optigan_with_random_seed.py index b7b1c04d1..f3365bd1d 100755 --- a/opengate/tests/src/test081_simulation_optigan_with_random_seed.py +++ b/opengate/tests/src/test081_simulation_optigan_with_random_seed.py @@ -10,7 +10,7 @@ import os if __name__ == "__main__": - paths = tu.get_default_test_paths(__file__, output_folder="test075_optigan") + paths = tu.get_default_test_paths(__file__, output_folder="test081_optigan") # create simulation sim = gate.Simulation() @@ -76,7 +76,7 @@ "TrackID", ] - phsp_actor.output_filename = "test075_simulation_optigan_with_random_seed_600.root" + phsp_actor.output_filename = "simulation_optigan_with_random_seed_600.root" # add a kill actor to the crystal ka = sim.add_actor("KillActor", "kill_actor2") From 0ac5519b28a1eca2b836cb0a6b389d8f3e10875e Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Thu, 31 Oct 2024 12:19:01 +0100 Subject: [PATCH 166/174] Adapt test006_runs.py to updated StatsActorOutput --- opengate/tests/src/test006_runs.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/opengate/tests/src/test006_runs.py b/opengate/tests/src/test006_runs.py index b9f00d80c..a23d9e7ca 100755 --- a/opengate/tests/src/test006_runs.py +++ b/opengate/tests/src/test006_runs.py @@ -1,6 +1,8 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- +from box import Box + import opengate as gate import opengate.tests.utility as utility @@ -86,13 +88,16 @@ print(stats) stats_ref = gate.actors.miscactors.SimulationStatisticsActor(name="stat_ref") - c = stats_ref.counts + c = Box() c.runs = 3 c.events = 7800 c.tracks = 37584 # 56394 c.steps = 266582 # 217234 # stats_ref.pps = 4059.6 3 3112.2 c.duration = 1 / 4059.6 * 7800 * sec + + stats_ref.user_output.stats.store_data("merged", c) + print("-" * 80) is_ok = utility.assert_stats(stats, stats_ref, 0.185) From 38607980bcc0d7a0eefd64c34a5d2a1767d99eb8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 29 Nov 2024 17:11:52 +0000 Subject: [PATCH 167/174] [pre-commit.ci] Automatic python and c++ formatting --- opengate/actors/actoroutput.py | 2 -- opengate/actors/miscactors.py | 8 ++++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index e723818d5..b10e5cce5 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -546,8 +546,6 @@ def merge_data_from_actor_output( if len(merged_data_to_import) > 0: self.merged_data = merge_data(merged_data_to_import) - - # this intermediate base class defines a class attribute data_container_class, # but leaves it as None. 
Specific classes need to set it to the correct class or tuple of classes data_container_class = None diff --git a/opengate/actors/miscactors.py b/opengate/actors/miscactors.py index 2f4f0e2ff..cf43146ae 100644 --- a/opengate/actors/miscactors.py +++ b/opengate/actors/miscactors.py @@ -269,10 +269,10 @@ class SplittingActorBase(ActorBase): class ComptSplittingActor(SplittingActorBase, g4.GateOptrComptSplittingActor): """This splitting actor enables process-based splitting specifically for Compton interactions. Each time a Compton - process occurs, its behavior is modified by generating multiple Compton scattering tracks - (splitting factor - 1 additional tracks plus the original) associated with the initial particle. - Compton electrons produced in the interaction are also included, in accordance with the secondary cut settings - provided by the user. + process occurs, its behavior is modified by generating multiple Compton scattering tracks + (splitting factor - 1 additional tracks plus the original) associated with the initial particle. + Compton electrons produced in the interaction are also included, in accordance with the secondary cut settings + provided by the user. """ # hints for IDE From fe4bae0dca9357b8267d3e6424794bb8eea4b464 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 29 Nov 2024 18:16:32 +0100 Subject: [PATCH 168/174] Remove obsolete variable available_data_container_classes --- opengate/actors/dataitems.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/opengate/actors/dataitems.py b/opengate/actors/dataitems.py index 4b0691651..566d8a351 100644 --- a/opengate/actors/dataitems.py +++ b/opengate/actors/dataitems.py @@ -936,13 +936,3 @@ def merge_data(list_of_data): for d in list_of_data[1:]: merged_data.inplace_merge_with(d) return merged_data - - -available_data_container_classes = { - "SingleItkImage": SingleItkImage, - "SingleMeanItkImage": SingleMeanItkImage, - "QuotientMeanItkImage": QuotientMeanItkImage, - "SingleArray": SingleArray, - "DoubleArray": DoubleArray, - "SingleItkImageWithVariance": SingleItkImageWithVariance, -} From 2ed23a7f31ea1fb0e444bb6f501d3747a46449b2 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 29 Nov 2024 18:19:12 +0100 Subject: [PATCH 169/174] remove obsolete imports --- opengate/actors/actoroutput.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index b10e5cce5..5e2bcc361 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -1,7 +1,5 @@ import copy import inspect -from box import Box -from typing import Optional import uproot import tqdm import numpy as np From 611ee812b6972d046c048b178128ca47db73a2c0 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 29 Nov 2024 18:19:29 +0100 Subject: [PATCH 170/174] remove obsolete method available_data_container_classes() --- opengate/actors/actoroutput.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 5e2bcc361..4b0ffac04 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -490,9 +490,6 @@ class ActorOutputUsingDataItemContainer(ActorOutputBase): ), } - def merge_data_from_runs(self): - self.merged_data = merge_data(list(self.data_per_run.values())) - def merge_into_merged_data(self, data): if self.merged_data is None: self.merged_data = data From b2af2d7a77c5a5b775bc86b8b296c6735d569250 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Fri, 29 Nov 2024 18:24:33 +0100 Subject: 
[PATCH 171/174] Put methods back in order after merge --- opengate/actors/actoroutput.py | 106 ++++++++++++--------------------- 1 file changed, 39 insertions(+), 67 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 4b0ffac04..3d537cccc 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -490,57 +490,6 @@ class ActorOutputUsingDataItemContainer(ActorOutputBase): ), } - def merge_into_merged_data(self, data): - if self.merged_data is None: - self.merged_data = data - else: - self.merged_data = merge_data([self.merged_data, data]) - - def end_of_run(self, run_index): - if self.merge_data_after_simulation is True: - self.merge_into_merged_data(self.data_per_run[run_index]) - if self.keep_data_per_run is False: - self.data_per_run.pop(run_index) - - def end_of_simulation(self, **kwargs): - try: - self.write_data_if_requested(which="all", **kwargs) - except NotImplementedError: - raise GateImplementationError( - "Unable to run end_of_simulation " - f"in user_output {self.name} of actor {self.belongs_to_actor.name}" - f"because the class does not implement a write_data_if_requested() " - f"and/or write_data() method. " - f"A developer needs to fix this. " - ) - - def merge_data_from_actor_output( - self, *actor_output, discard_existing_data=True, **kwargs - ): - run_indices_to_import = set() - for ao in actor_output: - run_indices_to_import.union(ao.data_per_run.keys()) - which_output_per_run_index = dict( - [ - (r, [ao for ao in actor_output if r in ao.data_per_run]) - for r in run_indices_to_import - ] - ) - for r in run_indices_to_import: - data_to_import = [ - ao.data_per_run[r] for ao in which_output_per_run_index[r] - ] - if discard_existing_data is False and r in self.data_per_run: - data_to_import.append(self.data_per_run[r]) - self.data_per_run[r] = merge_data(data_to_import) - merged_data_to_import = [ - ao.merged_data for ao in actor_output if ao.merged_data is not None - ] - if discard_existing_data is False and self.merged_data is not None: - merged_data_to_import.append(self.merged_data) - if len(merged_data_to_import) > 0: - self.merged_data = merge_data(merged_data_to_import) - # this intermediate base class defines a class attribute data_container_class, # but leaves it as None. 
Specific classes need to set it to the correct class or tuple of classes data_container_class = None @@ -855,6 +804,45 @@ def write_data_if_requested(self, which="all", item="all", **kwargs): def end_of_simulation(self, item="all", **kwargs): self.write_data_if_requested(which="all", item=item) + def merge_into_merged_data(self, data): + if self.merged_data is None: + self.merged_data = data + else: + self.merged_data = merge_data([self.merged_data, data]) + + def end_of_run(self, run_index): + if self.merge_data_after_simulation is True: + self.merge_into_merged_data(self.data_per_run[run_index]) + if self.keep_data_per_run is False: + self.data_per_run.pop(run_index) + + def merge_data_from_actor_output( + self, *actor_output, discard_existing_data=True, **kwargs + ): + run_indices_to_import = set() + for ao in actor_output: + run_indices_to_import.union(ao.data_per_run.keys()) + which_output_per_run_index = dict( + [ + (r, [ao for ao in actor_output if r in ao.data_per_run]) + for r in run_indices_to_import + ] + ) + for r in run_indices_to_import: + data_to_import = [ + ao.data_per_run[r] for ao in which_output_per_run_index[r] + ] + if discard_existing_data is False and r in self.data_per_run: + data_to_import.append(self.data_per_run[r]) + self.data_per_run[r] = merge_data(data_to_import) + merged_data_to_import = [ + ao.merged_data for ao in actor_output if ao.merged_data is not None + ] + if discard_existing_data is False and self.merged_data is not None: + merged_data_to_import.append(self.merged_data) + if len(merged_data_to_import) > 0: + self.merged_data = merge_data(merged_data_to_import) + class ActorOutputImage(ActorOutputUsingDataItemContainer): @@ -942,22 +930,6 @@ class ActorOutputStatisticsActor(ActorOutputUsingDataItemContainer): "allowed_values": ("json", "legacy"), }, ), - # "output_filename": ( - # "auto", - # { - # "doc": "Filename for the data represented by this actor output. " - # "Relative paths and filenames are taken " - # "relative to the global simulation output folder " - # "set via the Simulation.output_dir option. ", - # "setter_hook": _setter_hook_stats_actor_output_filename, - # }, - # ), - # "write_to_disk": ( - # False, - # { - # "doc": "Should the output be written to disk, or only kept in memory? 
", - # }, - # ), } def __init__(self, *args, **kwargs): From 2060caec790c04f6f296b627db2e53f2dc63f654 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sat, 30 Nov 2024 00:03:26 +0100 Subject: [PATCH 172/174] Remove obsolete merge_into_merged_data --- opengate/actors/actoroutput.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 3d537cccc..0b2c8af5e 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -804,12 +804,6 @@ def write_data_if_requested(self, which="all", item="all", **kwargs): def end_of_simulation(self, item="all", **kwargs): self.write_data_if_requested(which="all", item=item) - def merge_into_merged_data(self, data): - if self.merged_data is None: - self.merged_data = data - else: - self.merged_data = merge_data([self.merged_data, data]) - def end_of_run(self, run_index): if self.merge_data_after_simulation is True: self.merge_into_merged_data(self.data_per_run[run_index]) From 51e0bc9695fb9a90cd54aa518087ef8675b00998 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sat, 30 Nov 2024 00:04:05 +0100 Subject: [PATCH 173/174] Correct wrongly merged code --- opengate/actors/actoroutput.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/opengate/actors/actoroutput.py b/opengate/actors/actoroutput.py index 0b2c8af5e..e51c90b90 100644 --- a/opengate/actors/actoroutput.py +++ b/opengate/actors/actoroutput.py @@ -806,10 +806,14 @@ def end_of_simulation(self, item="all", **kwargs): def end_of_run(self, run_index): if self.merge_data_after_simulation is True: - self.merge_into_merged_data(self.data_per_run[run_index]) + self.merged_data.inplace_merge_with(self.data_per_run[run_index]) if self.keep_data_per_run is False: self.data_per_run.pop(run_index) + def start_of_simulation(self, **kwargs): + if self.merge_data_after_simulation is True: + self.merged_data = self.data_container_class(belongs_to=self) + def merge_data_from_actor_output( self, *actor_output, discard_existing_data=True, **kwargs ): From 4186880a08acf983014676550ebc88f22774a1e2 Mon Sep 17 00:00:00 2001 From: Nils Krah Date: Sat, 30 Nov 2024 00:05:02 +0100 Subject: [PATCH 174/174] renamed test082 to test084 --- ...{test082_multiprocessing_1.py => test084_multiprocessing_1.py} | 0 ...tiprocessing_handler.py => test084_multiprocessing_handler.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename opengate/tests/src/{test082_multiprocessing_1.py => test084_multiprocessing_1.py} (100%) rename opengate/tests/src/{test082_multiprocessing_handler.py => test084_multiprocessing_handler.py} (100%) diff --git a/opengate/tests/src/test082_multiprocessing_1.py b/opengate/tests/src/test084_multiprocessing_1.py similarity index 100% rename from opengate/tests/src/test082_multiprocessing_1.py rename to opengate/tests/src/test084_multiprocessing_1.py diff --git a/opengate/tests/src/test082_multiprocessing_handler.py b/opengate/tests/src/test084_multiprocessing_handler.py similarity index 100% rename from opengate/tests/src/test082_multiprocessing_handler.py rename to opengate/tests/src/test084_multiprocessing_handler.py