Commit 93ec4f86 authored by Thomas Vincent

Add a per-process cache to avoid reading the same data multiple times

parent 541a3c70
......@@ -187,6 +187,7 @@ class PeakFitter(Thread):
self.__set_status(self.RUNNING)
pool = multiprocessing.Pool(self.__n_proc)
fit_results = []
for result in pool.imap(
functools.partial(_fit_process,
......@@ -256,6 +257,10 @@ class PeakFitter(Thread):
return results
# Per process cache for fit process
_per_process_cache = None
def _fit_process(index,
qspace_f,
fit_type=FitTypes.GAUSSIAN,
......@@ -278,11 +283,17 @@ def _fit_process(index,
:return: Fit results as a list of results for dim0, dim1 and dim2
:rtype: List[List[Union[float,bool]]]
"""
# Read data from file
with QSpaceH5.QSpaceH5(qspace_f) as qspace_h5:
axes = qspace_h5.qspace_dimension_values
hits = qspace_h5.histo
qspace = qspace_h5.qspace_slice(index)
global _per_process_cache
if _per_process_cache is None: # Initialize per process cache
qspace_h5 = QSpaceH5.QSpaceH5(qspace_f)
_per_process_cache = (
qspace_h5, qspace_h5.qspace_dimension_values, qspace_h5.histo)
# Retrieve data/file from cache
qspace_h5, axes, hits = _per_process_cache
# Load qspace
qspace = qspace_h5.qspace_slice(index)
# apply Qspace ROI
if roiIndices is not None:
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment