Commit 8c681b52 authored by Pierre Paleo's avatar Pierre Paleo Committed by Pierre Paleo
Browse files

Move code to a dedicated module

parent 3f59342e
......@@ -13,6 +13,7 @@ val_to_nxkey = {
"darks": ImageKey.DARK_FIELD.value,
"projections": ImageKey.PROJECTION.value,
}
dataset_infos = {
"num_radios": None,
"num_darks": None,
......@@ -110,7 +111,6 @@ class DatasetAnalyzer(object):
return radios_not_used
class EDFDatasetAnalyzer(DatasetAnalyzer):
"""
EDF Dataset analyzer for legacy ESRF acquisitions
......@@ -188,7 +188,6 @@ class HDF5DatasetAnalyzer(DatasetAnalyzer):
return self._rot_angles
def analyze_dataset(dataset_path):
if not(os.path.isdir(dataset_path)):
if not(os.path.isfile(dataset_path)):
......@@ -201,176 +200,3 @@ def analyze_dataset(dataset_path):
dataset_structure = dataset_analyzer_class(dataset_path)
return dataset_structure
class FlatFieldLoader:
    """
    A helper class to load flats and darks, or to compute the final ones.

    At ESRF, several darks/flats series are acquired during a scan.
    Each series undergoes a reduction (mean, median, ...) to get a "final" image.
    For example, there is one series of flats in the beginning, and one in the end;
    thus there are two corresponding "final" flats.
    """

    def __init__(
        self, data_url, image_keys, lookup_files=None, results_file=None, logger=None
    ):
        """
        Initialize a FlatFieldLoader helper.

        Parameters
        -----------
        data_url: silx.io.url.DataUrl
            A DataUrl object containing the URL to the volume data.
        image_keys: list of int
            List of keys corresponding to each image of the data volume.
            See Nexus Format specification, NXtomo, "image_key".
        lookup_files: list of DataUrl, optional
            List of paths (DataUrl) to inspect to load existing "final" flats/darks.
            If something is found in one of these URLs, the data will be loaded
            from there.
        results_file: DataUrl, optional
            File path where to write the results. By default nothing is saved.
            Mind the difference with `lookup_files`: this parameter is for writing.
            If the file already exists, its data will be overwritten !
        logger: Logger object, optional
            Logging object
        """
        self.data_url = data_url
        self.image_keys = image_keys
        self.lookup_files = lookup_files or []
        self.results_file = results_file
        self.logger = LoggerOrPrint(logger)
        # Default reduction used by apply_reduction()/get_final_images().
        # This attribute was previously never set, so apply_reduction()
        # raised AttributeError. Callers may override it (e.g. np.median).
        self.reduction_function = np.mean

    def browse_existing_flatfield_results(self):
        """
        Attempt at loading already computed "final" darks and flats from
        existing files.

        Returns
        -------
        existing_data: dict or None
            The first successfully loaded result among `self.lookup_files`,
            or None if nothing could be loaded.
        """
        existing_data = None
        for fpath in self.lookup_files:
            existing_data = self.load_existing_flatfield(fpath)
            if existing_data is not None:
                self.logger.info("Loaded flats/darks from %s" % (fpath.file_path()))
                break
        if existing_data is None:
            self.logger.debug("Flats/darks not loaded from any file")
        return existing_data

    def load_existing_flatfield(self, file_url):
        """
        Attempt at loading already computed "final" darks and flats from an
        existing file.

        Parameters
        -----------
        file_url: DataUrl
            URL of the flat/dark to load.

        Returns
        -------
        res: dict or None
            Dictionary with keys "darks" and "flats", or None if the file
            could not be read (the error is logged, not raised).
        """
        results_path = os.path.join(file_url.data_path(), "results")  # NXProcess
        res = {}
        try:
            with HDF5File(file_url.file_path(), "r", swmr=True) as f:
                results = f[results_path]
                for what in ["darks", "flats"]:
                    res[what] = {}
                    for key, val in results[what].items():
                        res[what][key] = val[:]
        except Exception as exc:
            # Best effort: a missing/corrupt lookup file is not fatal,
            # the final images will simply be recomputed.
            self.logger.error(
                "Could not load darks/flats from %s: %s"
                % (file_url.file_path(), str(exc))
            )
            res = None
        return res

    @staticmethod
    def get_data_slices(image_keys, key):
        """
        Return the slices of the data volume where images correspond to a
        certain key.

        Parameters
        ----------
        image_keys: list of int
            List of keys corresponding to each image of the data volume.
            See Nexus Format specification, NXtomo, "image_key".
        key: int
            Key to search in the `image_keys` list.

        Returns
        --------
        slices: list of slice
            One slice per contiguous series of images with this key.
        """
        # Pad the boolean mask with False on both sides so that series located
        # at the very beginning or the very end of the volume are detected.
        # The previous diff-based implementation only saw interior transitions
        # and tripped an assertion for boundary series.
        mask = np.concatenate(([False], np.asarray(image_keys) == key, [False]))
        transitions = np.diff(mask.astype(np.int8))
        starts = np.where(transitions == 1)[0]   # False -> True edges
        ends = np.where(transitions == -1)[0]    # True -> False edges (exclusive)
        return [slice(int(s), int(e)) for s, e in zip(starts, ends)]

    def get_data_chunk(self, data_slice):
        """
        Get a data chunk from the data volume whose path was specified at
        class instantiation.
        """
        with HDF5File(self.data_url.file_path(), "r", swmr=True) as f:
            data_raw = f[self.data_url.data_path()][data_slice]
        return data_raw

    def apply_reduction(self, volume):
        """
        Apply a reduction function on a data volume.
        The data is assumed to be three dimensional, a "stack of images";
        the reduction is done along the stack axis, yielding one 2D image.

        Parameters
        -----------
        volume: numpy.ndarray
            Data volume
        """
        return self.reduction_function(volume, axis=0)

    def write_results(self, data):
        """
        Write the reduced flats/darks to `self.results_file`, if configured.
        """
        if self.results_file is None:
            return
        writer = NXProcessWriter(self.results_file, entry=None, overwrite=True)
        # TODO(review): the writer is instantiated but nothing is written yet —
        # `data` is currently ignored. Complete or remove before release.

    def get_final_images(self):
        """
        Main function of this class.

        This function first attempts at loading already-computed flats/darks.
        If nothing is found, then the final flats/darks are computed from raw
        ones.

        Returns
        -------
        imgs: dict
            Dictionary with keys "flats" and "darks". Each value is a
            dictionary mapping the start index of a series to the
            corresponding reduced image, or None if no image with this key
            was found in the volume.
        """
        existing_imgs = self.browse_existing_flatfield_results()
        if existing_imgs is not None:
            return existing_imgs
        res = {"flats": {}, "darks": {}}
        for what in res.keys():
            img_slices = self.get_data_slices(self.image_keys, val_to_nxkey[what])
            if img_slices == []:
                self.logger.error("No %s found in %s" % (what, self.data_url.file_path()))
                res[what] = None
                continue  # nothing to reduce for this image key
            for data_slice in img_slices:
                data_chunk = self.get_data_chunk(data_slice)
                # Reduce along the stack axis to get one 2D "final" image.
                # The previous np.mean(data_chunk) (no axis) collapsed the
                # whole series to a scalar, inconsistent with apply_reduction.
                img = self.apply_reduction(data_chunk)
                res[what][data_slice.start] = img
        return res
class NXFlatField:
    """
    A helper class to load flats and darks, or to compute the final ones.

    At ESRF, several darks/flats series are acquired during a scan.
    Each series undergoes a reduction (mean, median, ...) to get a "final" image.
    For example, there is one series of flats in the beginning, and one in the end ;
    thus there are two corresponding "final" flats.

    NOTE(review): this class is a byte-for-byte duplicate of FlatFieldLoader
    (this commit "moves code to a dedicated module") — presumably the legacy
    copy slated for removal. Confirm it is deleted, or callers migrated,
    rather than maintaining two copies.
    """

    def __init__(
        self, data_url, image_keys, lookup_files=None, results_file=None, logger=None
    ):
        """
        Initialize a FlatFieldLoader helper.

        Parameters
        -----------
        data_url: silx.io.url.DataUrl
            A DataUrl object containing the URL to the volume data.
        image_keys: list of int
            List of keys corresponding to each image of the data volume.
            See Nexus Format specification, NXtomo, "image_key".
        lookup_files: list of DataUrl, optional
            List of paths (DataUrl) to inspect to load existing "final" flats/darks.
            If something is found in one of these URLs, the data will be loaded
            from there.
        results_file: DataUrl, optional
            File path where to write the results. By default nothing is saved.
            Mind the difference with `lookup_files`: this parameter is for writing.
            If the file already exists, its data will be overwritten !
        logger: Logger object, optional
            Logging object
        """
        self.data_url = data_url
        self.image_keys = image_keys
        self.lookup_files = lookup_files or []
        self.results_file = results_file
        self.logger = LoggerOrPrint(logger)
        # NOTE(review): self.reduction_function is never set anywhere in this
        # class, so apply_reduction() would raise AttributeError if called.

    def browse_existing_flatfield_results(self):
        """
        Attempt at loading already computed "final" darks and flats from
        existing files.

        Returns the first successfully loaded result among `self.lookup_files`,
        or None if nothing could be loaded.
        """
        # NOTE(review): `basedir` is computed but never used.
        basedir = os.path.dirname(self.data_url.file_path())
        existing_data = None
        for fpath in self.lookup_files:
            existing_data = self.load_existing_flatfield(fpath)
            if existing_data is not None:
                self.logger.info("Loaded flats/darks from %s" % (fpath.file_path()))
                break
        if existing_data is None:
            self.logger.debug("Flats/darks not loaded from any file")
        return existing_data

    def load_existing_flatfield(self, file_url):
        """
        Attempt at loading already computed "final" darks and flats from an
        existing file.

        Parameters
        -----------
        file_url: DataUrl
            URL of the flat/dark to load.

        Returns a dict with keys "darks" and "flats", or None on failure
        (the error is logged, not raised).
        """
        results_path = os.path.join(file_url.data_path(), "results") # NXProcess
        res = {}
        try:
            with HDF5File(file_url.file_path(), "r", swmr=True) as f:
                results = f[results_path]
                for what in ["darks", "flats"]:
                    res[what] = {}
                    for key, val in results[what].items():
                        res[what][key] = val[:]
        except Exception as exc:
            # Best effort: a missing/corrupt lookup file is not fatal.
            self.logger.error(
                "Could not load darks/flats from %s: %s"
                % (file_url.file_path(), str(exc))
            )
            res = None
        return res

    @staticmethod
    def get_data_slices(image_keys, key):
        """
        Return indices in the data volume where images correspond to a certain key.

        Parameters
        ----------
        image_keys: list of int
            List of keys corresponding to each image of the data volume.
            See Nexus Format specification, NXtomo, "image_key".
        key: int
            Key to search in the `image_keys` list.

        Returns
        --------
        slices: list of slice
            A list where each item is a slice.
        """
        # NOTE(review): np.diff only sees interior transitions, so a series
        # located at the very beginning or end of the volume is missed and
        # the assertion below fires (odd number of transitions). Padding the
        # boolean mask with False on both sides would fix this — confirm
        # against real acquisitions where darks/flats come first.
        diffs = np.diff(image_keys == key)
        indices = np.arange(diffs.size)[diffs]
        assert indices.size % 2 == 0 # Fails when only 1 image
        indices += 1
        slices = []
        for i in range(0, indices.size, 2):
            start_idx = indices[i]
            end_idx = indices[i+1]
            slices.append(
                slice(start_idx, end_idx)
            )
        return slices

    def get_data_chunk(self, data_slice):
        """
        Get a data chunk from the data volume whose path was specified at
        class instantiation.
        """
        with HDF5File(self.data_url.file_path(), "r", swmr=True) as f:
            data_raw = f[self.data_url.data_path()][data_slice]
        return data_raw

    def apply_reduction(self, volume):
        """
        Apply a reduction function on a data volume.
        The data is assumed to be three dimensional, a "stack of images".

        Parameters
        -----------
        volume: numpy.ndarray
            Data volume
        """
        # NOTE(review): self.reduction_function is never assigned (see
        # __init__), so this raises AttributeError as written.
        return self.reduction_function(volume, axis=0)

    def write_results(self, data):
        # Write the reduced flats/darks to self.results_file, if configured.
        if self.results_file is None:
            return
        # NOTE(review): the writer is instantiated but nothing is written —
        # `data` is ignored. Looks unfinished.
        writer = NXProcessWriter(self.results_file, entry=None, overwrite=True)

    def get_final_images(self):
        """
        Main function of this class.

        This function first attempts at loading already-computed flat/dark.
        If nothing is found, then the final flats/darks are computed from raw ones.

        Returns
        -------
        imgs: dict
            Dictionary where the key is the index of the final flats/darks,
            and the values are the corresponding reduced data.
        """
        existing_imgs = self.browse_existing_flatfield_results()
        if existing_imgs is not None:
            return existing_imgs
        res = {"flats": {}, "darks": {}}
        for what in res.keys():
            img_slices = self.get_data_slices(self.image_keys, val_to_nxkey[what])
            if img_slices == []:
                self.logger.error("No %s found in %s" % (what, self.data_url.file_path()))
                res[what] = None
            for data_slice in img_slices:
                data_chunk = self.get_data_chunk(data_slice)
                # NOTE(review): np.mean without axis=0 collapses the whole
                # series to a scalar, not a 2D image — inconsistent with
                # apply_reduction(..., axis=0). Confirm intent.
                img = np.mean(data_chunk)
                res[what][data_slice.start] = img
        return res
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment