Commit 2cb2ce13 authored by Pierre Paleo's avatar Pierre Paleo
Browse files

dataset_analyzer: bugfix when loading darks/flats from nabu_processes.h5

parent 8725ba88
Pipeline #33834 passed with stages
in 2 minutes and 26 seconds
......@@ -5,6 +5,7 @@ from tomoscan.esrf.edfscan import EDFTomoScan
from tomoscan.esrf.hdf5scan import HDF5TomoScan
from ..thirdparty.tomwer_load_flats_darks import get_flats_frm_process_file, get_darks_frm_process_file
from .nxflatfield import NXFlatField
from .utils import is_hdf5_extension
dataset_infos = {
"num_radios": None,
......@@ -133,7 +134,7 @@ class EDFDatasetAnalyzer(DatasetAnalyzer):
"""
EDF Dataset analyzer for legacy ESRF acquisitions
"""
def __init__(self, location, n_frames=1):
def __init__(self, location, n_frames=1, processes_file=None, extra_options=None):
"""
EDF Dataset analyzer.
......@@ -142,7 +143,7 @@ class EDFDatasetAnalyzer(DatasetAnalyzer):
location: str
Location of the folder containing EDF files
"""
super().__init__(location)
super().__init__(location, processes_file=processes_file, extra_options=extra_options)
if not(os.path.isdir(location)):
raise ValueError("%s is not a directory" % location)
self._init_dataset_scan("edf", n_frames=n_frames)
......@@ -156,7 +157,7 @@ class HDF5DatasetAnalyzer(DatasetAnalyzer):
"""
HDF5 dataset analyzer
"""
def __init__(self, location):
def __init__(self, location, processes_file=None, extra_options=None):
"""
HDF5 Dataset analyzer.
......@@ -165,7 +166,7 @@ class HDF5DatasetAnalyzer(DatasetAnalyzer):
location: str
Location of the HDF5 master file
"""
super().__init__(location)
super().__init__(location, processes_file=processes_file, extra_options=extra_options)
if not(os.path.isfile(location)):
raise ValueError("%s is not a file" % location)
self._init_dataset_scan("hdf5")
......@@ -216,7 +217,9 @@ class HDF5DatasetAnalyzer(DatasetAnalyzer):
flats_reduction="mean",
darks_reduction="mean"
)
self._nxflatfield.get_final_images()
res = self._nxflatfield.get_final_urls(entry="entry", process_name="flat_field")
self.flats = res["flats"]
self.darks = res["darks"]
def _get_rotation_angles(self):
......@@ -243,15 +246,17 @@ class HDF5DatasetAnalyzer(DatasetAnalyzer):
return self._get_dataset_hdf5_url()
def analyze_dataset(dataset_path, processes_file=None, extra_options=None):
    """
    Create and return the dataset analyzer matching the dataset layout.

    Parameters
    ----------
    dataset_path: str
        Path to the dataset: either a directory containing EDF files,
        or a HDF5 master file.
    processes_file: str, optional
        Path to a HDF5 "processes" file holding pre-computed flats/darks,
        forwarded to the analyzer.
    extra_options: dict, optional
        Extra options forwarded to the dataset analyzer.

    Returns
    -------
    dataset_structure: object
        A `HDF5DatasetAnalyzer` for a HDF5 file, or an `EDFDatasetAnalyzer`
        for a directory.

    Raises
    ------
    ValueError
        If `dataset_path` does not exist, or is a file without a HDF5
        extension.
    """
    if not os.path.isdir(dataset_path):
        if not os.path.isfile(dataset_path):
            raise ValueError("Error: %s no such file or directory" % dataset_path)
        # is_hdf5_extension expects the extension without the leading dot,
        # hence the replace() on the splitext result.
        if not is_hdf5_extension(os.path.splitext(dataset_path)[-1].replace(".", "")):
            raise ValueError("Error: expected a HDF5 file")
        dataset_analyzer_class = HDF5DatasetAnalyzer
    else:  # directory -> assuming EDF
        dataset_analyzer_class = EDFDatasetAnalyzer
    dataset_structure = dataset_analyzer_class(
        dataset_path, processes_file=processes_file, extra_options=extra_options
    )
    return dataset_structure
......@@ -62,6 +62,7 @@ class NXFlatField:
self.reduction_function = {}
self._set_reduction_method("flats", flats_reduction)
self._set_reduction_method("darks", darks_reduction)
self._loaded_from = None
def _set_reduction_method(self, what, reduction_method):
......@@ -84,6 +85,7 @@ class NXFlatField:
existing_data = self.load_existing_flatfield(fpath)
if existing_data is not None:
self.logger.info("Loaded flats/darks from %s" % (fpath.file_path()))
self._loaded_from = fpath.file_path()
break
if existing_data is None:
self.logger.debug("Flats/darks not loaded from any file")
......@@ -204,7 +206,7 @@ class NXFlatField:
entry: str, optional
HDF5 entry name.
process_name: str, optional
Process name. Default is "flat_field".
Process name. Default is "flat_field_images".
tmp_fallback: bool, optional
Whether to write to /tmp if writing fails. Default is True.
"""
......@@ -275,3 +277,18 @@ class NXFlatField:
self.write_results(res)
return res
def get_final_urls(self, entry="entry", process_name="flat_field"):
    """
    Compute the final flats/darks and return them as DataUrl objects
    pointing into the HDF5 results file, instead of numpy arrays.

    Parameters
    ----------
    entry: str, optional
        HDF5 entry name. Default is "entry".
    process_name: str, optional
        Name of the HDF5 process group holding the results.
        Default is "flat_field".

    Returns
    -------
    res_urls: dict
        Dictionary with keys "flats" and "darks"; each maps an image id
        to a DataUrl into the results file.

    Raises
    ------
    ValueError
        If no results file can be determined (neither loaded from a
        lookup file nor written to `results_file`).
    """
    res = self.get_final_images(dont_write_results=False)
    # If the images were loaded from an existing file, point the URLs
    # there; otherwise use the file the results were written to.
    results_file = self._loaded_from or self.results_file
    # BUGFIX: check the resolved path, not self.results_file alone —
    # data loaded from a lookup file is valid even when results_file
    # was not provided.
    if results_file is None:
        raise ValueError("Need to provide either results_file or a valid lookup_files")
    res_urls = {"flats": {}, "darks": {}}
    for what in ["flats", "darks"]:
        # NOTE(review): every image id of a given kind gets the same
        # data_path (the whole "results/<what>" group) — presumably the
        # consumer selects the relevant slice; confirm against the reader.
        for img_id in res[what]:
            res_urls[what][img_id] = DataUrl(
                file_path=results_file,
                data_path=os.path.join(entry, process_name, "results", what)
            )
    return res_urls
......@@ -52,16 +52,23 @@ class ProcessConfig:
if not (os.path.isfile(conf_fname)):
raise ValueError("No such file: %s" % conf_fname)
conf = NabuConfigParser(conf_fname).conf_dict
dataset_infos = analyze_dataset(conf["dataset"]["location"])
conf = validate_nabu_config(conf)
dataset_infos = analyze_dataset(
conf["dataset"]["location"],
processes_file=conf["preproc"]["processes_file"],
extra_options={
"force_flatfield": conf["preproc"]["flatfield_enabled"] == "forced"
}
)
else:
if (conf_dict is None) or (dataset_infos is None):
raise ValueError(args_error_msg)
conf = conf_dict
conf = validate_nabu_config(conf_dict)
assert (isinstance(dataset_infos, EDFDatasetAnalyzer)) or (
isinstance(dataset_infos, HDF5DatasetAnalyzer)
)
self.nabu_config = conf
self.dataset_infos = dataset_infos
self.nabu_config = validate_nabu_config(conf)
self.checks = checks
self.remove_unused_radios = remove_unused_radios
self._get_cor()
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment