Commit c6cf005b authored by Pierre Paleo

DatasetAnalyzer: add logger

parent 252e639d
Pipeline #36197 passed with stages in 15 minutes and 31 seconds
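This commit threads an optional `logger` through `DatasetAnalyzer`, its subclasses, and `analyze_dataset`, using the `LoggerOrPrint` wrapper imported below. The wrapper's implementation is not part of this diff; going by the docstring added here ("If not set, messages will just be printed in stdout"), a minimal sketch of such a print-fallback wrapper could look like the following. This is an illustrative assumption, not nabu's actual `.logger` module.

# Hypothetical sketch only -- not the actual nabu .logger implementation.
class LoggerOrPrint:
    """Wrap a logging-like object; fall back to print() when none is given."""
    def __init__(self, logger=None):
        self._logger = logger

    def _emit(self, level, msg):
        # Forward to the wrapped logger if available, otherwise print
        if self._logger is not None:
            getattr(self._logger, level)(msg)
        else:
            print(msg)

    def info(self, msg):
        self._emit("info", msg)

    def warning(self, msg):
        self._emit("warning", msg)

    def error(self, msg):
        self._emit("error", msg)
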
@@ -9,6 +9,8 @@ from ..thirdparty.tomwer_load_flats_darks import get_flats_frm_process_file, get
 from .nxflatfield import NXFlatField
 from ..utils import is_writeable
 from .utils import is_hdf5_extension
+from .logger import LoggerOrPrint

 dataset_infos = {
     "num_radios": None,
@@ -28,7 +30,7 @@ class DatasetAnalyzer(object):
     """
     Base class for datasets analyzers.
     """
-    def __init__(self, location, processes_file=None, extra_options=None):
+    def __init__(self, location, processes_file=None, extra_options=None, logger=None):
         """
         Initialize a Dataset analyzer.
@@ -43,7 +45,10 @@ class DatasetAnalyzer(object):
             Available options are the following:
               - force_flatfield
               - output_dir
+        logger: logging object, optional
+            Logger. If not set, messages will just be printed in stdout.
         """
+        self.logger = LoggerOrPrint(logger)
         self.location = location
         self.processes_file = processes_file
         self._set_extra_options(extra_options)
@@ -139,7 +144,7 @@ class EDFDatasetAnalyzer(DatasetAnalyzer):
     """
     EDF Dataset analyzer for legacy ESRF acquisitions
     """
-    def __init__(self, location, n_frames=1, processes_file=None, extra_options=None):
+    def __init__(self, location, n_frames=1, processes_file=None, extra_options=None, logger=None):
         """
         EDF Dataset analyzer.
@@ -148,7 +153,9 @@ class EDFDatasetAnalyzer(DatasetAnalyzer):
         location: str
             Location of the folder containing EDF files
         """
-        super().__init__(location, processes_file=processes_file, extra_options=extra_options)
+        super().__init__(
+            location, processes_file=processes_file, extra_options=extra_options, logger=logger
+        )
         if not(os.path.isdir(location)):
             raise ValueError("%s is not a directory" % location)
         self._init_dataset_scan("edf", n_frames=n_frames)
@@ -180,7 +187,7 @@ class HDF5DatasetAnalyzer(DatasetAnalyzer):
     """
     HDF5 dataset analyzer
     """
-    def __init__(self, location, processes_file=None, extra_options=None):
+    def __init__(self, location, processes_file=None, extra_options=None, logger=None):
         """
         HDF5 Dataset analyzer.
@@ -189,7 +196,9 @@ class HDF5DatasetAnalyzer(DatasetAnalyzer):
         location: str
             Location of the HDF5 master file
         """
-        super().__init__(location, processes_file=processes_file, extra_options=extra_options)
+        super().__init__(
+            location, processes_file=processes_file, extra_options=extra_options, logger=logger
+        )
         if not(os.path.isfile(location)):
             raise ValueError("%s is not a file" % location)
         self._init_dataset_scan("hdf5")
@@ -213,7 +222,7 @@ class HDF5DatasetAnalyzer(DatasetAnalyzer):
         tomwer_processes_file = os.path.join(self.dataset_scanner.path, "tomwer_processes.h5")
         if not(os.path.isfile(tomwer_processes_file)):
             return False
-        print("Loading darks and refs from %s" % tomwer_processes_file)
+        self.logger.info("Loading darks and refs from %s" % tomwer_processes_file)
         new_flats = get_flats_frm_process_file(
             tomwer_processes_file, self.dataset_scanner.entry
         )
@@ -265,7 +274,8 @@ class HDF5DatasetAnalyzer(DatasetAnalyzer):
             results_url=results_url,
             force_load_existing_results=self.extra_options["force_flatfield"],
             flats_reduction="median",
-            darks_reduction="mean"
+            darks_reduction="mean",
+            logger=self.logger
         )
         res = self._nxflatfield.get_final_urls()
         self.flats = res["flats"]
@@ -320,7 +330,7 @@ class HDF5DatasetAnalyzer(DatasetAnalyzer):
-def analyze_dataset(dataset_path, processes_file=None, extra_options=None):
+def analyze_dataset(dataset_path, processes_file=None, extra_options=None, logger=None):
     if not(os.path.isdir(dataset_path)):
         if not(os.path.isfile(dataset_path)):
             raise ValueError("Error: %s no such file or directory" % dataset_path)
@@ -330,7 +340,10 @@ def analyze_dataset(dataset_path, processes_file=None, extra_options=None):
     else: # directory -> assuming EDF
         dataset_analyzer_class = EDFDatasetAnalyzer
     dataset_structure = dataset_analyzer_class(
-        dataset_path, processes_file=processes_file, extra_options=extra_options
+        dataset_path,
+        processes_file=processes_file,
+        extra_options=extra_options,
+        logger=logger
     )
     return dataset_structure
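With this change, callers can thread a standard `logging.Logger` through `analyze_dataset` and down to the analyzers. A brief usage sketch; the module path, file path, and logger name below are illustrative assumptions, not taken from this commit:

import logging

# Module path and dataset path are assumptions for illustration.
from nabu.resources.dataset_analyzer import analyze_dataset

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("nabu.dataset")

# Pass a standard logger; with logger=None, messages are simply printed to stdout.
dataset_info = analyze_dataset("/path/to/scan_master.h5", logger=logger)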