import os
import posixpath

from ..utils import copy_dict_items, compare_dicts
from ..io.config import NabuConfigParser, validate_nabu_config, import_h5_to_dict
from ..io.utils import hdf5_entry_exists, get_h5_value
from .dataset_analyzer import analyze_dataset, DatasetAnalyzer
from .dataset_validator import NabuValidator
from .estimators import COREstimator, DetectorTiltEstimator, SinoCOREstimator
from .logger import Logger, PrinterLogger
from .params import radios_rotation_mode
from .utils import extract_parameters


class ProcessConfig:
    """
    A class for describing the Nabu process configuration.
    """

    def __init__(
        self,
        conf_fname=None,
        conf_dict=None,
        dataset_infos=None,
        checks=True,
        remove_unused_radios=True,
        create_logger=False,
    ):
        """
        Initialize a ProcessConfig class.

        Parameters
        ----------
        conf_fname: str
            Path to the nabu configuration file. If provided, the parameter
            `conf_dict` is ignored.
        conf_dict: dict
            A dictionary describing the nabu processing steps.
            If provided, the parameter `conf_fname` is ignored.
        dataset_infos: DatasetAnalyzer
            A `DatasetAnalyzer` class instance.
        checks: bool, optional, default is True
            Whether to perform checks on configuration and datasets (recommended!)
        remove_unused_radios: bool, optional, default is True
            Whether to remove unused radios, i.e. radios present in the dataset,
            but not explicitly listed in the scan metadata.
        create_logger: str or bool, optional
            Whether to create a Logger object. Default is False, meaning that the logger
            object creation is left to the user.
            If set to True, a Logger object is created, and logs will be written
            to the file "nabu_dataset_name.log".
            If set to a string, a Logger object is created, and the logs will be written
            to the file specified by this string.
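
        Examples
        --------
        A minimal usage sketch (the configuration file name below is only an example):

        >>> proc = ProcessConfig(conf_fname="nabu.conf")
        >>> proc.processing_steps     # ordered list of processing step names
        >>> proc.processing_options   # options dictionary for each step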
        """
        if not((conf_fname is None) ^ (conf_dict is None)):
            raise ValueError("You must either provide 'conf_fname' or 'conf_dict'")
        self.conf_fname = conf_fname
        if conf_fname is not None:
            if not os.path.isfile(conf_fname):
                raise ValueError("No such file: %s" % conf_fname)
            conf_dict = NabuConfigParser(conf_fname).conf_dict
        self.nabu_config = validate_nabu_config(conf_dict)
        self._create_logger(create_logger)
        self.dataset_infos = dataset_infos or self._get_dataset_infos()
        self.checks = checks
        self.remove_unused_radios = remove_unused_radios
        self.resume_from_step = None
        self.steps_to_save = []
        self._get_tilt()
        self._get_cor()
        self.validation_stage2()
        self.build_processing_steps()


    def _get_dataset_infos(self):
        dataset_infos = analyze_dataset(
            self.nabu_config["dataset"]["location"],
            processes_file=self.nabu_config["preproc"]["processes_file"],
            extra_options={
                "force_flatfield": self.nabu_config["preproc"]["flatfield_enabled"] == "forced",
                "exclude_projections": self.nabu_config["dataset"]["exclude_projections"],
                "output_dir": self.nabu_config["output"]["location"],
                "hdf5_entry": self.nabu_config["dataset"]["hdf5_entry"],
            },
            logger=self.logger
        )
        return dataset_infos


    def _create_logger(self, create_logger):
        if create_logger is False:
            self.logger = PrinterLogger()
            return
        elif create_logger is True:
            dataset_loc = self.nabu_config["dataset"]["location"]
            dataset_fname_rel = os.path.basename(dataset_loc)
            if os.path.isfile(dataset_loc):
                logger_filename = os.path.join(
                    os.path.abspath(os.getcwd()),
                    os.path.splitext(dataset_fname_rel)[0] + "_nabu.log"
                )
            else:
                logger_filename = os.path.join(
                    os.path.abspath(os.getcwd()),
                    dataset_fname_rel + "_nabu.log"
                )
        elif isinstance(create_logger, str):
            logger_filename = create_logger
        else:
            raise ValueError("Expected bool or str for create_logger")
        self.logger = Logger(
            "nabu",
            level=self.nabu_config["pipeline"]["verbosity"],
            logfile=logger_filename
        )


    def _get_cor(self):
        cor = self.nabu_config["reconstruction"]["rotation_axis_position"]
        if isinstance(cor, str): # auto-CoR
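            # `cor` is then the name of an estimation method; the numerical center
            # of rotation is computed below by the relevant estimator.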
            cor_slice = self.nabu_config["reconstruction"]["cor_slice"]
            if cor_slice is not None or cor == "sino-coarse-to-fine":
                subsampling = extract_parameters(
                    self.nabu_config["reconstruction"]["cor_options"]
                ).get("subsampling", 10)
                self.corfinder = SinoCOREstimator(
                    self.dataset_infos,
                    cor_slice or 0,
                    subsampling=subsampling,
                    do_flatfield=self.nabu_config["preproc"]["flatfield_enabled"],
                    cor_options=self.nabu_config["reconstruction"]["cor_options"],
                    logger=self.logger
                )
            else:
                self.corfinder = COREstimator(
                    self.dataset_infos,
                    halftomo=self.nabu_config["reconstruction"]["enable_halftomo"],
                    do_flatfield=self.nabu_config["preproc"]["flatfield_enabled"],
                    cor_options=self.nabu_config["reconstruction"]["cor_options"],
                    logger=self.logger
                )
            cor = self.corfinder.find_cor(method=cor)
        self.dataset_infos.axis_position = cor


    def _get_tilt(self):
        tilt = self.nabu_config["preproc"]["tilt_correction"]
        user_rot_projs = self.nabu_config["preproc"]["rotate_projections"]
        if user_rot_projs is not None and tilt is not None:
            msg = "=" * 80 + "\n"
            msg += "Both 'detector_tilt' and 'rotate_projections' options were provided. The option 'rotate_projections' will take precedence. This means that the projections will be rotated by %f degrees and the option 'detector_tilt' will be ignored." % user_rot_projs
            msg += "\n" + "=" * 80
            self.logger.warning(msg)
            tilt = user_rot_projs
        #
        if isinstance(tilt, str): # auto-tilt
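            # `tilt` is then the name of an estimation method; the numerical tilt
            # angle (in degrees) is computed below by DetectorTiltEstimator.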
            self.tilt_estimator = DetectorTiltEstimator(
                self.dataset_infos,
                logger=self.logger,
                autotilt_options=self.nabu_config["preproc"]["autotilt_options"]
            )
            tilt = self.tilt_estimator.find_tilt(tilt_method=tilt)
        self.dataset_infos.detector_tilt = tilt


    def validation_stage2(self):
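        """
        Perform consistency checks between the nabu configuration and the analyzed
        dataset, optionally removing unused radios. Checks are skipped if the
        `checks` parameter was set to False.
        """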
        validator = NabuValidator(self.nabu_config, self.dataset_infos)
        if self.checks:
            validator.perform_all_checks(remove_unused_radios=self.remove_unused_radios)


    def get_radios_rotation_mode(self):
        """
        Determine whether projections are to be rotated, and if so, when they are to be rotated.

        Returns
        -------
        method: str or None
            Rotation method: one of the values of `nabu.resources.params.radios_rotation_mode`
        """
        user_rotate_projections = self.nabu_config["preproc"]["rotate_projections"]
        tilt = self.dataset_infos.detector_tilt
        phase_method = self.nabu_config["phase"]["method"]
        do_ctf = phase_method == "CTF"
        do_pag = phase_method == "paganin"
        do_unsharp = self.nabu_config["phase"]["unsharp_coeff"] > 0
        if user_rotate_projections is None and tilt is None:
            return None
        if do_ctf:
            return "full"
        # TODO "chunked" rotation is done only when using a "processing margin"
        # For now the processing margin is enabled only if phase or unsharp is enabled.
        # We can either
        #   - Enable processing margin if rotating projections is needed (more complicated to implement)
        #   - Always do "full" rotation (simpler to implement, at the expense of performance)
        if do_pag or do_unsharp:
            return "chunk"
        else:
            return "full"


    def build_processing_steps(self):
        """
        Build the list of processing steps from the current configuration.
        The resulting structures, stored in `processing_steps` and `processing_options`,
        are a more compact and ready-to-use representation of the two main fields
        of ProcessConfig (dataset_infos and nabu_config).
        """
        nabu_config = self.nabu_config
        dataset_infos = self.dataset_infos
        binning = (nabu_config["dataset"]["binning"], nabu_config["dataset"]["binning_z"])
        tasks = []
        options = {}

        #
        # Dataset / Get data
        #
        # First thing to do is to get the data (radios or sinograms)
        # For now data is assumed to be on disk (see issue #66).
        tasks.append("read_chunk")
        options["read_chunk"] = {
            "files": dataset_infos.projections,
            "sub_region": None,
            "binning": binning,
            "dataset_subsampling": nabu_config["dataset"]["projections_subsampling"]
        }
        #
        # Flat-field
        #
        if nabu_config["preproc"]["flatfield_enabled"]:
            tasks.append("flatfield")
            options["flatfield"] = {
                #  ChunkReader handles binning/subsampling by itself,
                # but FlatField needs "real" indices (after binning/subsampling)
                "projs_indices": dataset_infos._projs_indices_subsampled,
                "binning": binning,
                "do_flat_distortion": nabu_config["preproc"]["flat_distortion_correction_enabled"],
                "flat_distortion_params": extract_parameters(nabu_config["preproc"]["flat_distortion_params"]),
            }
        #
        # Spikes filter
        #
        if nabu_config["preproc"]["ccd_filter_enabled"]:
            tasks.append("ccd_correction")
            options["ccd_correction"] = {
                "type": "median_clip", # only one available for now
                "median_clip_thresh": nabu_config["preproc"]["ccd_filter_threshold"],
            }
        #
        # Double flat field
        #
        if nabu_config["preproc"]["double_flatfield_enabled"]:
            tasks.append("double_flatfield")
            options["double_flatfield"] = {
                "sigma": nabu_config["preproc"]["dff_sigma"],
                "processes_file": nabu_config["preproc"]["processes_file"],
            }
        #
        # Radios rotation (do it here if possible)
        #
        if self.get_radios_rotation_mode() == "chunk":
            tasks.append("rotate_projections")
            options["rotate_projections"] = {
                "angle": nabu_config["preproc"]["rotate_projections"] or dataset_infos.detector_tilt,
                "center": nabu_config["preproc"]["rotate_projections_center"],
                "mode": "chunk",
            }
        #
        #
        # Phase retrieval
        #
        if nabu_config["phase"]["method"] is not None:
            tasks.append("phase")
            options["phase"] = copy_dict_items(
                nabu_config["phase"], ["method", "delta_beta", "margin", "padding_type"]
            )
            options["phase"].update({
                "energy_kev": dataset_infos.energy,
                "distance_cm": dataset_infos.distance * 1e2,
                "distance_m": dataset_infos.distance,
                "pixel_size_microns": dataset_infos.pixel_size,
                "pixel_size_m": dataset_infos.pixel_size * 1e-6,
            })
            if binning != (1, 1):
                options["phase"]["delta_beta"] /= (binning[0] * binning[1])
            if options["phase"]["method"] == "CTF":
                self._get_ctf_parameters(options["phase"])
        #
        # Unsharp
        #
        if nabu_config["phase"]["unsharp_coeff"] > 0:
            tasks.append("unsharp_mask")
            options["unsharp_mask"] = copy_dict_items(
                nabu_config["phase"], ["unsharp_coeff", "unsharp_sigma"]
            )
        #
        # -logarithm
        #
        if nabu_config["preproc"]["take_logarithm"]:
            tasks.append("take_log")
            options["take_log"] = copy_dict_items(nabu_config["preproc"], ["log_min_clip", "log_max_clip"])
        #
        # Radios rotation (do it here if mode=="full")
        #
        if self.get_radios_rotation_mode() == "full":
            tasks.append("rotate_projections")
            options["rotate_projections"] = {
                "angle": nabu_config["preproc"]["rotate_projections"] or dataset_infos.detector_tilt,
                "center": nabu_config["preproc"]["rotate_projections_center"],
                "mode": "full",
            }
        #
        # Translation movements
        #
        translations = dataset_infos.translations
        if translations is not None:
            tasks.append("radios_movements")
            options["radios_movements"] = {
                "translation_movements": dataset_infos.translations
            }
        #
        # Sinogram normalization (before half-tomo)
        #
        if nabu_config["preproc"]["sino_normalization"] is not None:
            tasks.append("sino_normalization")
            options["sino_normalization"] = {
                "method": nabu_config["preproc"]["sino_normalization"]
            }
        #
        # Sinogram-based ring artefacts removal
        #
        if nabu_config["preproc"]["sino_rings_correction"]:
            tasks.append("sino_rings_correction")
            options["sino_rings_correction"] = {
                "user_options": nabu_config["preproc"]["sino_rings_options"],
            }
        #
        # Reconstruction
        #
        if nabu_config["reconstruction"]["method"] is not None:
            tasks.append("build_sino")
            options["build_sino"] = copy_dict_items(
                nabu_config["reconstruction"],
                ["rotation_axis_position", "enable_halftomo", "start_x", "end_x",
                 "start_y", "end_y", "start_z", "end_z"]
            )
            options["build_sino"]["axis_correction"] = dataset_infos.axis_correction
            tasks.append("reconstruction")
            # Iterative is not supported through configuration file for now.
            options["reconstruction"] = copy_dict_items(
                nabu_config["reconstruction"],
                ["method", "rotation_axis_position", "fbp_filter_type",
                "padding_type", "enable_halftomo",
                "start_x", "end_x", "start_y", "end_y", "start_z", "end_z"]
            )
            rec_options = options["reconstruction"]
            rec_options["rotation_axis_position"] = dataset_infos.axis_position
            options["build_sino"]["rotation_axis_position"] = dataset_infos.axis_position
            rec_options["axis_correction"] = dataset_infos.axis_correction
            rec_options["angles"] = dataset_infos.reconstruction_angles
            rec_options["radio_dims_y_x"] = dataset_infos.radio_dims[::-1]
            rec_options["pixel_size_cm"] = dataset_infos.pixel_size * 1e-4 # pix size is in microns
            if rec_options["enable_halftomo"]:
                rec_options["angles"] = rec_options["angles"][:rec_options["angles"].size//2]
                cor_i = int(round(rec_options["rotation_axis_position"]))
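                # Interpretation note: in half-tomography the sinogram is virtually
                # extended to a width of about 2*cor_i pixels, and (2*cor_i - 1)/2
                # below is the center (pixel index) of that extended sinogram.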
                # New keys
                rec_options["rotation_axis_position_halftomo"] = (2*cor_i-1)/2.
            # New key
            rec_options["cor_estimated_auto"] = isinstance(nabu_config["reconstruction"]["rotation_axis_position"], str)
        #
        # Histogram
        #
        if nabu_config["postproc"]["output_histogram"]:
            tasks.append("histogram")
            options["histogram"] = copy_dict_items(
                nabu_config["postproc"], ["histogram_bins"]
            )
        #
        # Save
        #
        if nabu_config["output"]["location"] is not None:
            tasks.append("save")
            options["save"] = copy_dict_items(
                nabu_config["output"], list(nabu_config["output"].keys())
            )
            options["save"]["overwrite"] = nabu_config["output"]["overwrite_results"]

        self.processing_steps = tasks
        self.processing_options = options
        # Sanity check: each processing step must have its corresponding options
        if set(self.processing_steps) != set(self.processing_options.keys()):
            raise ValueError("Something wrong with process_config: options do not correspond to steps")
        #
        self._configure_save_steps()
        self._configure_resume()

    def _get_ctf_parameters(self, phase_options):
        dataset_info = self.dataset_infos
        user_phase_options = self.nabu_config["phase"]

        ctf_geom = extract_parameters(user_phase_options["ctf_geometry"])
        ctf_advanced_params = extract_parameters(user_phase_options["ctf_advanced_params"])

        # z1_vh
        z1_v = ctf_geom["z1_v"]
        z1_h = ctf_geom["z1_h"]
        z1_vh = None
        if z1_h is None and z1_v is None:
            # parallel beam
            z1_vh = None
        elif (z1_v is None) ^ (z1_h is None):
            # only one is provided: source-sample distance
            z1_vh = z1_v or z1_h
        if z1_h is not None and z1_v is not None:
            # distance of the vertically focused source (horizontal line) and the horizontally focused source (vertical line)
            # for KB mirrors
            z1_vh = (z1_v, z1_h)
        # pix_size_det
        pix_size_det = ctf_geom["detec_pixel_size"] or dataset_info.pixel_size * 1e-6
        # wavelength
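        # 1.23984199e-9 is h*c in keV.m; the energy is expected in keV, so the wavelength is in meters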
        wavelength = 1.23984199e-9 / dataset_info.energy

        phase_options["ctf_geo_pars"] = {
            "z1_vh": z1_vh,
            "z2": phase_options["distance_m"],
            "pix_size_det": pix_size_det,
            "wavelength": wavelength,
            "magnification": bool(ctf_geom["magnification"]),
            "length_scale": ctf_advanced_params["length_scale"]
        }
        phase_options["ctf_lim1"] = ctf_advanced_params["lim1"]
        phase_options["ctf_lim2"] = ctf_advanced_params["lim2"]
        phase_options["ctf_normalize_by_mean"] = ctf_advanced_params["normalize_by_mean"]


    def _configure_save_steps(self):
        self._dump_sinogram = False
        steps_to_save = self.nabu_config["pipeline"]["save_steps"]
        if steps_to_save in (None, ""):
            return
        steps_to_save = [s.strip() for s in steps_to_save.split(",")]
        for step in self.processing_steps:
            step = step.strip()
            if step in steps_to_save:
                self.processing_options[step]["save"] = True
                self.processing_options[step]["save_steps_file"] = self.get_save_steps_file(step_name=step)
        # "sinogram" is a special keyword, not explicitly in the processing steps
        if "sinogram" in steps_to_save:
            self._dump_sinogram = True
            self._dump_sinogram_file = self.get_save_steps_file(step_name="sinogram")
        self.steps_to_save = steps_to_save


    def _get_dump_file_and_h5_path(self):
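        """
        Return the (file path, HDF5 data path) of the dump to resume processing from.
        Each element is set to None if the file or the data path cannot be found.
        """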
        resume_from = self.resume_from_step
        process_file = self.get_save_steps_file(step_name=resume_from)
        if not os.path.isfile(process_file):
            self.logger.error(
                "Cannot resume processing from step '%s': no such file %s" % (resume_from, process_file)
            )
            return None, None
        h5_entry = self.dataset_infos.hdf5_entry or "entry"
        process_h5_path = posixpath.join(
            h5_entry,
            resume_from,
            "results/data"
        )
        if not hdf5_entry_exists(process_file, process_h5_path):
            self.logger.error(
                "Could not find data in %s in file %s" % (process_h5_path, process_file)
            )
            process_h5_path = None
        return process_file, process_h5_path


    def _configure_resume(self):
        resume_from = self.nabu_config["pipeline"]["resume_from_step"]
        if resume_from in (None, ""):
            return
        resume_from = resume_from.strip(" ,;")
        self.resume_from_step = resume_from

        processing_steps = self.processing_steps
        # special case: resume from sinogram
        if resume_from == "sinogram":
            if "build_sino" not in processing_steps:
                msg = "Cannot resume processing from step 'sinogram': reconstruction is disabled with this configuration"
                self.logger.fatal(msg)
                raise ValueError(msg)
            idx = processing_steps.index("build_sino") # disable up to 'build_sino', not included
        #
        elif resume_from in processing_steps:
            idx = processing_steps.index(resume_from) + 1 # disable up to resume_from, included
        else:
            msg = "Cannot resume processing from step '%s': no such step in the current configuration" % resume_from
            self.logger.error(msg)
            self.resume_from_step = None
            return

        # Get corresponding file and h5 path
        process_file, process_h5_path = self._get_dump_file_and_h5_path()
        if process_file is None or process_h5_path is None:
            self.resume_from_step = None
            return
        dump_info = self._check_dump_file(process_file, raise_on_error=False)
        if dump_info is None:
            self.logger.error(
                "Cannot resume from step %s: cannot use file %s" % (resume_from, process_file)
            )
            self.resume_from_step = None
            return
        dump_start_z, dump_end_z = dump_info

        # Disable steps
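        # (the first step, "read_chunk", is kept: it is re-purposed below to read from the dump file)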
        steps_to_disable = processing_steps[1:idx]
        self.logger.debug("Disabling steps %s" % str(steps_to_disable))
        for step_name in steps_to_disable:
            processing_steps.remove(step_name)
            self.processing_options.pop(step_name)

        # Update configuration
        self.logger.info(
            "Processing will be resumed from step '%s' using file %s"
            % (resume_from, process_file)
        )
        self._old_read_chunk = self.processing_options["read_chunk"]
        self.processing_options["read_chunk"] = {
            "process_file": process_file,
            "process_h5_path": process_h5_path,
            "step_name": resume_from,
            "dump_start_z": dump_start_z,
            "dump_end_z": dump_end_z
        }
        # Don't dump a step if we resume from this step
        if resume_from in self.steps_to_save:
            self.logger.warning(
                "Processing is resumed from step '%s'. This step won't be dumped to a file" % resume_from
            )
            self.steps_to_save.remove(resume_from)
            if resume_from == "sinogram":
                self._dump_sinogram = False
            else:
                self.processing_options[resume_from].pop("save")


    def _check_dump_file(self, process_file, raise_on_error=False):
        """
        Return (start_z, end_z) on success
        Return None on failure
        """
        # Ensure the data in the file corresponds to what is currently requested
        # TODO add more tests to compare configurations
        if self.resume_from_step is None:
            return None

        # Check dataset shape/start_z/end_z
        rec_cfg_h5_path = posixpath.join(
            self.dataset_infos.hdf5_entry or "entry",
            self.resume_from_step,
            "configuration/nabu_config/reconstruction"
        )
        dump_start_z = get_h5_value(process_file, posixpath.join(rec_cfg_h5_path, "start_z"))
        dump_end_z = get_h5_value(process_file, posixpath.join(rec_cfg_h5_path, "end_z"))
        start_z, end_z = self.nabu_config["reconstruction"]["start_z"], self.nabu_config["reconstruction"]["end_z"]
        if not (dump_start_z <= start_z and end_z <= dump_end_z):
            msg = "File %s was built with start_z=%d, end_z=%d but current configuration asks for start_z=%d, end_z=%d" % (process_file, dump_start_z, dump_end_z, start_z, end_z)
            if not raise_on_error:
                self.logger.error(msg)
                return None
            self.logger.fatal(msg)
            raise ValueError(msg)

        # Check parameters other than reconstruction
        filedump_nabu_config = import_h5_to_dict(
            process_file,
            posixpath.join(
                self.dataset_infos.hdf5_entry or "entry",
                self.resume_from_step,
                "configuration/nabu_config"
            )
        )
        sections_to_ignore = ["reconstruction"]
        for section in sections_to_ignore:
            filedump_nabu_config[section] = self.nabu_config[section]
        diff = compare_dicts(filedump_nabu_config, self.nabu_config)
        if diff is not None:
            msg = "Nabu configuration in file %s differ from the current one: %s" % (process_file, diff)
            if not raise_on_error:
                self.logger.error(msg)
                return None
            self.logger.fatal(msg)
            raise ValueError(msg)
        #

        return (dump_start_z, dump_end_z)


    def get_save_steps_file(self, step_name=None):
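        """
        Return the path of the file where a given processing step is dumped:
        either the explicit 'steps_file' from the [pipeline] section, or
        "<output location>/<step_name>_<file_prefix>.hdf5".
        """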
        if self.nabu_config["pipeline"]["steps_file"] not in (None, ""):
            return self.nabu_config["pipeline"]["steps_file"]
        nabu_save_options = self.nabu_config["output"]
        output_dir = nabu_save_options["location"]
        file_prefix = step_name + "_" + nabu_save_options["file_prefix"]
        fname = os.path.join(output_dir, file_prefix) + ".hdf5"
        return fname