From 4c2139c304066fbd4218234dbae755bf694905d3 Mon Sep 17 00:00:00 2001 From: Alessandro <mirone@esrf.fr> Date: Tue, 8 Dec 2020 16:50:42 +0100 Subject: [PATCH 1/2] this modification should make sure that lists of keys made of strings are always properly sorted according to their numerical part --- XRStools/fitmap.py | 2 +- XRStools/id20_imaging.py | 2 +- XRStools/ixs_offDiagonal.py | 2 +- XRStools/roiSelectionWidget.py | 2 +- XRStools/roifinder_and_gui.py | 28 ++++++++++++++-------------- XRStools/superresolution.py | 2 +- XRStools/xrs_alignment.py | 2 +- XRStools/xrs_imaging.py | 3 ++- XRStools/xrs_read.py | 34 +++++++++++++++++----------------- XRStools/xrs_rois.py | 8 ++++---- XRStools/xrs_scans.py | 14 +++++++------- 11 files changed, 50 insertions(+), 49 deletions(-) diff --git a/XRStools/fitmap.py b/XRStools/fitmap.py index f01743e..c3d5fdf 100644 --- a/XRStools/fitmap.py +++ b/XRStools/fitmap.py @@ -151,7 +151,7 @@ class Plot2D(MyMplCanvas): def compute_initial_figure(self): file_list = glob.glob("scan_*.txt") - file_list.sort() + file_list.sort( key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ) img=[] error = [] diff --git a/XRStools/id20_imaging.py b/XRStools/id20_imaging.py index 37091d1..fe46f91 100644 --- a/XRStools/id20_imaging.py +++ b/XRStools/id20_imaging.py @@ -302,7 +302,7 @@ class imageset: def loadkimberlite(self,matfilename): data_dict = io.loadmat(matfilename) - sorted_keys = sorted(data_dict.keys()) + sorted_keys = sorted(data_dict.keys(), key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ) sy = data_dict['sy'][0] allsx = [] for key in sorted_keys[3:12]: diff --git a/XRStools/ixs_offDiagonal.py b/XRStools/ixs_offDiagonal.py index 3049dd2..74b6604 100644 --- a/XRStools/ixs_offDiagonal.py +++ b/XRStools/ixs_offDiagonal.py @@ -313,7 +313,7 @@ class offDiagonal: ds_group = h5group[ "offDiaDataSets" ] keys = [nm for nm in ds_group] - keys.sort() + keys.sort(key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ) for k in keys: diff --git a/XRStools/roiSelectionWidget.py b/XRStools/roiSelectionWidget.py index a65ccfd..e7f4d60 100644 --- a/XRStools/roiSelectionWidget.py +++ b/XRStools/roiSelectionWidget.py @@ -884,7 +884,7 @@ class mainwindow(Qt.QMainWindow): print( x,y, cx, cy) newspots.append((y,x,i, int((cy*4+cx)+1) ) ) else: - spots.sort() + spots.sort(key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ) newspots = [] for k,(y,x,i) in enumerate( spots): newspots.append((y,x,i,k+1) ) diff --git a/XRStools/roifinder_and_gui.py b/XRStools/roifinder_and_gui.py index e243d32..1aa3a61 100644 --- a/XRStools/roifinder_and_gui.py +++ b/XRStools/roifinder_and_gui.py @@ -767,7 +767,7 @@ class roi_finder: counter = 0 new_rois = {} - for data, key in zip(pw_data, sorted(roi_obj.red_rois)): # go through each matrix (one per ROI) + for data, key in zip(pw_data, sorted(roi_obj.red_rois , key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) )): # go through each matrix (one per ROI) # decompose data, choose method if method == 'nnma': # non negative matrix factorisation @@ -883,7 +883,7 @@ class roi_finder: counter = 0 new_rois = {} - for data, key in zip(pw_data, sorted(self.roi_obj.red_rois)): # go through each matrix (one per ROI) + for data, key in zip(pw_data, sorted(self.roi_obj.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) )): # go through each matrix (one per ROI) # decompose data, choose method if method == 'nnma': # non negative matrix factorisation @@ -1123,7 +1123,7 @@ class roi_finder: plt.ioff() counter = 0 - for 
data, key in zip(cw_data, sorted(self.roi_obj.red_rois)): + for data, key in zip(cw_data, sorted(self.roi_obj.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) )): for ii in range(data.shape[1]): if save_dataset: the_shelve = shelve.open(save_dataset) @@ -1254,7 +1254,7 @@ class roi_finder: plt.cla() counter = 0 - for data, key in zip(cw_data, sorted(self.roi_obj.red_rois)): + for data, key in zip(cw_data, sorted(self.roi_obj.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) )): print('Processing: ', key) for ii in range(data.shape[1]): print('>>>>>>>>> ', ii) @@ -1375,11 +1375,11 @@ class roi_finder: cw_data[key] = np.sum( data_dict_norm[key], axis=1 ) # fetch all red_roi keys - roi_keys = [key for key in sorted(self.roi_obj.red_rois)] + roi_keys = [key for key in sorted(self.roi_obj.red_rois , key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) )] # deep copy all ROIs, set all to zero red_rois_copy = {} - for key in sorted( self.roi_obj.red_rois ): + for key in sorted( self.roi_obj.red_rois , key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): red_rois_copy[key] = copy.deepcopy( self.roi_obj.red_rois[key] ) red_rois_copy[key][1] = np.zeros_like( self.roi_obj.red_rois[key][1] ) @@ -1492,7 +1492,7 @@ class roi_finder: self.column_ind -= 0 def finish( self, event ): - for key in sorted(red_rois_copy): + for key in sorted(red_rois_copy, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): red_rois_copy[key][1] = self.red_rois[key][1] plt.close() @@ -1602,7 +1602,7 @@ class roi_finder: plt.ioff() counter = 0 - for data, key in zip(cw_data, sorted(self.roi_obj.red_rois)): + for data, key in zip(cw_data, sorted(self.roi_obj.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) )): print('Processing: ', key) for ii in range(data.shape[1]): plt.cla() @@ -1955,11 +1955,11 @@ class roi_finder: rw_data[key] = np.sum( data_dict_norm[key], axis=2 ) # fetch all red_roi keys - roi_keys = [key for key in sorted(self.roi_obj.red_rois)] + roi_keys = [key for key in sorted(self.roi_obj.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) )] # deep copy all ROIs, set all to zero red_rois_copy = {} - for key in sorted( self.roi_obj.red_rois ): + for key in sorted( self.roi_obj.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): red_rois_copy[key] = copy.deepcopy( self.roi_obj.red_rois[key] ) red_rois_copy[key][1] = np.zeros_like( self.roi_obj.red_rois[key][1] ) @@ -2073,7 +2073,7 @@ class roi_finder: self.row_ind -= 0 def finish( self, event ): - for key in sorted(red_rois_copy): + for key in sorted(red_rois_copy, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): red_rois_copy[key][1] = self.red_rois[key][1] plt.close() @@ -2247,11 +2247,11 @@ class roi_finder: data_dict_norm[key] = scans[0].raw_signals[key] # fetch all red_roi keys - roi_keys = [key for key in sorted(self.roi_obj.red_rois)] + roi_keys = [key for key in sorted(self.roi_obj.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) )] # deep copy all ROIs, set all to zero red_rois_copy = {} - for key in sorted( self.roi_obj.red_rois ): + for key in sorted( self.roi_obj.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): red_rois_copy[key] = copy.deepcopy( self.roi_obj.red_rois[key] ) red_rois_copy[key][1] = np.zeros_like( self.roi_obj.red_rois[key][1] ) @@ -2267,7 +2267,7 @@ class roi_finder: ax0 = plt.subplot2grid( (16, 16), (0, 0), colspan=16, rowspan=16 ) new_rois = {} - for counter, 
key in enumerate(sorted(pw_data)): # go through each matrix (one per ROI) + for counter, key in enumerate(sorted(pw_data, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ) ): # go through each matrix (one per ROI) # decompose data, choose method if method == 'nnma': # non negative matrix factorisation diff --git a/XRStools/superresolution.py b/XRStools/superresolution.py index d736538..198f001 100644 --- a/XRStools/superresolution.py +++ b/XRStools/superresolution.py @@ -181,7 +181,7 @@ class imageset: def loadkimberlite(self,matfilename): data_dict = io.loadmat(matfilename) - sorted_keys = sorted(data_dict.keys()) + sorted_keys = sorted(data_dict.keys(), key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ) sy = data_dict['sy'][0] allsx = [] for key in sorted_keys[3:12]: diff --git a/XRStools/xrs_alignment.py b/XRStools/xrs_alignment.py index 2b64730..b9a65ab 100644 --- a/XRStools/xrs_alignment.py +++ b/XRStools/xrs_alignment.py @@ -83,7 +83,7 @@ def optimize_analyzer_focus(path, SPECfname, EDFprefix, EDFname, EDFpostfix, roi scan72.get_raw_signals( roi_obj, method='pixel') # measure the FWHM for each ROI - for ii,key in enumerate(sorted(scan72.raw_signals)): + for ii,key in enumerate(sorted(scan72.raw_signals , key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) )): motor_scale = scan72.counters[scan72_motornames[ii]] points = [] heights = [] # index 1 diff --git a/XRStools/xrs_imaging.py b/XRStools/xrs_imaging.py index 0d75ae3..7372718 100644 --- a/XRStools/xrs_imaging.py +++ b/XRStools/xrs_imaging.py @@ -551,7 +551,8 @@ class imageset: def loadkimberlite(self,matfilename): data_dict = io.loadmat(matfilename) - sorted_keys = sorted(data_dict.keys()) + sorted_keys = sorted(data_dict.keys() , key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ) + sy = data_dict['sy'][0] allsx = [] for key in sorted_keys[3:12]: diff --git a/XRStools/xrs_read.py b/XRStools/xrs_read.py index 0d8d5d3..cf35ca3 100644 --- a/XRStools/xrs_read.py +++ b/XRStools/xrs_read.py @@ -514,7 +514,7 @@ class Hydra: self.errors = np.zeros((len(self.energy),len(self.cenom_dict))) master_eloss = (self.energy - np.median([self.cenom_dict[key] for key in self.cenom_dict]))*1.0e3 self.E0 = np.median([self.cenom_dict[key] for key in self.cenom_dict]) - for key,ii in zip(sorted(self.cenom_dict), range(len(self.cenom_dict))): + for key,ii in zip(sorted(self.cenom_dict, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ), range(len(self.cenom_dict))): # signals x = ( self.energy - self.cenom_dict[key] )*1.0e3 y = self.raw_signals[key][:] @@ -539,7 +539,7 @@ class Hydra: self.errors = np.zeros((len(self.energy),len(self.cenom_dict))) master_eloss = ( self.energy - np.median(self.cenom_dict[first_key][self.cenom_dict[first_key] > 0.0]) )*1.0e3 self.E0 = np.median(self.cenom_dict[first_key][self.cenom_dict[first_key] > 0.0]) - for key,ii in zip(sorted(self.cenom_dict), range(len(self.cenom_dict))): + for key,ii in zip(sorted(self.cenom_dict, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ), range(len(self.cenom_dict))): print ('Pixel-by-pixel compensation for ' + key +'.') signal = np.zeros(len(master_eloss)) error = np.zeros(len(master_eloss)) @@ -570,7 +570,7 @@ class Hydra: self.errors = np.zeros((len(self.energy),len(self.roi_obj.red_rois))) energy = self.energy * 1e3 # energy in eV - for key,ii in zip(sorted(self.raw_signals), range(len(self.raw_signals))): + for key,ii in zip(sorted(self.raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ), 
range(len(self.raw_signals))): y = self.raw_signals[key] meanii = len(range(y.shape[1]))/2 yc = np.zeros_like(y) @@ -1074,7 +1074,7 @@ class Hydra: # normalize data pw_matrices_norm = [] - for key in sorted(raw_signals): + for key in sorted(raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): if method == 'pixel': unrav_mat = np.zeros((raw_signals[key].shape[0], raw_signals[key].shape[1]*raw_signals[key].shape[2])) elif method in [ 'column' , 'row']: @@ -1098,7 +1098,7 @@ class Hydra: # normalize data (first scan) pw_matrices_norm = [] - for key in sorted(raw_signals): + for key in sorted(raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): if method == 'pixel': unrav_mat = np.zeros((raw_signals[key].shape[0], raw_signals[key].shape[1]*raw_signals[key].shape[2])) elif method in [ 'column' , 'row' ]: @@ -1119,7 +1119,7 @@ class Hydra: raw_signals = self.scans[scanname].raw_signals # dict with raw_signals monitor = self.scans[scanname].monitor - for key,jj in zip(sorted(raw_signals), range(len(raw_signals))): + for key,jj in zip(sorted(raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ), range(len(raw_signals))): if method == 'pixel': unrav_mat = np.zeros((raw_signals[key].shape[0], raw_signals[key].shape[1]*raw_signals[key].shape[2])) elif method in [ 'column' , 'row']: @@ -1173,7 +1173,7 @@ class Hydra: self.scans[scan_name].get_raw_signals( self.roi_obj, method='sum' ) # find CENOM of each ROI - for key in sorted(self.scans[scan_name].raw_signals): + for key in sorted(self.scans[scan_name].raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): cofm = xrs_utilities.find_center_of_mass(self.scans[scan_name].energy,self.scans[scan_name].raw_signals[key]) self.cenom_dict[key] = cofm @@ -1192,7 +1192,7 @@ class Hydra: self.scans[scan_name].get_raw_signals( self.roi_obj, method='pixel' ) # find CENOM for each pixel of each ROI - for key in sorted(self.scans[scan_name].raw_signals): + for key in sorted(self.scans[scan_name].raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): self.cenom_dict[key] = np.zeros_like(self.roi_obj.red_rois[key][1],dtype='float') for ii in range(self.cenom_dict[key].shape[0]): for jj in range(self.cenom_dict[key].shape[1]): @@ -1393,7 +1393,7 @@ class Hydra_imaging(Hydra): # parse energy scale energy = np.array([]) - for scan_key in sorted(self.scans): + for scan_key in sorted(self.scans, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): energy = np.append(energy, self.scans[scan_key].motors['energy']) energy = np.array(list(set(energy))) @@ -1402,7 +1402,7 @@ class Hydra_imaging(Hydra): for roi_key, (pos,M) in sorted(self.roi_obj.red_rois.items()): signals[roi_key] = np.zeros( (len(energy), ) ) for en,estep in zip(energy, range(len(energy))): - for scan_key in sorted(self.scans): + for scan_key in sorted(self.scans, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): if self.scans[scan_key].motors['energy'] == en: signals[roi_key][estep] += np.sum(self.scans[scan_key].raw_signals[roi_key]) @@ -1430,7 +1430,7 @@ class Hydra_imaging(Hydra): """ # define master energy-loss scale energy = np.array([]) - for scan_key in sorted(self.scans): + for scan_key in sorted(self.scans, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): energy = np.append(energy, self.scans[scan_key].motors['energy']) energy = np.sort(np.array(list(set(energy)))) @@ -1451,7 +1451,7 @@ class Hydra_imaging(Hydra): for roi_key, (pos,M) in 
sorted(self.roi_obj.red_rois.items()): self.raw_signals[roi_key] = np.zeros( (len(energy), len(scan_scale_1), M.shape[1], len(scan_scale_2)) ) for en,estep in zip(energy, range(len(energy))): - for scan_key in sorted(self.scans): + for scan_key in sorted(self.scans, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): if self.scans[scan_key].motors['energy'] == en: for zstep in range(len(scan_scale_2)): if self.scans[scan_key].motors[step_motor] == scan_scale_2[zstep]: @@ -1459,7 +1459,7 @@ class Hydra_imaging(Hydra): # interpolate everything onto master energy-loss scale self.raw_signals_int = {} - for roi_key in sorted(self.raw_signals): + for roi_key in sorted(self.raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): x = (energy - self.cenom_dict[roi_key])*1.0e3 y = self.raw_signals[roi_key] f = interp1d(x, y, kind='linear', axis=0, bounds_error=False, fill_value=0.0) @@ -1722,7 +1722,7 @@ class Fourc: elastic_scan.get_raw_signals( self.roi_obj, method='pixel' ) # find CENOM for each pixel of each ROI - for key in sorted(elastic_scan.raw_signals): + for key in sorted(elastic_scan.raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): self.cenom_dict[key] = np.zeros_like(self.roi_obj.red_rois[key][1]) for ii in range(self.cenom_dict[key].shape[0]): for jj in range(self.cenom_dict[key].shape[1]): @@ -2087,7 +2087,7 @@ class Fourc: # normalize data pw_matrices_norm = [] - for key in sorted(raw_signals): + for key in sorted(raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): if method == 'pixel': unrav_mat = np.zeros((raw_signals[key].shape[0], raw_signals[key].shape[1]*raw_signals[key].shape[2])) elif method in [ 'column' , 'row']: @@ -2111,7 +2111,7 @@ class Fourc: # normalize data (first scan) pw_matrices_norm = [] - for key in sorted(raw_signals): + for key in sorted(raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ): if method == 'pixel': unrav_mat = np.zeros((raw_signals[key].shape[0], raw_signals[key].shape[1]*raw_signals[key].shape[2])) elif method in [ 'column' , 'row']: @@ -2132,7 +2132,7 @@ class Fourc: raw_signals = self.scans[scanname].raw_signals # dict with raw_signals monitor = self.scans[scanname].monitor - for key,jj in zip(sorted(raw_signals), range(len(raw_signals))): + for key,jj in zip(sorted(raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ), range(len(raw_signals))): if method == 'pixel': unrav_mat = np.zeros((raw_signals[key].shape[0], raw_signals[key].shape[1]*raw_signals[key].shape[2])) elif method in [ 'column' ,'row'] : diff --git a/XRStools/xrs_rois.py b/XRStools/xrs_rois.py index 50b56f5..63d519c 100644 --- a/XRStools/xrs_rois.py +++ b/XRStools/xrs_rois.py @@ -202,7 +202,7 @@ class roi_object: # append the other ROIs self_len = len( new_obj.red_rois ) - for ii,key in enumerate( sorted( roi_obj.red_rois ) ): + for ii,key in enumerate( sorted( roi_obj.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ) ): new_key = 'ROI%02d'%( ii+self_len ) if not new_key in list( new_obj.red_rois.keys() ): new_obj.red_rois[new_key] = roi_obj.red_rois[key] @@ -328,7 +328,7 @@ class roi_object: orig_length = len( self.red_rois ) - for ii,key in enumerate(sorted(roi_object.red_rois)): + for ii,key in enumerate(sorted(roi_object.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) )): new_key = 'ROI%02d'%(ii+orig_length) self.red_rois[new_key] = roi_object.red_rois[key] self.red_rois[new_key][1][ self.red_rois[new_key][1]>0 ] += 
orig_length @@ -457,7 +457,7 @@ class roi_object: # delete last ROI if no key is specified if not roi_key: - roi_key = sorted(list( self.red_rois.keys()))[-1] + roi_key = sorted(list( self.red_rois.keys()) , key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) )[-1] # make sure the ROI exists assert(roi_key in list(self.red_rois.keys()) ) @@ -521,7 +521,7 @@ class roi_object: labelbottom=True ) # draw ROIs and labels - for ii, key in enumerate( sorted(self.red_rois) ): + for ii, key in enumerate( sorted(self.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ) ): # plot the ROI as frame corner = self.red_rois[key][0] inset = self.red_rois[key][1] diff --git a/XRStools/xrs_scans.py b/XRStools/xrs_scans.py index 83a9f8d..c2693bc 100644 --- a/XRStools/xrs_scans.py +++ b/XRStools/xrs_scans.py @@ -376,7 +376,7 @@ class Scan: errors = {} # sqrt of the sum of counts rot_angles_dict = {} # put possible rotation angles into dict counter = 0 - for key, (pos, M) in sorted(roi_obj.red_rois.items()): + for key, (pos, M) in sorted(roi_obj.red_rois.items() ): signals[key] = np.zeros((len(self.energy), M.shape[0])) errors[key] = np.zeros((len(self.energy), M.shape[0])) if rot_angles is not None: @@ -461,7 +461,7 @@ class Scan: self.__signals_normalized__ = True # assign - for key,ii in zip(sorted(roi_obj.red_rois), range(len(roi_obj.red_rois))): + for key,ii in zip(sorted(roi_obj.red_rois, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ), range(len(roi_obj.red_rois))): if np.any(scaling): self.raw_signals[key] = signals[key] * scaling[ii] self.raw_errors[key] = errors[key] * scaling[ii] @@ -494,7 +494,7 @@ class Scan: if method == 'sum': self.signals = np.zeros(( len(self.energy), len(self.raw_signals) )) self.errors = np.zeros(( len(self.energy), len(self.raw_signals) )) - for key,ii in zip(sorted(self.raw_signals), range(len(self.raw_signals))): + for key,ii in zip(sorted(self.raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ), range(len(self.raw_signals))): if not self.__signals_normalized__: self.signals[:,ii] = self.raw_signals[key]/self.monitor self.errors[:,ii] = self.raw_errors[key]/self.monitor @@ -513,7 +513,7 @@ class Scan: return self.signals = np.zeros(( len(self.energy), len(self.raw_signals) )) self.errors = np.zeros(( len(self.energy), len(self.raw_signals) )) - for key,ii in zip(sorted(self.raw_signals), range(len(self.raw_signals))): + for key,ii in zip(sorted(self.raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ), range(len(self.raw_signals))): S = cenom_dict[key].shape master_cenom = cenom_dict[key][int(S[0]/2.),int(S[1]/2.)] for dim1 in range(self.raw_signals[key].shape[1]): @@ -545,7 +545,7 @@ class Scan: self.errors = np.zeros(( len(self.energy), len(self.raw_signals) )) energy = self.energy #* 1e3 # energy in eV ## meanmon = np.mean(self.monitor) - for key,ii in zip(sorted(self.raw_signals), range(len(self.raw_signals))): + for key,ii in zip(sorted(self.raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ), range(len(self.raw_signals))): S = cenom_dict[key].shape #the_hist = np.histogram(cenom_dict[key][cenom_dict[key]>0.0 ], bins=10) #master_cenom = np.average(the_hist[1][1:], weights= the_hist[0])#cenom_dict[key][int(S[0]/2.),int(S[1]/2.)] @@ -583,7 +583,7 @@ class Scan: self.errors = np.zeros(( len(self.energy), len(self.raw_signals) )) energy = self.energy * 1e3 # energy in eV ### meanmon = np.mean(self.monitor) - for key,ii in zip(sorted(self.raw_signals), 
range(len(self.raw_signals))): + for key,ii in zip(sorted(self.raw_signals, key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ), range(len(self.raw_signals))): y = self.raw_signals[key] yn = (y.T/self.monitor).T #### * meanmon CHECK THIS REMOVAL meanii = len(range(yn.shape[1]))//2 @@ -1569,7 +1569,7 @@ def findgroups(scans): for scan in scans: print( scan ) allscannames.append(scan) - allscannames.sort() # + allscannames.sort( key = lambda x: int(''.join(filter(str.isdigit, str(x) ))) ) # allscans = [] for scan in allscannames: allscans.append(scans[scan]) -- GitLab From f60b23a020f9c1e1436fee1d1b3c47dc427f8901 Mon Sep 17 00:00:00 2001 From: Alessandro <mirone@esrf.fr> Date: Wed, 9 Dec 2020 19:06:49 +0100 Subject: [PATCH 2/2] all non reression tests pass --- .../esynth/batch_extraction_esynth1.py | 9 ++- .../interpolation/batch_extraction_interp.py | 63 +++++++++++++------ 2 files changed, 49 insertions(+), 23 deletions(-) diff --git a/nonregressions/volumes/esrf_scans/esynth/batch_extraction_esynth1.py b/nonregressions/volumes/esrf_scans/esynth/batch_extraction_esynth1.py index 202a0f5..186863d 100644 --- a/nonregressions/volumes/esrf_scans/esynth/batch_extraction_esynth1.py +++ b/nonregressions/volumes/esrf_scans/esynth/batch_extraction_esynth1.py @@ -11,12 +11,13 @@ from XRStools import xrs_read, xrs_rois import os def main(): - os.system("xz -dk mask.h5.xz --stdout > mask.h5") + os.system("xz -dk ../mask.h5.xz --stdout > mask.h5") + filter_path = "mask.h5:/FILTER_MASK/filter" roi_scan_num = [245,246,247] reference_scan_list = [245, 246, 247] - + monitor_column = "izero/0.000001" first_scan_num = 651 @@ -56,7 +57,7 @@ def main(): "do_step_scalars" : False, - "do_step_interpolation_coefficients": True, + "do_step_interpolation_coefficients": False, "do_step_finalise_for_fit": True } @@ -138,6 +139,8 @@ def main(): ) + + main() diff --git a/nonregressions/volumes/esrf_scans/interpolation/batch_extraction_interp.py b/nonregressions/volumes/esrf_scans/interpolation/batch_extraction_interp.py index d653b92..c81cb0a 100644 --- a/nonregressions/volumes/esrf_scans/interpolation/batch_extraction_interp.py +++ b/nonregressions/volumes/esrf_scans/interpolation/batch_extraction_interp.py @@ -5,7 +5,7 @@ import json import os import h5py import math -from XRStools import tools_sequencer_interp +from XRStools import tools_sequencer_interp, xrs_rois, xrs_read import os def main(): os.system("xz -dk ../mask.h5.xz --stdout > mask.h5 ") @@ -37,26 +37,28 @@ def main(): ["niter_global" , 3 ] ]) - resynth_z_square = 0 + resynth_z_square = 0.0 - selected_rois = list(range(0,24)) + list( range(36,60) ) + # selected_rois = list(range(0,24)) + list( range(36,60) ) + selected_rois = list(range(24,36)) + list( range(60,72) ) - scal_prod_use_optional_solution = True + scal_prod_use_optional_solution = False - volume_retrieval_beta = 6.0e-20 + volume_retrieval_beta = 6.0e-1 volume_retrieval_niter = 100 steps_to_do = { "do_step_make_roi": False, - "do_step_sample_extraction": True, + "do_step_sample_extraction": False, "do_step_interpolation": False, "do_step_extract_reference_scan": False, "do_step_fit_reference_response": False, "do_step_resynthetise_reference": False, - "do_step_scalars" : True, - "do_step_volume_retrieval" : False, + "do_step_scalars" : False, + "do_step_volume_retrieval" : True, + "do_step_put_all_in_one_stack" : True } @@ -81,17 +83,19 @@ def main(): ###### LOADING PEAKS SHIFTS ###### peaks_shifts = h5py.File("../peaks_positions_for_analysers.h5","r")["peaks_positions"][()] ###### 
assert( len(peaks_shifts) == 72) - - roiob = xrs_rois.roi_object() - roiob.loadH5( roi_target_path ) - elastic = xrs_read.Hydra( datadir ) - elastic.set_roiObj( roiob ) - elastic.get_compensation_factor( elastic_scan_for_peaks_shifts , method='sum') - el_dict = elastic.cenom_dict - Enominal = np.median( list( el_dict.values() ) ) - peaks_shift = np.array([ el_dict["ROI%02d"%i] if ("ROI%02d"%i) in el_dict else nan for i in range 72 ] ) - Enominal = np.median(peaks_shifts) - peaks_shifts-= Enominal + if steps_to_do["do_step_interpolation"]: + roiob = xrs_rois.roi_object() + roiob.loadH5( roi_target_path ) + elastic = xrs_read.Hydra( datadir ) + elastic.set_roiObj( roiob ) + elastic.get_compensation_factor( elastic_scan_for_peaks_shifts , method='sum') + el_dict = elastic.cenom_dict + Enominal = np.median( list( el_dict.values() ) ) + peaks_shifts = np.array([ el_dict["ROI%02d"%i] if ("ROI%02d"%i) in el_dict else nan for i in range( 72) ] ) + Enominal = np.median(peaks_shifts) + peaks_shifts-= Enominal + else: + peaks_shifts = None ############################################################## ########################################################################## @@ -121,7 +125,7 @@ def main(): response_fit_options = response_fit_options, resynthetised_reference_and_roi_target_file = resynthetised_reference_and_roi_target_file, - resynth_z_square = 0 + resynth_z_square = 0, selected_rois = selected_rois, @@ -135,6 +139,25 @@ def main(): + if steps_to_do["do_step_put_all_in_one_stack"] : + + volumefile = scalar_products_and_volume_target_file + + h5file_root = h5py.File( volumefile ,"r+" ) + + scankeys = list( h5file_root.keys()) + scankeys.sort() + volumes = [] + for k in scankeys: + if k[:1]!="_": + continue + print( k) + if "volume" in h5file_root[k]: + volumes.append( h5file_root[k]["volume"] ) + # volume = np.concatenate(volumes,axis=0) + volume = np.array(volumes) + h5py.File("concatenated_volume.h5","w")["volume"] = volume + h5file_root.close() main() -- GitLab
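
Editor's note (not part of the patches): PATCH 1/2 replaces plain lexicographic sorting of key lists with a sort key that compares the integer formed by the digits in each string, so that e.g. 'ROI10' sorts after 'ROI9'. The sketch below illustrates that key in isolation; the helper name numeric_part and the sample key list are hypothetical and only for demonstration.

def numeric_part(key):
    # Collect all digit characters of the string and interpret them as one
    # integer, e.g. 'ROI07' -> 7, 'scan_0123.txt' -> 123.
    return int(''.join(filter(str.isdigit, str(key))))

roi_keys = ['ROI10', 'ROI2', 'ROI1']
print(sorted(roi_keys))                    # ['ROI1', 'ROI10', 'ROI2']  (lexicographic)
print(sorted(roi_keys, key=numeric_part))  # ['ROI1', 'ROI2', 'ROI10']  (by numerical part)

Note that the key assumes every string contains at least one digit; a key with no digits (e.g. '__header__') would raise ValueError in int('').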