Commit 00233a02 authored by myron

restructuring the volume extraction and xrs_swissknife

parent 23322b8e
...@@ -191,6 +191,12 @@ from yaml.constructor import Constructor, MappingNode, ConstructorError
from XRStools import fit_spectra
from XRStools import reponse_percussionelle
def check_allowed( mydata, allowed_keys ):
    for k in mydata.keys():
        if k not in allowed_keys:
            raise ValueError( " key " + str(k) + " not in allowed keys : " + str(allowed_keys) )
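A minimal usage sketch of the key check above, guarding a parsed YAML section (the config dict here is hypothetical, not from this commit):

# sketch: validate a parsed YAML section against the keys a command accepts
import yaml
section = yaml.safe_load("""
active : 1
target : "myextraction"
""")
check_allowed( section, allowed_keys=["active", "target"] )   # passes silently
check_allowed( section, allowed_keys=["active"] )             # raises ValueError on "target"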
def dump_anydict_as_map( anydict):
    yaml.add_representer( anydict, _represent_dictorder)
...@@ -352,81 +358,6 @@ def help(yamlData):
    print( func.__doc__)
def Extraction(mydata):
    """
    **Extraction**

    Function to extract the signal of interest after removal of the Compton profile,
    linear baselines, and Pearson profiles.

    Example ::

      Extraction :
         active : 1
         dataadress : "pippo.h5:/ROI_A/loaded_datas"                # where load_scans wrote the data
         hfspectrum_address : "pippo.h5:/ROI_A/loaded_datas/HF_O"   # where the Compton profiles have been calculated
         # prenormrange : [ 5 , .inf ]

         analyzerAverage :                      # averaging over analysers
             active : 1
             which : [0,11 , 36,59 ]
             errorweighing  : False

         removeLinearAv :                       # fit a linear baseline and remove it
             active  : 1
             region1 : [520.0,532.0]
             region2 : None
             ewindow : 100
             scale : 1

         removePearsonAv:                       # fit a Pearson profile and remove it
             active  : 0
             region1 : [520.0,532.0]
             region2 : None
             guess :
                 Peak_position : 600.0
                 FWHM : 10
                 Shape : "Lorentzian"
                 Peak_intensity: 100.0
                 linear_slope : 1
                 linear_background : 0
                 scaling_factor : 1

         view : 0
         target : "myextraction"       # path relative to dataadress where the extracted signal will be written
    """
    reader, filename, groupname = read_reader(mydata, name="dataadress")
    HF = read_HF(mydata, name="hfspectrum_address")

    extr = extraction.extraction(reader, HF)

    if "analyzerAverage" in mydata:
        aa_data = mydata["analyzerAverage"]
        if gvord(aa_data, "active", True):
            which = aa_data["which"]
            errorweighing = gvord(aa_data, "errorweighing", False)
            extr.analyzerAverage(which, errorweighing=errorweighing)

    if "removeLinearAv" in mydata:
        rla_data = mydata["removeLinearAv"]
        if gvord(rla_data, "active", True):
            region1 = rla_data["region1"]
            region2 = gvord(rla_data, "region2", None)
            ewindow = gvord(rla_data, "ewindow", 100)
            scale = gvord(rla_data, "scale", 100)
            extr.removeLinearAv(region1, region2=region2, ewindow=ewindow,
                                scale=scale, view=gvord(mydata, "view", False),
                                )
    print( gvord(mydata, "view", False) )

    groupname = groupname + "/" + mydata["target"]
    check_libre( filename, groupname )
    extr.save_state_hdf5( filename, groupname, comment=inputtext )
def split_hdf5_address(dataadress):
...@@ -440,233 +371,188 @@ but : was not found
    return filename, groupname
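The commands in this module share a "filename:groupname" address convention. A hedged illustration of what split_hdf5_address is expected to return for a docstring-style address (assuming it splits at the colon, as the "but : was not found" error above suggests):

# sketch, not part of the commit: the address convention in practice
filename, groupname = split_hdf5_address( "pippo.h5:/ROI_A/loaded_datas" )
# filename  -> "pippo.h5"
# groupname -> "/ROI_A/loaded_datas"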
def read_HF(mydata, name="hfspectrum_address"):
    dataadress = mydata[name]
    filename, groupname = split_hdf5_address(dataadress)
    HF = theory.HFspectrum(None, None, None, initialise=False)
    HF.load_state_hdf5( filename, groupname )
    return HF


def HFspectrum(mydata):
    """
    **HFspectrum**

    Function for building S(q,w) from tabulated Hartree-Fock Compton profiles, to be used in
    the extraction algorithm.

    EXAMPLE ::

      dataadress : "hdf5filename:full_nameof_signals_group"   # where load_scans wrote the data
      formulas : ['O']               # list of strings of chemical sum formulas of which the sample is made up
      concentrations : [1.0]         # list of concentrations of how the different chemical formulas are mixed (sum should be 1)
      correctasym : [[0.0,0.0,0.0]]  # single value or list of scaling values for the HR-correction to
                                     # the 1s, 2s, and 2p shells. One value per element in the list of formulas
      hfspectrum_address : "nameofgroup"  # target group for writing, relative to dataadress (and in the same file!)
    """
    reader, filename, groupname = read_reader(mydata, name="dataadress")
    hf = theory.HFspectrum(reader,
                           mydata["formulas"],
                           mydata["concentrations"],
                           mydata["correctasym"]
                           )
    groupname = groupname + "/" + mydata["hfspectrum_address"]
    check_libre( filename, groupname )
    hf.save_state_hdf5( filename, groupname, comment=inputtext )
# def load_scans(mydata):
#     """
#     **load_scans**
#
#     This command harvests the selected signals.
#     The instructions on the scans to be taken must be given in the following form (as submembers of load_scans) ::
#
#       load_scans :
#          roiaddress : "hdf5filename:nameofroigroup"   # the same given in create_rois
#          expdata : "absolutepathtoaspecfile"          # this points to a spec file
#
#          elastic_scans : [623]
#          fine_scans : [626,630,634,638,642]
#          n_loop : 4
#          long_scan : 624
#
#          signaladdress : "nameofsignalgroup"   # target group for writing, relative to ROI (and in the same file!)
#
#          #############################################################
#          # OPTIONALS
#          #
#          order : [0,1,2,3,4,5]   # list of integers (0-5) which describes the order of modules in which the
#                                  # ROIs were defined (default is VD, VU, VB, HR, HL, HB; i.e. [0,1,2,3,4,5])
#          rvd : -41      # mean tth angle of HL module (default is 0.0)
#          rvu : 85       # mean tth angle of HR module (default is 0.0)
#          rvb : 121.8    # mean tth angle of HB module (default is 0.0)
#          rhl : 41.0     # mean tth angle of VD module (default is 0.0)
#          rhr : 41.0     # mean tth angle of VU module (default is 0.0)
#          rhb : 121.8    # mean tth angle of VB module (default is 0.0)
#          #
#     """
#     roiaddress = None
#     roiaddress = mydata["roiaddress"]
#     filename, groupname = split_hdf5_address( roiaddress )
#
#     file = h5py.File(filename, "r")
#     rois = {}
#     shape = xrs_rois.load_rois_fromh5(file[groupname], rois)
#     file.close()
#
#     roiob = xrs_rois.roi_object()
#     roiob.load_rois_fromMasksDict(rois, newshape=shape, kind="zoom")
#
#     reader = xrs_read.read_id20(mydata["expdata"], monitorcolumn='kapraman')
#
#     reader.set_roiObj(roiob)
#
#     elastic_scans = mydata["elastic_scans"][:]
#     fine_scans = mydata["fine_scans"][:]
#     n_loop = mydata["n_loop"]
#     long_scan = mydata["long_scan"]
#
#     reader.loadelasticdirect(elastic_scans)
#     reader.loadloopdirect(fine_scans, n_loop)
#     print( " LUNGO " )
#     reader.loadlongdirect(long_scan)
#
#     reader.getspectrum()
#
#     reader.geteloss()
#     reader.gettths(
#         rvd = gvord(mydata, "rvd", 0.0),
#         rvu = gvord(mydata, "rvu", 0.0),
#         rvb = gvord(mydata, "rvb", 0.0),
#         rhl = gvord(mydata, "rhl", 0.0),
#         rhr = gvord(mydata, "rhr", 0.0),
#         rhb = gvord(mydata, "rhb", 0.0),
#         order = gvord(mydata, "order", [0,1,2,3,4,5])
#     )
#
#     groupname = groupname + "/" + mydata["signaladdress"]
#     check_libre( filename, groupname )
#
#     reader.save_state_hdf5( filename, groupname, comment=inputtext )
#
#
# def volume_from_2Dimages(mydata):
#     """
#       imagesaddress : "test_imaging.hdf5:/ROI_A/images"   # where the data have been saved
#
#       scan_interval : [372,375]   # OPTIONAL : can be shorter than the scans effectively present in the file
#       roi_n : 0                   # OPTIONAL : if not given, the first non-empty roi found is used. Starts from 0.
#
#       volumeaddress : "myfile.hdf5:/path/to/hdf5/data"    # OPTIONAL : the target destination for the volume.
#                                                           # If not given, mayavi is launched on the fly.
#     """
#     reader = xrs_imaging.oneD_imaging( "bidon", "bidon", "bidon", "bidon" )
#
#     imagesaddress = mydata["imagesaddress"]
#     filename, groupname = split_hdf5_address(imagesaddress)
#     reader.load_state_hdf5( filename, groupname )
#
#     scan_names = list( reader.twoDimages.keys() )
#     scan_ids = list( map(int, [name[4:] for name in scan_names]) )
#     order = np.argsort(scan_ids)
#
#     if 'scan_interval' not in mydata:
#         scan_names = [ scan_names[id] for id in order ]
#     else:
#         scan_interval = mydata['scan_interval']
#         print( order )
#         print( scan_names )
#         print( scan_interval )
#         scan_names = [ scan_names[id] for id in order if scan_ids[id] >= scan_interval[0] and scan_ids[id] < scan_interval[1] ]
#
#     first_name = scan_names[0]
#     roi_n = 0
#
#     if not ('roi_n' in mydata):
#         while True:
#             shape = reader.twoDimages[first_name][roi_n].matrix.shape
#             if shape != (0,):
#                 break
#             roi_n += 1
#     else:
#         roi_n = mydata["roi_n"]
#         shape = reader.twoDimages[first_name][roi_n].matrix.shape
#
#     Volume = np.zeros(( shape[0], shape[1], len(scan_names) ))
#
#     for i, scanname in enumerate(scan_names):
#         Volume[:,:,i] = reader.twoDimages[scanname][roi_n].matrix
#
#     if 'volumeaddress' in mydata:
#         filename, groupname = split_hdf5_address( mydata['volumeaddress'] )
#         h5 = h5py.File(filename, 'a')
#         check_libre( h5, groupname )
#         h5[groupname] = Volume
#         h5.close()
#         h5 = None
#     else:
#         view_Volume_myavi_(Volume)
#
#
# def view_Volume_myavi(mydata):
#     """
#       volume_address : "myfile.hdf5:/path/to/hdf5/group"   # the target destination for the volume.
#     """
#     filename, groupname = split_hdf5_address( mydata['volume_address'] )
#
#     h5 = h5py.File(filename, 'r')
#     Volume = h5[groupname][:]
#     h5.close()
#     h5 = None
#
#     isolevel = mydata['isolevel']
#     opacity = mydata['opacity']
#
#     view_Volume_myavi_(Volume, isolevel, opacity)
#
#
# def view_Volume_myavi_(V, isolevel, opacity):
#     print( " IN view " )
#     src = mlab.pipeline.scalar_field(V)
#     mlab.pipeline.iso_surface(src, contours=[V.min() + isolevel * V.ptp(), ], opacity=opacity)
#     mlab.show()
#
#     src = mlab.pipeline.scalar_field(V)
#     mlab.pipeline.volume(src, vmin=1000.0, vmax=2000.0)
#     mlab.show()
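Note that the contour value in view_Volume_myavi_ is relative: isolevel is a fraction of the data range, not an absolute intensity. A self-contained numpy sketch of that arithmetic (stand-in volume, not from the commit):

import numpy as np
V = np.random.rand(16, 16, 16)            # stand-in volume
isolevel = 0.5                            # fraction of the data range
contour = V.min() + isolevel * np.ptp(V)  # same formula as in view_Volume_myavi_ above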
def calculate_recenterings(mydata):
...@@ -683,6 +569,8 @@ def calculate_recenterings(mydata):
      target: "recenterings.h5:/recenterings4rois"
    #
    """
    allowed_keys = ["bariA", "bariB", "target"]
    check_allowed(mydata, allowed_keys)
    bariA = mydata["bariA"]
    bariA_filename, bariA_groupname = split_hdf5_address( bariA )
...@@ -736,234 +624,229 @@ def calculate_recenterings(mydata):
    h5f = None
# def sum_scans2maps(mydata):
#
#     roiaddress = None
#     roiaddress = mydata["mask_file"]
#
#     filename, groupname = split_hdf5_address( roiaddress )
#
#     file = h5py.File(filename, "r")
#     rois = {}
#     shape = xrs_rois.load_rois_fromh5(file[groupname], rois)
#     file.close()
#
#     specfile_name = mydata["spec_file"]
#     Scan_Variable = mydata["Scan_Variable"]
#     Motor_Variable = mydata["Motor_Variable"]
#
#     specfile = SpecIO.Specfile( specfile_name )
#
#     dirname = os.path.dirname( specfile_name )
#     basename = os.path.basename( specfile_name )
#
#     scans_infos = []
#     signals = []
#
#     s1 = int(mydata["first_scan"])
#     s2 = int(mydata["last_scan"])
#
#     roi_names = list(rois.keys())
#     roi_list = [ rois[k] for k in roi_names ]
#
#     for i in range(s1, s2 + 1):
#         # print " SCAN lettura " , i
#         scan = specfile.select(str(i))
#         scan_data = scan.data()
#         scan_themotor = scan.motorpos( Motor_Variable )
#         scan_othermotors = [ scan.motorpos( name ) for name in scan.allmotors() if name != Motor_Variable ]
#         othermotorsname = [ name for name in scan.allmotors() if name != Motor_Variable ]
#         labels = scan.alllabels()
#         scan_variable = scan_data[ labels.index( Scan_Variable ), : ]
#         scan_ccdnos = scan_data[ labels.index( "ccdno" ), : ].astype("i")
#
#         signal = []
#         for no in scan_ccdnos:
#             print( " opening image ", os.path.join( dirname, "edf", basename + "_" + str(no) + ".edf" ) )
#             data = fabio.open( os.path.join( dirname, "edf", basename + "_" + str(no) + ".edf" ) ).data
#             tok = [ (data[corner[0]:corner[0]+mask.shape[0], corner[1]:corner[1]+mask.shape[1]] * mask).sum() for corner, mask in roi_list ]
#             signal.append(tok)
#             # print " OK "
#         # print signal
#         # print " Appendo " , signal
#         signals.append(np.array(signal))
#         # print "OK "
#         scans_infos.append( [scan_themotor, scan_othermotors, scan_variable] )
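The per-image ROI sum above crops each image at the ROI's corner and multiplies by its mask before summing. A self-contained numpy sketch of that idiom (the corner and mask values are hypothetical, not from the commit):

import numpy as np
data = np.arange(36.0).reshape(6, 6)   # stand-in detector image
corner = (2, 1)                        # top-left (row, col) of the ROI
mask = np.ones((3, 2))                 # ROI mask, same shape as the crop
crop = data[corner[0]:corner[0]+mask.shape[0], corner[1]:corner[1]+mask.shape[1]]
roi_sum = (crop * mask).sum()          # one entry of "tok" in sum_scans2maps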