Commit 00233a02 authored by myron

restructuring the volume extraction and xrs_swissknife

parent 23322b8e
......@@ -192,6 +192,12 @@ from XRStools import fit_spectra
from XRStools import reponse_percussionelle
def check_allowed( mydata, allowed_keys ) :
    for k in mydata.keys():
        if not k in allowed_keys:
            raise ValueError( " key " + str(k) + " not in allowed keys : " + str(allowed_keys) )
def dump_anydict_as_map( anydict):
    yaml.add_representer( anydict, _represent_dictorder)

def _represent_dictorder( self, data):
......@@ -352,81 +358,6 @@ def help(yamlData):
print( func.__doc__)
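
# gvord() is used throughout the commands below as a get-value-or-default accessor on the
# YAML-derived dictionaries. A minimal sketch of the assumed behaviour (the real helper is
# defined elsewhere in this module and may differ):
#
#     def gvord(mydata, key, default=None):
#         return mydata[key] if key in mydata else default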
def Extraction(mydata):
    """
    **Extraction**

    Function to extract the signal of interest after removal of the Compton profile,
    linear baselines and, optionally, a Pearson profile.

    Example ::

     Extraction :
         active : 1
         dataadress : "pippo.h5:/ROI_A/loaded_datas"                # where load_scans wrote the data
         hfspectrum_address : "pippo.h5:/ROI_A/loaded_datas/HF_O"   # where the Compton profiles have been calculated
         # prenormrange : [ 5 , .inf ]

         analyzerAverage :                    # averaging over analysers
             active : 1
             which  : [0,11 , 36,59 ]
             errorweighing : False

         removeLinearAv :                     # fit a linear baseline and remove it
             active  : 1
             region1 : [520.0,532.0]
             region2 : None
             ewindow : 100
             scale   : 1

         removePearsonAv :                    # fit a Pearson profile and remove it
             active  : 0
             region1 : [520.0,532.0]
             region2 : None
             guess :
                 Peak_position : 600.0
                 FWHM          : 10
                 Shape         : "Lorentzian"
                 Peak_intensity: 100.0
                 linear_slope      : 1
                 linear_background : 0
                 scaling_factor    : 1

         view   : 0
         target : "myextraction"   # path relative to dataadress where the extracted signal will be written
    """
    reader , filename, groupname = read_reader(mydata, name="dataadress")
    HF = read_HF(mydata, name="hfspectrum_address")

    extr = extraction.extraction(reader, HF)

    if ("analyzerAverage") in mydata :
        aa_data = mydata["analyzerAverage"]
        if gvord(aa_data, "active", True):
            which = aa_data["which"]
            errorweighing = gvord(aa_data, "errorweighing", False)
            extr.analyzerAverage(which, errorweighing=errorweighing)

    if ("removeLinearAv") in mydata :
        rla_data = mydata["removeLinearAv"]
        if gvord(rla_data, "active", True):
            region1 = rla_data["region1"]
            region2 = gvord(rla_data, "region2", None)
            ewindow = gvord(rla_data, "ewindow", 100)
            scale   = gvord(rla_data, "scale",   100)
            extr.removeLinearAv(region1, region2=region2, ewindow=ewindow,
                                scale=scale, view=gvord(mydata, "view", False),
                                )
    print( gvord(mydata, "view", False) )

    groupname = groupname + "/" + mydata["target"]
    check_libre( filename, groupname )
    extr.save_state_hdf5( filename, groupname, comment = inputtext )
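
# All HDF5 locations in the workflow file are given as a single "filename:groupname"
# string, e.g. "pippo.h5:/ROI_A/loaded_datas" in the Extraction example above.
# split_hdf5_address() below separates the two parts at the ":" and complains when no
# ":" is present; schematically (return convention inferred from its callers):
#
#     split_hdf5_address("pippo.h5:/ROI_A/loaded_datas")   # -> ("pippo.h5", "/ROI_A/loaded_datas")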
def split_hdf5_address(dataadress):
......@@ -441,232 +372,187 @@ but : was not found
def read_HF(mydata, name="hfspectrum_address"):
    dataadress = mydata[name]
    filename, groupname = split_hdf5_address(dataadress)
    HF = theory.HFspectrum(None, None, None, initialise=False)
    HF.load_state_hdf5( filename, groupname)
    return HF
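
# read_HF() above is what Extraction() uses to rebuild the Hartree-Fock profiles written
# by HFspectrum(). A sketch of the call it ends up making, using the address from the
# Extraction docstring example:
#
#     HF = read_HF( {"hfspectrum_address": "pippo.h5:/ROI_A/loaded_datas/HF_O"} )
#     # HF is a theory.HFspectrum instance restored via load_state_hdf5()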
def HFspectrum(mydata):
    """
    **HFspectrum**

    Function for building S(q,w) from tabulated Hartree-Fock Compton profiles, to be used in
    the extraction algorithm.

    EXAMPLE ::

       dataadress     : "hdf5filename:full_nameof_signals_group"  # where load_scans wrote the data
       formulas       : ['O']   # list of strings of chemical sum formulas of which the sample is made up
       concentrations : [1.0]   # list of concentrations of how the different chemical formulas are mixed (sum should be 1)
       correctasym    : [[0.0,0.0,0.0]]  # single value or list of scaling values for the HR-correction to
                                         # the 1s, 2s, and 2p shells. One value per element in the list of formulas
       hfspectrum_address : "nameofgroup"  # target group for writing, relative to dataadress (and in the same file)

    """
    reader , filename, groupname = read_reader(mydata, name="dataadress")

    hf = theory.HFspectrum(reader,
                           mydata["formulas"],
                           mydata["concentrations"],
                           mydata["correctasym"]
                           )

    groupname = groupname + "/" + mydata["hfspectrum_address"]
    check_libre( filename, groupname )
    hf.save_state_hdf5( filename, groupname, comment = inputtext )
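
# How HFspectrum() and Extraction() chain together through the workflow file: the group
# written here ("dataadress" + "/" + "hfspectrum_address") is the group that Extraction()
# later reads as its "hfspectrum_address". A sketch following the docstring examples above:
#
#     HFspectrum :
#         dataadress         : "pippo.h5:/ROI_A/loaded_datas"
#         formulas           : ['O']
#         concentrations     : [1.0]
#         correctasym        : [[0.0,0.0,0.0]]
#         hfspectrum_address : "HF_O"
#
#     Extraction :
#         dataadress         : "pippo.h5:/ROI_A/loaded_datas"
#         hfspectrum_address : "pippo.h5:/ROI_A/loaded_datas/HF_O"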
def load_scans(mydata):
    """
    **load_scans**

    This command harvests the selected signals.
    The instructions for the scans to be taken must be given as sub-members of load_scans ::

     load_scans :
         roiaddress : "hdf5filename:nameofroigroup"  # the same given in create_rois
         expdata    : "absolutepathtoaspecfile"      # this points to a spec file

         elastic_scans : [623]
         fine_scans    : [626,630,634,638,642]
         n_loop        : 4
         long_scan     : 624

         signaladdress : "nameofsignalgroup"  # target group for writing, relative to the ROI group (and in the same file)

         #############################################################
         # OPTIONALS
         #
         order : [0,1,2,3,4,5]  # list of integers (0-5) which describes the order of modules in which the
                                # ROIs were defined (default is VD, VU, VB, HR, HL, HB; i.e. [0,1,2,3,4,5])

         rvd : -41     # mean tth angle of HL module (default is 0.0)
         rvu : 85      # mean tth angle of HR module (default is 0.0)
         rvb : 121.8   # mean tth angle of HB module (default is 0.0)
         rhl : 41.0    # mean tth angle of VD module (default is 0.0)
         rhr : 41.0    # mean tth angle of VU module (default is 0.0)
         rhb : 121.8   # mean tth angle of VB module (default is 0.0)

    """
    roiaddress = None
    roiaddress = mydata["roiaddress"]

    filename, groupname = split_hdf5_address(roiaddress)
    file = h5py.File(filename, "r")
    rois = {}
    shape = xrs_rois.load_rois_fromh5(file[groupname], rois)
    file.close()

    roiob = xrs_rois.roi_object()
    roiob.load_rois_fromMasksDict(rois, newshape=shape, kind="zoom")

    reader = xrs_read.read_id20(mydata["expdata"], monitorcolumn='kapraman')
    reader.set_roiObj(roiob)

    elastic_scans = mydata["elastic_scans"][:]
    fine_scans    = mydata["fine_scans"][:]
    n_loop        = mydata["n_loop"]
    long_scan     = mydata["long_scan"]

    reader.loadelasticdirect(elastic_scans)
    reader.loadloopdirect(fine_scans, n_loop)
    print( " LONG SCAN " )
    reader.loadlongdirect(long_scan)

    reader.getspectrum()
    reader.geteloss()
    reader.gettths(
        rvd   = gvord(mydata, "rvd", 0.0),
        rvu   = gvord(mydata, "rvu", 0.0),
        rvb   = gvord(mydata, "rvb", 0.0),
        rhl   = gvord(mydata, "rhl", 0.0),
        rhr   = gvord(mydata, "rhr", 0.0),
        rhb   = gvord(mydata, "rhb", 0.0),
        order = gvord(mydata, "order", [0,1,2,3,4,5])
    )

    groupname = groupname + "/" + mydata["signaladdress"]
    check_libre( filename, groupname )
    reader.save_state_hdf5( filename, groupname, comment = inputtext )
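
# Where load_scans() writes its result: the signal group is the ROI group from "roiaddress"
# plus "/" + "signaladdress". With the values implied by the docstring examples in this file,
#
#     roiaddress    : "pippo.h5:/ROI_A"
#     signaladdress : "loaded_datas"
#
# the scans end up in "pippo.h5:/ROI_A/loaded_datas", which is what Extraction() and
# HFspectrum() then take as their "dataadress".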
def volume_from_2Dimages(mydata):
    """
      imagesaddress : "test_imaging.hdf5:/ROI_A/images"  # where the data have been saved

      scan_interval : [372,375]   # OPTIONAL : can be shorter than the scans effectively present in the file
      roi_n         : 0           # OPTIONAL. If not given, the first non-empty ROI found is used. Starts from 0
      volumeaddress : "myfile.hdf5:/path/to/hdf5/data"  # OPTIONAL. The target destination for the volume. If not given, mayavi is launched on the fly.

    """
    reader = xrs_imaging.oneD_imaging("bidon", "bidon", "bidon", "bidon")

    imagesaddress = mydata["imagesaddress"]
    filename, groupname = split_hdf5_address(imagesaddress)
    reader.load_state_hdf5(filename, groupname)

    scan_names = list( reader.twoDimages.keys() )
    scan_ids   = list(map(int, [name[4:] for name in scan_names]))   # list() so it can be indexed under Python 3
    order      = np.argsort(scan_ids)

    if not ('scan_interval') in mydata :
        scan_names = [ scan_names[id] for id in order ]
    else:
        scan_interval = mydata['scan_interval']

        print( order )
        print( scan_names )
        print( scan_interval )

        scan_names = [ scan_names[id] for id in order if scan_ids[id] >= scan_interval[0] and scan_ids[id] < scan_interval[1] ]

    first_name = scan_names[0]
    roi_n = 0

    if not ('roi_n' in mydata ):
        # take the first ROI with a non-empty matrix
        while(1):
            shape = reader.twoDimages[first_name][roi_n].matrix.shape
            if shape != (0,) :
                break
            roi_n += 1
    else:
        roi_n = mydata["roi_n"]
        shape = reader.twoDimages[first_name][roi_n].matrix.shape

    Volume = np.zeros(( shape[0], shape[1], len(scan_names) ))

    for i, scanname in enumerate(scan_names):
        Volume[:,:,i] = reader.twoDimages[scanname][roi_n].matrix

    if ('volumeaddress') in mydata :
        filename, groupname = split_hdf5_address( mydata['volumeaddress'] )

        h5 = h5py.File(filename, 'a')
        check_libre( h5, groupname )
        h5[groupname] = Volume
        h5.close()
        h5 = None
    else:
        view_Volume_myavi_(Volume)
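
# The volume assembly above is just a stack of same-shaped 2D ROI images along a third
# axis. A self-contained sketch with made-up shapes and data:
#
#     import numpy as np
#     images = [np.random.rand(16, 32) for _ in range(5)]   # stand-ins for the twoDimages matrices
#     volume = np.zeros((16, 32, len(images)))
#     for i, im in enumerate(images):
#         volume[:, :, i] = im
#     # equivalently: volume = np.stack(images, axis=-1)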
def view_Volume_myavi(mydata):
    """
      volume_address : "myfile.hdf5:/path/to/hdf5/group"  # where the volume has been written
      isolevel       : 0.5   # iso-surface level, as a fraction of the data range
      opacity        : 0.4   # opacity of the iso-surface
    """
    filename, groupname = split_hdf5_address( mydata['volume_address'] )

    h5 = h5py.File(filename, 'r')
    Volume = h5[groupname][:]
    h5.close()
    h5 = None

    isolevel = mydata['isolevel']
    opacity  = mydata['opacity']

    view_Volume_myavi_(Volume, isolevel, opacity)
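
# A workflow-file block driving view_Volume_myavi(), consistent with the keys read by the
# code above (the isolevel/opacity values are illustrative):
#
#     view_Volume_myavi :
#         volume_address : "myfile.hdf5:/path/to/hdf5/group"
#         isolevel       : 0.5
#         opacity        : 0.4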
def view_Volume_myavi_(V, isolevel=0.5, opacity=0.4) :
    # The default isolevel/opacity values are an assumption, added so that the on-the-fly
    # preview in volume_from_2Dimages (which passes only the volume) can call this function.
    print( " IN view ")

    src = mlab.pipeline.scalar_field(V)
    mlab.pipeline.iso_surface(src, contours=[V.min() + isolevel * V.ptp(), ], opacity=opacity)
    mlab.show()

    src = mlab.pipeline.scalar_field(V)
    mlab.pipeline.volume(src, vmin=1000.0, vmax=2000.0)
    mlab.show()
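
# In view_Volume_myavi_() the iso-surface contour is placed at
#     V.min() + isolevel * V.ptp()
# i.e. isolevel is a fraction of the data range: for a volume spanning 1000..2000,
# isolevel = 0.5 puts the contour at 1500.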
def calculate_recenterings(mydata):
......@@ -683,6 +569,8 @@ def calculate_recenterings(mydata):
target: "recenterings.h5:/recenterings4rois"
#
"""
allowed_keys =["bariA","bariB","target", ]
check_allowed_keys(mydata, allowed_keys)
bariA = mydata["bariA"]
bariA_filename, bariA_groupname = split_hdf5_address( bariA )
......@@ -736,234 +624,229 @@ def calculate_recenterings(mydata):
h5f = None
def sum_scans2maps(mydata):

    roiaddress = None
    roiaddress = mydata["mask_file"]

    filename, groupname = split_hdf5_address( roiaddress )
    file = h5py.File(filename, "r")
    rois = {}
    shape = xrs_rois.load_rois_fromh5(file[groupname], rois)
    file.close()

    specfile_name  = mydata["spec_file"]
    Scan_Variable  = mydata["Scan_Variable"]
    Motor_Variable = mydata["Motor_Variable"]

    specfile = SpecIO.Specfile( specfile_name )

    dirname  = os.path.dirname(  specfile_name )
    basename = os.path.basename( specfile_name )

    scans_infos = []
    signals     = []

    s1 = int(mydata["first_scan"])
    s2 = int(mydata["last_scan"])

    roi_names = list(rois.keys())
    roi_list  = [ rois[k] for k in roi_names ]

    for i in range(s1, s2+1):
        scan      = specfile.select(str(i))
        scan_data = scan.data()

        scan_themotor    = scan.motorpos( Motor_Variable )
        scan_othermotors = [ scan.motorpos( name ) for name in scan.allmotors() if name != Motor_Variable ]
        othermotorsname  = [ name for name in scan.allmotors() if name != Motor_Variable ]

        labels        = scan.alllabels()
        scan_variable = scan_data[ labels.index( Scan_Variable ), : ]
        scan_ccdnos   = scan_data[ labels.index( "ccdno" ), : ].astype("i")

        signal = []
        for no in scan_ccdnos:
            print( " opening image ", os.path.join( dirname, "edf", basename + "_" + str(no) + ".edf") )
            data = fabio.open( os.path.join( dirname, "edf", basename + "_" + str(no) + ".edf" ) ).data
            # for each ROI: crop the image at the ROI corner, multiply by the mask and integrate
            tok = [ (data[corner[0]:corner[0]+mask.shape[0], corner[1]:corner[1]+mask.shape[1]] * mask).sum() for corner, mask in roi_list ]
            signal.append(tok)

        signals.append(np.array(signal))
        scans_infos.append( [scan_themotor, scan_othermotors, scan_variable] )

    # group scans taken at the same motor position and sum their signals
    done    = np.zeros( len(scans_infos), "i" )
    DONE    = {}
    synthes = {}

    for kscan in range(len(scans_infos)):
        DONE[kscan] = 0
        if done[kscan]:
            continue
        else:
            res = np.array(signals[kscan])
            done[kscan] = 1
            kinfos = scans_infos[kscan]
            kM, kOM, kV = kinfos

            for oscan in range(len(scans_infos)):
                if done[oscan]:
                    continue
                else:
                    oinfos = scans_infos[oscan]
                    oM, oOM, oV = oinfos

                    if kM == oM:
                        if True or (np.abs(np.array(kOM) - np.array(oOM)).sum() == 0.0):
                            print( " SAME MOTOR POSITION " )
                            if len(oV) == len(kV):
                                # if np.abs(kV-oV).sum()==0.0:
                                print( " ADDING " )
                                res = res + np.array(signals[oscan])
                                done[oscan] = 1

            print( " ADDING ", kM, len(kV), len(kOM) )
            synthes[kscan] = [ kM, kV, kOM, res ]
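
# The per-image ROI integration in sum_scans2maps() crops the detector image at the ROI
# corner, multiplies by the ROI mask and sums. A self-contained sketch with made-up data:
#
#     import numpy as np
#     data   = np.ones((256, 256))   # stand-in for a detector image
#     corner = (10, 20)              # ROI origin (row, column)
#     mask   = np.ones((4, 6))       # ROI mask
#     roi_sum = (data[corner[0]:corner[0]+mask.shape[0],
#                     corner[1]:corner[1]+mask.shape[1]] * mask).sum()
#     # roi_sum == 24.0 here: 4*6 pixels, each contributing 1*1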