ixstools / xrstools · Commits

Commit 4e823e2b
authored Dec 16, 2020 by myron
parent 85697144

    finished with the Raffaela dataset and the new scheme

Changes 8
XRStools/XRS_swissknife.py
@@ -374,7 +374,6 @@ but : was not found
     filename, groupname = dataadress[:pos], dataadress[pos+1:]
     if (len(groupname) and groupname[0:1] != "/"):
         groupname = "/" + groupname
-    print(" RITORNO ", groupname, (groupname[0:1] != "/"), len(groupname))
     return filename, groupname
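For readers unfamiliar with the convention, the hunk above splits a combined data address of the form "file.h5:/group/path" at a ":" separator and prepends a leading "/" to the group part when it is missing. A minimal self-contained sketch of that behaviour follows; the helper name split_hdf5_address is hypothetical and not part of XRS_swissknife.py:

    def split_hdf5_address(dataadress):
        # Split "file.h5:/group/path" into ("file.h5", "/group/path").
        pos = dataadress.find(":")
        if pos == -1:
            raise ValueError("no ':' separator in address " + dataadress)
        filename, groupname = dataadress[:pos], dataadress[pos + 1:]
        # As in the hunk above: make sure the group part starts with "/".
        if len(groupname) and groupname[0:1] != "/":
            groupname = "/" + groupname
        return filename, groupname

    # split_hdf5_address("results/myrois.h5:ROIS")  ->  ("results/myrois.h5", "/ROIS")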
@@ -2836,7 +2835,6 @@ def superR_scal_deltaXimages(mydata):
     """
     allowed_keys = ["delta_address", 'orig_delta_address', 'nbin', 'optional_solution', 'roi_keys',
                     "sample_address", 'scan_interval', "load_factors_from", "save_factors_on", "target_address", ]
     check_allowed_keys(mydata, allowed_keys)
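check_allowed_keys is defined elsewhere in XRS_swissknife.py and is not shown in this diff; the sketch below only illustrates what such a whitelist check typically does (illustrative, not the actual implementation):

    def check_allowed_keys_sketch(mydata, allowed_keys):
        # Reject any key of the user-supplied mapping that is not in the whitelist,
        # so a typo in the input file fails loudly instead of being silently ignored.
        unknown = [k for k in mydata if k not in allowed_keys]
        if unknown:
            raise ValueError("unknown keys %s, allowed keys are %s" % (unknown, allowed_keys))

    # check_allowed_keys_sketch({"nbin": 2, "nbim": 1}, ["nbin", "delta_address"])  # raises on "nbim"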
@@ -2880,8 +2878,8 @@ def superR_scal_deltaXimages(mydata):
     if ('roi_keys' in mydata):
         u_roi_keys = mydata['roi_keys']
-        u_roi_keys = [str(t) for t in u_roi_keys]
-        roi_keys = [t for t in u_roi_keys if t in roi_keys]
+        u_roi_keys = [int(t) for t in u_roi_keys]
+        roi_keys = [t for t in roi_keys if int(t) in u_roi_keys]
         roi_keys = [str(t) for t in roi_keys]
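The two changed lines switch the intersection with the user-supplied roi_keys from string comparison to integer comparison, so a selection given as integers still matches keys stored as strings. A small standalone illustration of the new behaviour, with made-up values:

    # Stored ROI keys are strings; the user selection may come in as ints or strings.
    roi_keys = ["0", "1", "2", "3"]
    u_roi_keys = [int(t) for t in ["1", 3]]                    # -> [1, 3]
    roi_keys = [t for t in roi_keys if int(t) in u_roi_keys]   # compare as integers
    roi_keys = [str(t) for t in roi_keys]                      # back to strings: ["1", "3"]
    print(roi_keys)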
nonregressions/volumes/test3/batch_extraction_esynth1.py → XRStools/tools_sequencer_esynth_galaxies.py
# newfactors is already prepared; next, verify that roi_sel works in ximages
import numpy as np
import h5py
import glob
import json
import sys
BATCH_PARALLELISM = 1
import os

def main():
    os.system("mkdir results")

    peaks_shifts = np.array([0.0, 0.0, 0.0, 0.0, ]) - 0.0

    dataprefix = "/data/raffaela/"
    datadir = dataprefix + "GS9_dataGalaxies/"
    filter_path = dataprefix + "mymask.h5:/mymask"
    filename_template = "GS9_%05d_01"
    data_path_template = datadir + filename_template + ".nxs:/root_spyc_config1d_RIXS_00001/scan_data/data_07"
    reference_data_path_template = dataprefix + "kapton_%05d_01" + ".nxs:/root_spyc_config1d_RIXS_00001/scan_data/data_07"
    monitor_path_template = None
    # monitor_path_template = datadir + filename_template + "_monitor.hd5:/monitor"

    print(datadir + (filename_template % 1) + ".nxs")
    energy_exp_grid = h5py.File(datadir + (filename_template % 1) + ".nxs", "r")["/root_spyc_config1d_RIXS_00001/scan_data/actuator_1_1"][()]
    energy_custom_grid = np.array(energy_exp_grid)
    energy_custom_grid[0] -= 0.1
    energy_custom_grid[+1] -= 0.1

    scan_interval = [1, 257]  # from 1 to 475 included
    Ydim = 16
    Zdim = 16
    Edim = 19
    reference_scan_list = [1]
    custom_components_file = None
    # custom_components_file = "components.h5"
    roi_scan_num = list(range(1, 3))
    reference_clip = None
    reference_clip = [0, 200]

    ## in the reference scan, for each position there is a spot with a maximum. We set to zero the background
    ## which is further than such a radius from the maximum
    isolate_spot_by = 7

    #### For the fit of the response function based on reference scans
    response_fit_options = dict([["niter_optical", 40], ["beta_optical", 0.1], ["niter_global", 1]])

    resynth_z_square = 0
    selected_rois = list(range(0, 4))

    steps_to_do = {
        "do_step_make_roi": False,
        "do_step_sample_extraction": False,
        "do_step_extract_reference_scan": False,
        "do_step_fit_reference_response": False,
        "do_step_resynthetise_reference": False,
        "do_step_scalar": False,
        "do_step_interpolation_coefficients": False,
        "do_step_finalise_for_fit": True
    }

    scalar_products_target_file = "results/scalar_prods.h5"
    roi_target_path = "results/myrois.h5:/ROIS"
    reference_target_file = "results/response.h5"
    signals_target_file = "results/extracted.h5"
    extracted_reference_target_file = "results/reference.h5"
    response_target_file = "results/response.h5"
    interpolation_infos_file = "interpolation_infos.json"
    resynthetised_reference_and_roi_target_file = "results/resynthetised_roi_and_scan.h5"

    tools_sequencer(peaks_shifts=peaks_shifts,
                    datadir=datadir,
                    filter_path=filter_path,
                    roi_scan_num=roi_scan_num,
                    roi_target_path=roi_target_path,
                    data_path_template=data_path_template,
                    reference_data_path_template=reference_data_path_template,
                    steps_to_do=steps_to_do,
                    scan_interval=scan_interval,
                    Ydim=Ydim,
                    Zdim=Zdim,
                    Edim=Edim,
                    monitor_path_template=monitor_path_template,
                    signals_target_file=signals_target_file,
                    reference_scan_list=reference_scan_list,
                    reference_clip=reference_clip,
                    extracted_reference_target_file=extracted_reference_target_file,
                    isolate_spot_by=isolate_spot_by,
                    response_target_file=response_target_file,
                    response_fit_options=response_fit_options,
                    resynthetised_reference_and_roi_target_file=resynthetised_reference_and_roi_target_file,
                    resynth_z_square=resynth_z_square,
                    selected_rois=selected_rois,
                    scalar_products_target_file=scalar_products_target_file,
                    energy_custom_grid=energy_custom_grid,
                    custom_components_file=custom_components_file,
                    interpolation_infos_file=interpolation_infos_file,
                    energy_exp_grid=energy_exp_grid)

def synthetise_response(scan_address=None, target_address=None, response_fit_options=None):
@@ -586,7 +459,6 @@ def reshuffle( volumefile = "volumes.h5", nick = None ):
 ## THE FOLLOWING PART IS THE RELEVANT ONE
 def tools_sequencer( peaks_shifts = None,
-                     datadir = None,
                      filter_path = None,
                      roi_scan_num = None,
                      roi_target_path = None,
@@ -678,7 +550,7 @@ def tools_sequencer( peaks_shifts = None,
     if (steps_to_do["do_step_fit_reference_response"]):
         synthetise_response(
             scan_address = extracted_reference_target_file + ":Scan0",
-            target_address = + ":/FIT",
+            target_address = response_target_file + ":/FIT",
             response_fit_options = response_fit_options
         )
@@ -733,6 +605,3 @@ def tools_sequencer( peaks_shifts = None,
     get_volume_Esynt( scalarprods_file = scalarprods_file,
                       interpolation_file = interpolation_infos_file )

main()
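The steps_to_do dictionary defined in main() gates each stage of the pipeline, as in the do_step_fit_reference_response branch above. A minimal sketch of that gating idiom, with placeholder actions instead of the real pipeline steps:

    def run_pipeline(steps_to_do):
        # Each stage runs only when its flag is True, so a partially completed
        # analysis can be resumed by flipping the individual booleans.
        if steps_to_do.get("do_step_make_roi", False):
            print("would select the ROIs here")
        if steps_to_do.get("do_step_fit_reference_response", False):
            print("would fit the reference response here")
        if steps_to_do.get("do_step_finalise_for_fit", False):
            print("would finalise the volume for the fit here")

    run_pipeline({"do_step_make_roi": False, "do_step_finalise_for_fit": True})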
XRStools/tools_sequencer_esynth_galaxies.py~  (new file, 0 → 100644)
def synthetise_response(scan_address=None, target_address=None, response_fit_options=None):
input_string = """
superR_fit_responses :
foil_scan_address : "{scan_address}"
nref : 7 # the number of subdivisions per pixel dimension used to
# represent the optical response function at higher resolution
niter_optical : {niter_optical} # the number of iterations used in the optimisation of the optical
# response
beta_optical : {beta_optical} # The L1 norm factor in the regularisation
# term for the optical functions
pixel_dim : 1 # The pixel response function is represented with a
# pixel_dim**2 array
niter_pixel : 10 # The number of iterations in the pixel response optimisation
# phase. A negative number stands for ISTA, positive for FISTA
beta_pixel : 0.0 # L1 factor for the pixel response regularisation
## The trajectories that were used are always written together with the calculated response.
## They can be reloaded and used as initialisation (and frozen with do_refine_trajectory : 0 ).
## Uncomment the following line if you want to reload a set of trajectories;
## without this option, trajectories are initialised from the spot drifts.
##
# reload_trajectories_file : "response.h5"
filter_rois : 0
######
## The method first finds an estimate of the foil scan trajectory on each roi;
## then, based on this, it obtains a fit of the optical response function
## assuming a flat pixel response. Finally the pixel response is optimised.
##
## There is a final phase where a global optimisation
## is done in niter_global steps.
##
## Each step is composed of an optical response fit, followed by a pixel response fit.
## If do_refine_trajectory is different from zero, the trajectory is reoptimised at each step
##
niter_global : {niter_global}
## if do_refine_trajectory=1 the start and end point of the trajectory are free
## if =2 then the start and end point are forced to a trajectory which is obtained
## from a reference scan : the foil scan may be short, so one can use the scan of
## an object to provide another one : key *trajectory_reference_scan_address*
##
do_refine_trajectory : 1
## optional: only if do_refine_trajectory = 2
trajectory_reference_scansequence_address : "demo_newrois.h5:/ROI_FOIL/images/scans/"
trajectory_threshold : 0.1
## if the pixel response function is forced to be symmetrical
simmetrizza : 1
## where the found responses are written
target_file : {target_address}
# target_file : "fitted_responses.h5"
"""
s=input_string.format( scan_address=scan_address ,
target_address=target_address,
niter_optical = response_fit_options[ "niter_optical"],
beta_optical=response_fit_options["beta_optical"],
niter_global=response_fit_options["niter_global"]
)
process_input( s , exploit_slurm_mpi = 1, stop_omp = True)
def resynthetise_scan(
old_scan_address= None,
response_file = None ,
target_address = None,
original_roi_path = None,
resynth_z_square = None
):
input_string = """
superR_recreate_rois :
### we have calculated the responses in responsefilename
### and we want to enlarge the scan by a margin of 3 times
### the original scan on the right and on the left
### ( so for a total expansion factor of 7 )
responsefilename : {response_file}
nex : 0
## the old scan covered by the old rois
old_scan_address : {old_scan_address}
## where the new rois and the new scan are written
target_filename : {target_address}
filter_rois : 0
original_roi_path : {original_roi_path}
resynth_z_square : {resynth_z_square}
"""
s=input_string.format( response_file = response_file ,
target_address = target_address,
old_scan_address=old_scan_address,
original_roi_path = original_roi_path +"/rois_definition",
resynth_z_square = resynth_z_square)
process_input( s , exploit_slurm_mpi = 0, stop_omp = True)
def process_input(s, go=0, exploit_slurm_mpi = 0, stop_omp = False):
open("input_tmp_%d.par"%go, "w").write(s)
background_activator = ""
if (go % BATCH_PARALLELISM ):
background_activator = "&"
prefix=""
if stop_omp:
prefix = prefix +"export OMP_NUM_THREADS=1 ;"
if ( exploit_slurm_mpi==0 ):
comando = (prefix +"mpirun -n 1 XRS_swissknife input_tmp_%d.par %s"%(go, background_activator))
elif ( exploit_slurm_mpi>0 ):
comando = (prefix + "mpirun XRS_swissknife input_tmp_%d.par %s"%(go, background_activator) )
else:
comando = (prefix + "mpirun -n %d XRS_swissknife input_tmp_%d.par %s"%(abs( exploit_slurm_mpi ), go, background_activator) )
res = os.system( comando )
assert (res==0) , " something went wrong running command : " + comando
def select_rois( data_path_template=None, filter_path=None, roi_target_path=None, scans_to_use=None ):
inputstring = """
create_rois_galaxies :
expdata : {data_path_template}
filter_path : {filter_path}
roiaddress : {roi_target_path} # the target destination for rois
scans : {scans_to_use}
""" .format(data_path_template = data_path_template,
filter_path = filter_path,
roi_target_path = roi_target_path,
scans_to_use = scans_to_use
)
process_input( inputstring , exploit_slurm_mpi = 0 )
def extract_sample_givenrois(
roi_path = None,
data_path_template = None,
monitor_path_template = None,
scan_interval = None,
Ydim = None,
Zdim = None,
Edim = None,
signals_target_file = None
):
inputstring = """
loadscan_2Dimages_galaxies :
roiaddress : {roi_path}
expdata : {data_path_template}
monitor_address : {monitor_path_template}
scan_interval : {scan_interval}
Ydim : {Ydim}
Zdim : {Zdim}
Edim : {Edim}
signalfile : {signals_target_file}
""".format( roi_path = roi_path,
data_path_template = data_path_template,
monitor_path_template = monitor_path_template,
scan_interval = scan_interval,
Ydim = Ydim,
Zdim = Zdim,
Edim = Edim,
signals_target_file = signals_target_file)
process_input( inputstring, exploit_slurm_mpi = 0)
def InterpInfo_Esynt_components(peaks_shifts , energy_exp_grid = None, custom_ene_list = None, custom_components = None ):
components = h5py.File( custom_components ,"r")["components"] [()]
info_dict = {}
for i_interval in range(len(components)):
info_dict[str(i_interval)] = {}
info_dict[str(i_interval)]["E"] = custom_ene_list[ i_interval ]
info_dict[str(i_interval)]["coefficients"]={}
for i_n in range(len(energy_exp_grid)):
info_dict[str(i_interval)]["coefficients" ][ str(i_n) ]={}
for roi_num, de in enumerate( peaks_shifts ):
info_dict[str(i_interval)]["coefficients" ][ str(i_n) ][ str(roi_num) ] = 0
for ic in range(len(components)):
for i_interval in range(len(custom_ene_list)-1):
cE1 = custom_ene_list[ i_interval ]
cE2 = custom_ene_list[ i_interval+1 ]
for i_ene, t_ene in enumerate( energy_exp_grid) :
for roi_num, de in enumerate( peaks_shifts ):
if t_ene+de < cE1 or t_ene+de > cE2:
continue
alpha = (cE2-(t_ene+de) )/(cE2+cE1)
info_dict[str(ic)]["coefficients" ][ str(i_ene) ][ str(roi_num) ] += alpha * components[ic][ i_interval ]
info_dict[str(ic)]["coefficients" ][ str(i_ene) ][ str(roi_num) ] += (1-alpha)*components[ic][ i_interval+1 ]
return info_dict
def InterpInfo_Esynt( peaks_shifts , energy_exp_grid = None, custom_ene_list = None):
print(energy_exp_grid)
print(peaks_shifts)
info_dict = {"energy_exp_grid":list(energy_exp_grid), "de_list": list(peaks_shifts)}
N_custom = len(custom_ene_list)
N_data = len( energy_exp_grid )
for i_interval in range(len(custom_ene_list)):
info_dict[str(i_interval)] = {}
info_dict[str(i_interval)]["E"] = custom_ene_list[ i_interval ]
info_dict[str(i_interval)]["coefficients"]={}
for i_n in range(len(energy_exp_grid)):
info_dict[str(i_interval)]["coefficients" ][ str(i_n) ]={}
for roi_num, de in enumerate( peaks_shifts ):
info_dict[str(i_interval)]["coefficients" ][ str(i_n) ][ str(roi_num) ] = 0
for i_interval in range( N_custom -1):
cE1 = custom_ene_list[ i_interval ]
cE2 = custom_ene_list[ i_interval+1 ]
for i_ene, t_ene in enumerate( energy_exp_grid) :
for roi_num, de in enumerate( peaks_shifts ):
if t_ene+de < cE1 or t_ene+de > cE2:
continue
alpha = (cE2-(t_ene+de) )/(cE2-cE1)
info_dict[str(i_interval)]["coefficients" ][ str(i_ene) ][ str(roi_num) ] = alpha
info_dict[str(i_interval+1)]["coefficients"][ str(i_ene) ][ str(roi_num) ] = 1-alpha
return info_dict
def __init__(self, peaks_shifts, interp_file, source, custom_ene_list = None):
volum_list = list(interp_file[source].keys())
scan_num_list = np.array([ int( t.split("_") [1]) for t in volum_list])
ene_list = np.array([ interp_file[source][vn]["scans"]["Scan%03d"%sn ]["motorDict"]["energy"][()] for vn,sn in zip(volum_list, scan_num_list ) ])
print ( " ecco la scannumlist " , scan_num_list)
print (" ecco ene_list", ene_list)
self.volum_list = volum_list
self.scan_num_list = scan_num_list
self.ene_list = ene_list
order = np.argsort( self.ene_list )
self.ene_list = self.ene_list [order]
if custom_ene_list is None:
self.custom_ene_list = self.ene_list
else:
self.custom_ene_list = custom_ene_list
self.scan_num_list = self.scan_num_list [order]
self.volum_list = [ self.volum_list [ii] for ii in order ]
self.interp_file=interp_file
self.source= source
self.peaks_shifts=peaks_shifts
# info_dict={}
# for i in range(NC):
# dizio = {}
# info_dict[str(i)] = {"coefficients":dizio}
# c = model.components_[i]
# np = len(c)
# for j in range(np):
# dizio[str(j)] = float(c[j])
# json.dump(info_dict,open( interpolation_infos_file,"w"), indent=4)
def interpola_Esynt(self, roi_sel=roi_sel ):
print ( " ECCO I DATI ")
print ( self.ene_list )
print ( self.peaks_shifts )
info_dict = {}
for i_intervallo in range(len(self.custom_ene_list)):
info_dict[str(i_intervallo)] = {}
info_dict[str(i_intervallo)]["E"] = self.custom_ene_list[ i_intervallo ]
info_dict[str(i_intervallo)]["coefficients"]={}
for t_vn, t_sn, t_ene in list(zip(self.volum_list, self.scan_num_list, self.ene_list )):
info_dict[str(i_intervallo)]["coefficients" ][ t_vn ]={}
for i_intervallo in range(len(self.custom_ene_list)-1):
cE1 = self.custom_ene_list[ i_intervallo ]
cE2 = self.custom_ene_list[ i_intervallo+1 ]
for t_vn, t_sn, t_ene in list(zip(self.volum_list, self.scan_num_list, self.ene_list ))[0:]:
for roi_num, de in enumerate( self.peaks_shifts ):
if roi_num not in roi_sel:
continue
if t_ene+de < cE1 or t_ene+de > cE2:
continue
alpha = (cE2-(t_ene+de) )/(cE2-cE1)
info_dict[str(i_intervallo)]["coefficients" ][ str(t_vn) ][ str(roi_num) ] = alpha
info_dict[str(i_intervallo+1)]["coefficients"][ str(t_vn) ][ str(roi_num) ] = 1-alpha
return info_dict
def get_reference( roi_path = None,
data_path_template = None,
monitor_path_template = None ,
reference_scan_list = None,
extracted_reference_target_file = None,
isolate_spot_by = None
):
signal_path = extracted_reference_target_file + ":/"
input_string = """
loadscan_2Dimages_galaxies_foilscan :
roiaddress : {roi_path}
expdata : {data_path_template}
signalfile : "{extracted_reference_target_file}"
isolateSpot : {isolate_spot_by}
scan_list : {reference_scan_list}
"""
s=input_string.format(
data_path_template = data_path_template,
reference_scan_list = reference_scan_list,
roi_path = roi_path,
isolate_spot_by = isolate_spot_by,
signal_path = signal_path,
extracted_reference_target_file = extracted_reference_target_file
)
process_input( s , exploit_slurm_mpi = 0)
def get_scalars( iE = None,
signals_file = None,
reference_file = None,
target_file = None
):
inputstring = """
superR_scal_deltaXimages_Esynt :
sample_address : {signals_file}:/E{iE}
delta_address : {reference_file}:/rois_and_reference/Scan0
load_factors_from :
nbin : 1
target_address : {target_file}:/{iE}/scal_prods
""" . format( iE = iE,
signals_file = signals_file ,
reference_file = reference_file ,
target_file = target_file,
)
process_input( inputstring, exploit_slurm_mpi = 0)
def get_volume_Esynt( scalarprods_file = None, interpolation_file = None):