ixstools / xrstools

Commit 6d9a61cf, authored Dec 02, 2020 by myron
small cleaning
parent 35220b5a

Changes: 1 file, XRStools/XRS_swissknife.py
@@ -237,7 +237,6 @@ class Loader_map_as_anydict( object):

class myOrderedDict(collections.OrderedDict):
    def __setitem__(self, a, b):
        ## print "cucu",a,b
        if type(a) == type("") and a in self:
            self[a + "_tagkajs"] = b
        else:
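The override above keeps a colliding string key by storing the new value under a renamed key (the "_tagkajs" suffix) instead of overwriting the old one, so duplicate keys in the parsed input survive. A minimal standalone sketch of the same idea, with a hypothetical suffix and with super().__setitem__ in place of the recursive self-assignment:

import collections

class RenamingDict(collections.OrderedDict):
    """Keep colliding string keys by renaming them instead of overwriting."""
    def __setitem__(self, key, value):
        if isinstance(key, str) and key in self:
            key = key + "_duplicate"   # hypothetical suffix, same role as "_tagkajs"
        super().__setitem__(key, value)

d = RenamingDict()
d["scan"] = 1
d["scan"] = 2                # stored under "scan_duplicate" instead of overwriting
print(list(d.items()))       # [('scan', 1), ('scan_duplicate', 2)]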
@@ -307,7 +306,6 @@ def main():

    mydata = yamlData[key]
    if isinstance(mydata, dict) and "active" in mydata:
        if mydata["active"] == 0:
            # print " continuo "
            continue
    if key != "help":
@@ -582,7 +580,7 @@ def calculate_recenterings(mydata):

    target_filename, target_groupname = split_hdf5_address(target)
    print(bariA_filename, bariA_groupname)
    print(" OPENIN FILE FOR RECENTERING ")
    print(" OPENIN FILE ", bariA_filename, " FOR RECENTERING ")
    h5A_f = h5py.File(bariA_filename, "r")
    h5A = h5A_f[bariA_groupname]
    if bariB_filename == bariA_filename:
@@ -956,8 +954,6 @@ def loadscan_2Dimages(mydata):

    else:
        isolateSpot = 0
    print(" creo oggetto ", energycolumn)
    print(" DIVIDER ", monitor_divider)
    reader = xrs_imaging.oneD_imaging(mydata["expdata"], monitorcolumn=monitorcolumn, monitor_divider=monitor_divider, energycolumn=energycolumn, edfName=edfName, sumto1D=sumto1D,
@@ -978,12 +974,8 @@ def loadscan_2Dimages(mydata):

    mytodo = np.array_split(todo_list, nprocs)[myrank]
    print(mytodo)
    print(" Process ", myrank, " is going to read the following scans ", mytodo)
    maxvalue = 0.0
    if (len(mytodo)):
        maxvalue = reader.loadscan_2Dimages(list(mytodo), scantype=energycolumn, isolateSpot=isolateSpot)
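np.array_split is the work-distribution step here: the scan list is cut into nprocs roughly equal chunks and each MPI rank keeps only its own chunk. A minimal sketch with hypothetical values in place of the mpi4py rank and size:

import numpy as np

todo_list = list(range(11))   # hypothetical scan numbers
nprocs, myrank = 4, 2         # stand-ins for comm.Get_size() / comm.Get_rank()
mytodo = np.array_split(todo_list, nprocs)[myrank]
print(mytodo)                 # [6 7 8] -> the scans this rank will read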
@@ -995,7 +987,7 @@ def loadscan_2Dimages(mydata):

        raise Exception("When using recentering with refinement, parallelism cannot be used")
    if os.path.exists(recenterings_confirmed_filename):
        check_libre(recenterings_confirmed_filename, recenterings_confirmed_groupname)
        print(" APRO IN MODO a ", recenterings_confirmed_filename)
        h5f = h5py.File(recenterings_confirmed_filename, "a")
    else:
        h5f = h5py.File(recenterings_confirmed_filename, "w")
@@ -1031,7 +1023,7 @@ def loadscan_2Dimages(mydata):

    comm.Barrier()
    if myrank == 0:
        print(" TRASCRIVO ")
        if save_also_roi == "for_resynth":
            myfile = h5py.File(filename, 'r+')
            myfile[os.path.join(groupname, "image")] = h5py.SoftLink(os.path.join(os.path.dirname(groupname[:-1]), "rois_definition/image"))
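The h5py.SoftLink assignment makes the scan group point at the shared rois_definition/image dataset rather than duplicating it. A minimal sketch with hypothetical file and group names:

import h5py

with h5py.File("demo.h5", "w") as f:
    f["rois_definition/image"] = [[0, 1], [1, 0]]        # shared ROI image
    scan = f.create_group("scans/Scan000")
    scan["image"] = h5py.SoftLink("/rois_definition/image")
    print(scan["image"][()])                              # resolves through the link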
@@ -1078,7 +1070,6 @@ def loadscan_2Dimages_galaxies(mydata):

    shape, image = xrs_rois.load_rois_fromh5(file[groupname], rois, retrieveImage=True)
    file.close()
    print(" carico maschere ")
    roiob = xrs_rois.roi_object()
    roiob.load_rois_fromMasksDict(rois, newshape=shape, kind="zoom")
    roiob.input_image = image
@@ -1127,14 +1118,13 @@ def loadscan_2Dimages_galaxies(mydata):

        averaged_monitor += monitor
    averaged_monitor = averaged_monitor / len(todo_list)
    for iscan in todo_list:
        iZ = (iscan - scan_interval[0]) % Zdim
        iY = (iscan - scan_interval[0]) // Zdim
        filename, dataname = split_hdf5_address(mydata["expdata"] % iscan)
        print(" working on ", filename, dataname)
        data = np.array(h5py.File(filename, "r")[dataname][:])
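split_hdf5_address is an XRStools helper used throughout this file; the assumed behaviour is that a combined address such as "file.h5:/group/path" is split at the colon into a filename and an in-file group path. A rough sketch of that assumption, not the library implementation:

def split_hdf5_address_sketch(address):
    # hypothetical helper: split "file.h5:/group/path" at the first colon
    filename, _, groupname = address.partition(":")
    return filename, groupname

print(split_hdf5_address_sketch("scan_0042.h5:/entry/measurement/data"))
# ('scan_0042.h5', '/entry/measurement/data')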
@@ -1157,7 +1147,7 @@ def loadscan_2Dimages_galaxies(mydata):

    for iE in range(Edim):
        egroup = "E%d/" % iE
        scangroup = "Scan%d/" % iZ
        print(" iE iY, shape", iE, iY, sliced.shape, hf[egroup + scangroup + roigroup + "matrix"].shape)
        hf[egroup + scangroup + roigroup + "matrix"][iY] = sliced[iE]
    hf.close()
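Each pass of the loop writes one row (index iY) of a pre-created E%d/Scan%d/<roi>/matrix dataset. A minimal sketch of that row-wise HDF5 write, with hypothetical names and sizes:

import h5py
import numpy as np

with h5py.File("demo_rows.h5", "w") as hf:
    hf.create_dataset("E0/Scan3/ROI00/matrix", shape=(5, 4), dtype="f")
    sliced_iE = np.arange(4, dtype="f")                        # stand-in for sliced[iE]
    hf["E0/" + "Scan3/" + "ROI00/" + "matrix"][2] = sliced_iE  # write row iY == 2
    print(hf["E0/Scan3/ROI00/matrix"][2])                      # [0. 1. 2. 3.]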
@@ -1195,7 +1185,6 @@ def loadscan_2Dimages_galaxies_foilscan(mydata):

    shape, image = xrs_rois.load_rois_fromh5(file[groupname], rois, retrieveImage=True)
    file.close()
    print(" carico maschere ")
    roiob = xrs_rois.roi_object()
    roiob.load_rois_fromMasksDict(rois, newshape=shape, kind="zoom")
    roiob.input_image = image
@@ -1391,11 +1380,11 @@ def extract_spectra(mydata):

        raise ValueError("Key %s not present in file %s" % (reference_groupname, reference_file))
    h5 = h5f[reference_groupname]
    print(" FILTRO ", list(h5.keys()))
    rois_keys = filterRoiList(h5.keys(), prefix="")
    print(reference_file, reference_groupname, list(h5.keys()))
    print("CONFRONTO roiskeys ", rois_keys, rois_keys_orig)
    rois_keys = list(set.intersection(set(rois_keys), set(rois_keys_orig)))
    print(" After filtering the list of rois to be used is ", rois_keys)
    incidentE = None
    if "motorDict/energy" in h5:
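The filtering step keeps only the ROI keys that appear both in the reference group and in the original list. A minimal sketch with hypothetical key lists:

rois_keys = ["0", "1", "5"]
rois_keys_orig = ["1", "5", "7"]
rois_keys = list(set.intersection(set(rois_keys), set(rois_keys_orig)))
print(sorted(rois_keys))   # ['1', '5']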
@@ -1407,7 +1396,7 @@ def extract_spectra(mydata):

    else:
        mm = None
    print(reference_file, reference_groupname, list(h5[k].keys()))
    zscale = h5[k]["xscale"][()] * 1000.0
    mask = h5rois["ROI%02d" % int(k)]["mask"][:]
    cummask = np.cumsum(mask, axis=0)
@@ -1446,17 +1435,14 @@ def extract_spectra(mydata):

    for scan_num, extra in zip(scans, extratags):
        sample = {}
        scan_name = "scans/Scan%03d" % scan_num
        print(" FILE was ", sample_file)
        print(" sample_groupname ", sample_groupname)
        print(" scan_name ", scan_name)
        h5 = h5_sample_group[scan_name]
        scan_energy_0 = h5["motorDict/energy"][()]
        print(" ROISKEYS ", rois_keys)
        denominator = h5[rois_keys[0]]["monitor"][()] / (float(h5[rois_keys[0]]["monitor_divider"][()]))
        for k in rois_keys:
            print(" KKKKK ", k)
            mm = h5[k]["matrix"][:]
            zscale = h5[k]["xscale"][:] * 1000
@@ -1722,8 +1708,7 @@ def create_rois(mydata):

    w4r = roiSelectionWidget.mainwindow(layout=layout)
    if image4roi is not None:
        if filterMask is not None:
            print(image4roi)
            print(filterMask)
            image4roi = image4roi * filterMask
        w4r.showImage(image4roi, xrs_rois.get_geo_informations(image4roi.shape + (layout,)))
@@ -1736,7 +1721,7 @@ def create_rois(mydata):

    w4r.show()
    app.exec_()
    print(" USCITA ", w4r.isOK)
    if not w4r.isOK:
        sys.stderr.write('ROI CREATION SEEMS TO HAVE BEEN STOPPED BY USER')
        sys.exit(1)
@@ -1860,7 +1845,7 @@ def create_rois_galaxies(mydata):

    w4r.show()
    app.exec_()
    print(" USCITA ", w4r.isOK)
    if not w4r.isOK:
        sys.stderr.write('ROI CREATION SEEMS TO HAVE BEEN STOPPED BY USER')
        sys.exit(1)
@@ -2376,7 +2361,7 @@ def superR_getVolume_fullfit(mydata):

        solution = None
    else:
        solution_address = str(mydata["optional_solution"])
        print("solution_address ", solution_address)
        if solution_address == "None" or solution_address is None or solution_address.strip() == "":
            solution = None
        else:
@@ -2406,7 +2391,7 @@ def superR_getVolume_fullfit(mydata):

        if XDIM is None:
            XDIM = m.shape[0]
        else:
            assert (XDIM == m.shape[0])
            assert (XDIM == m.shape[0]), "The probes (references) don't have the same X length (scan length). One is %s, another is %s " % (XDIM, m.shape[0])
    ## DELTA <<<<<<<<<<<<<<<<<<<<<
    ## #############################
    h5f.close()
@@ -2437,7 +2422,9 @@ def superR_getVolume_fullfit(mydata):

        if ro in h5[zscan_keys[0]]:
            m = h5[zscan_keys[0]][ro]["matrix"][:]
            if YDIM is not None:
                assert (YDIM == m.shape[0])
                ## we take the Y length from the first roi of the first scan:
                assert (YDIM == m.shape[0]), "The probes (references) don't have the same X length (scan length). One is %s, another is %s " % (XDIM, m.shape[0])
                ## we take the Y length from the first roi of the first scan:
                ## this length is supposed to be the same for all scans
            else:
                YDIM = m.shape[0]
@@ -2446,7 +2433,7 @@ def superR_getVolume_fullfit(mydata):

            del sonde[ro]
            del integrated_images[ro]
    for ro in rois_to_be_removed:
        print(" RIMUOVO ", ro)
        roi_keys.remove(ro)
    if YDIM is None:
@@ -2531,9 +2518,6 @@ def superR_getVolume_fullfit(mydata):

    else:
        h5 = h5py.File(target_filename, "w")
    print(h5.keys())
    print(target_groupname)
    print(target_groupname in h5)
    if target_groupname in h5:
        del h5[target_groupname]
@@ -2568,7 +2552,6 @@ def superR_getVolume_Esynt(mydata):

    scalprods_filename, scalprods_groupname = split_hdf5_address(scalprods_address)
    print(" GROUPNAME ", scalprods_groupname)
    output_prefix = mydata["output_prefix"]
@@ -2579,8 +2562,7 @@ def superR_getVolume_Esynt(mydata):

    vkeys = list(h5.keys())
    vkeys.sort()
    print(vkeys)
    print(h5[vkeys[0]]["scal_prods"].keys())
    DS = []
    DD = []
@@ -2596,17 +2578,16 @@ def superR_getVolume_Esynt(mydata):

            roi_keys = h5[k]["scal_prods"]["roi_keys"][()]
        else:
            tmp = h5[k]["scal_prods"]["roi_keys"][()]
            assert abs((tmp - roi_keys)).sum() == 0
            assert abs((tmp - roi_keys)).sum() == 0
    DS = np.array(DS, "f")
    DD = np.array(DD, "f")
    SS = np.array(SS, "f")
    print(" ROI_keys ", roi_keys)
    h5f.close()
    NV, NROI, DIMZ, DIMY, DIMX = DS.shape
    print(NV, NROI, DIMZ, DIMY, DIMX)
    print(" NV, NROI, DIMZ,DIMY,DIMX ", NV, NROI, DIMZ, DIMY, DIMX)
    print(" DS SHAPE ", DS.shape)
    print(" DD SHAPE ", DD.shape)
@@ -2626,15 +2607,13 @@ def superR_getVolume_Esynt(mydata):

        roi_map[str(k)] = i
    print(" ROIMAP ", roi_map)
    id = interpolation_dict
    for iE in range(NE):
        ide = id[str(iE)]["coefficients"]
        for iv, vk in enumerate(vkeys):
            idev = ide[vk]
            used_rois = list(idev.keys())
            print("used rois ", used_rois)
            for rk, c in idev.items():
                if (rk in roi_map):
                    coefficients[iE, iv, roi_map[rk]] = c
@@ -2746,9 +2725,7 @@ def superR_getVolume(mydata):

    else:
        h5 = h5py.File(target_filename, "w")
    print(h5.keys())
    print(target_groupname)
    print(target_groupname in h5)
    if target_groupname in h5:
        del h5[target_groupname]
@@ -2867,7 +2844,7 @@ def superR_scal_deltaXimages(mydata):

        solution = None
    else:
        solution_address = str(mydata["optional_solution"])
        print("solution_address ", solution_address)
        if solution_address == "None" or solution_address is None or solution_address.strip() == "":
            solution = None
        else:
@@ -2916,7 +2893,7 @@ def superR_scal_deltaXimages(mydata):

        if XDIM is None:
            XDIM = m.shape[0]
        else:
            assert (XDIM == m.shape[0])
            assert (XDIM == m.shape[0]), "The probes (references) don't have the same X length (scan length). One is %s, another is %s " % (XDIM, m.shape[0])
    ## DELTA <<<<<<<<<<<<<<<<<<<<<
    ## #############################
    h5f.close()
@@ -2970,8 +2947,8 @@ def superR_scal_deltaXimages(mydata):

            m = h5[zscan_keys[0]][ro]["matrix"][:]
            if YDIM is not None:
                assert (YDIM == m.shape[0])
                ## we take the Y length from the first roi of the first scan:
                ## this length is supposed to be the same for all scans
                assert (YDIM == m.shape[0]), " "
            else:
                YDIM = m.shape[0]
        else:
@@ -2981,7 +2958,7 @@ def superR_scal_deltaXimages(mydata):

    for ro in rois_to_be_removed:
        print(" RIMUOVO ", ro)
        roi_keys.remove(ro)
    if YDIM is None:
@@ -3087,7 +3064,7 @@ def superR_scal_deltaXimages(mydata):

    new_roi_keys = []
    for ok in roi_keys:
        if ok not in my_roi_keys:
            print(" MANCA ", ok, end={False: "", True: "\n"}[ok == roi_keys[-1]])
            del integrated_images[ok]
        else:
            new_roi_keys.append(ok)
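The end={False: "", True: "\n"}[...] construct prints the missing keys on one line and emits the newline only after the last key of roi_keys. A minimal sketch of the same idiom with hypothetical lists:

roi_keys = ["2", "4", "9"]
my_roi_keys = ["4"]
for ok in roi_keys:
    if ok not in my_roi_keys:
        # newline only when ok is the last element of roi_keys
        print(" missing ", ok, end={False: "", True: "\n"}[ok == roi_keys[-1]])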
@@ -3143,14 +3120,14 @@ def superR_scal_deltaXimages(mydata):

    if nprocs > 1:
        for n in list(integrated_images.keys()):
            if myrank:
                print(myrank, "A ", integrated_images[n][0].dtype, integrated_images[n][0].shape)
                comm.Reduce([integrated_images[n][0], MPI.DOUBLE], None, op=MPI.SUM, root=0)
                print(myrank, "B ", integrated_images[n][1].dtype, integrated_images[n][1].shape)
                comm.Reduce([integrated_images[n][1], MPI.DOUBLE], None, op=MPI.SUM, root=0)
            else:
                print(myrank, " A ", integrated_images[n][0].dtype, integrated_images[n][0].shape)
                comm.Reduce([integrated_images[n][1], MPI.DOUBLE], [integrated_images[n][0], MPI.DOUBLE], op=MPI.SUM, root=0)
                print(myrank, " B ", integrated_images[n][1].dtype, integrated_images[n][1].shape)
                comm.Reduce([np.array(integrated_images[n][1]), MPI.DOUBLE], [integrated_images[n][1], MPI.DOUBLE], op=MPI.SUM, root=0)
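In this Reduce pattern the non-root ranks pass None as the receive buffer and only rank 0 collects the element-wise sum. A minimal, self-contained sketch of that call (run under mpirun with a few ranks):

from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
myrank = comm.Get_rank()

partial = np.full((2, 3), float(myrank))                # stand-in for integrated_images[n][0]
total = np.zeros_like(partial) if myrank == 0 else None  # only root receives
comm.Reduce([partial, MPI.DOUBLE], total, op=MPI.SUM, root=0)
if myrank == 0:
    print(total)                                         # element-wise sum over all ranks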
@@ -3181,13 +3158,13 @@ def superR_scal_deltaXimages(mydata):

        h5["scalDD"][:] = 0
    for n in list(integrated_images.keys()):
        print(" in key ", n)
        B = integrated_images[n][1]
        A = integrated_images[n][0]
        # B=B.sum(axis=0)
        pesiA = A.sum(axis=0)
        pesiB = B.sum(axis=0)
        ## print(" pesi ", pesiA, pesiB)
        medieA = (np.arange(A.shape[0])[:, None] * A).sum(axis=0) / pesiA
        medieB = (np.arange(B.shape[0])[:, None] * B).sum(axis=0) / pesiB
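The medieA and medieB lines compute, for each column, the intensity-weighted mean row index (a barycentre along axis 0). A small numeric sketch of that expression:

import numpy as np

A = np.array([[0.0, 1.0],
              [2.0, 1.0],
              [2.0, 0.0]])
pesiA = A.sum(axis=0)                                              # per-column total weight
medieA = (np.arange(A.shape[0])[:, None] * A).sum(axis=0) / pesiA  # weighted mean row index
print(medieA)                                                      # [1.5 0.5]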
@@ -3441,7 +3418,7 @@ def superR_scal_deltaXimages_Esynt(mydata):

    for ro in rois_to_be_removed:
        print(" RIMUOVO ", ro)
        roi_keys.remove(ro)
    if YDIM is None:
@@ -3460,7 +3437,7 @@ def superR_scal_deltaXimages_Esynt(mydata):

    new_roi_keys = []
    for ok in roi_keys:
        if ok not in my_roi_keys:
            print(" MANCA ", ok, end={False: "", True: "\n"}[ok == roi_keys[-1]])
            del integrated_images[ok]
        else:
            new_roi_keys.append(ok)
@@ -3480,7 +3457,7 @@ def superR_scal_deltaXimages_Esynt(mydata):

    for i, rk in enumerate(roi_keys):
        if rk in loaded_fattori:
            print(" FATTORE ", rk, " ", loaded_fattori[rk])
            fattori[rk] *= loaded_fattori[rk]
        else:
            fattori[rk] = 0
@@ -3497,7 +3474,7 @@ def superR_scal_deltaXimages_Esynt(mydata):

    scalSS = np.zeros([Nrois, XDIM, XDIM], "d")
    for i, rk in enumerate(roi_keys):
        print(" ================= ", i, rk)
        if i % nprocs == myrank:
            probes = sonde[rk]
            ## Consider that, below, factor is applied to the probe to better adapt it to the sample strength
@@ -3553,14 +3530,14 @@ def superR_scal_deltaXimages_Esynt(mydata):

    if nprocs > 1:
        for n in list(integrated_images.keys()):
            if myrank:
                print(myrank, "A ", integrated_images[n][0].dtype, integrated_images[n][0].shape)
                comm.Reduce([integrated_images[n][0], MPI.DOUBLE], None, op=MPI.SUM, root=0)
                print(myrank, "B ", integrated_images[n][1].dtype, integrated_images[n][1].shape)
                comm.Reduce([integrated_images[n][1], MPI.DOUBLE], None, op=MPI.SUM, root=0)
            else:
                print(myrank, " A ", integrated_images[n][0].dtype, integrated_images[n][0].shape)
                comm.Reduce([integrated_images[n][1], MPI.DOUBLE], [integrated_images[n][0], MPI.DOUBLE], op=MPI.SUM, root=0)
                print(myrank, " B ", integrated_images[n][1].dtype, integrated_images[n][1].shape)
                comm.Reduce([np.array(integrated_images[n][1]), MPI.DOUBLE], [integrated_images[n][1], MPI.DOUBLE], op=MPI.SUM, root=0)
@@ -3592,7 +3569,7 @@ def superR_scal_deltaXimages_Esynt(mydata):

        h5["scalSS"][:] = 0
    for n in list(integrated_images.keys()):
        print(" in key ", n)
        B = integrated_images[n][1]
        A = integrated_images[n][0]
        # B=B.sum(axis=0)
@@ -3632,7 +3609,7 @@ def superR_scal_deltaXimages_Esynt(mydata):

    h5f.require_group(target_groupname)
    h5 = h5f[target_groupname]
    print(" MY KEYS ", roi_keys)
    if myrank == 0:
        h5["roi_keys"] = np.array(list(map(int, roi_keys)))