Skip to content
GitLab
Menu
Projects
Groups
Snippets
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
ixstools
xrstools
Commits
00233a02
Commit
00233a02
authored
Dec 01, 2020
by
myron
Browse files
restructuring the volume extraction and xrs_swissknife
parent
23322b8e
Changes
4
Hide whitespace changes
Inline
Side-by-side
XRStools/XRS_swissknife.py
View file @
00233a02
...
...
@@ -191,6 +191,12 @@ from yaml.constructor import Constructor, MappingNode, ConstructorError
from
XRStools
import
fit_spectra
from
XRStools
import
reponse_percussionelle
def check_allowed(mydata, allowed_keys):
    """Validate that every key of *mydata* is one of *allowed_keys*.

    Parameters
    ----------
    mydata : dict
        Mapping whose keys are checked.
    allowed_keys : sequence
        Collection of accepted key names.

    Raises
    ------
    ValueError
        On the first key of *mydata* that is not contained in
        *allowed_keys*; the offending key and the allowed set are
        named in the message.
    """
    for k in mydata.keys():
        # fail fast on the first unexpected key
        if k not in allowed_keys:
            # fixed: the original had a stray ':' token between the two
            # string operands, breaking the concatenation expression
            raise ValueError(" key " + str(k) + " not in allowed keys :" + str(allowed_keys))
def dump_anydict_as_map(anydict):
    """Register *anydict* with PyYAML so its instances are dumped as maps.

    Delegates to ``yaml.add_representer`` with the module-level
    ``_represent_dictorder`` representer.
    """
    yaml.add_representer(anydict, _represent_dictorder)
...
...
@@ -352,81 +358,6 @@ def help(yamlData):
print
(
func
.
__doc__
)
def Extraction(mydata):
    """
    **Extraction**

    Extract the interesting signal after removal of the Compton profile,
    linear baselines, Pearson profile.

    Example ::

      Extraction :
         active : 1
         dataadress   : "pippo.h5:/ROI_A/loaded_datas"       # where load_scans wrote data
         HFaddress    : "pippo.h5:/ROI_A/loaded_datas/HF_O"  # where compton profiles have been calculated
         # prenormrange : [ 5 , .inf ]

         analyzerAverage :               # averaging over analysers
             active : 1
             which : [0,11 , 36,59 ]
             errorweighing  : False

         removeLinearAv :                # fit a linear baseline and remove it
             active  : 1
             region1 : [520.0,532.0]
             region2 : None
             ewindow : 100
             scale : 1

         removePearsonAv:                # fit a Pearson and remove it
             active  : 0
             region1 : [520.0,532.0]
             region2 : None
             guess :
                 Peak_position : 600.0
                 FWHM          : 10
                 Shape         : "Lorentzian"
                 Peak_intensity: 100.0
                 linear_slope      : 1
                 linear_background : 0
                 scaling_factor    : 1

         view   : 0
         target : "myextraction"  # path relative to dataadress where extracted signal will be written
    """
    # recover the reader and the Hartree-Fock spectrum saved by earlier steps
    reader, filename, groupname = read_reader(mydata, name="dataadress")
    HF = read_HF(mydata, name="hfspectrum_address")
    extr = extraction.extraction(reader, HF)

    # optional averaging over the selected analysers
    if "analyzerAverage" in mydata:
        aa_data = mydata["analyzerAverage"]
        if gvord(aa_data, "active", True):
            which = aa_data["which"]
            errorweighing = gvord(aa_data, "errorweighing", False)
            extr.analyzerAverage(which, errorweighing=errorweighing)

    # optional removal of a linear baseline
    if "removeLinearAv" in mydata:
        rla_data = mydata["removeLinearAv"]
        if gvord(rla_data, "active", True):
            region1 = rla_data["region1"]
            region2 = gvord(rla_data, "region2", None)
            ewindow = gvord(rla_data, "ewindow", 100)
            scale = gvord(rla_data, "scale", 100)
            extr.removeLinearAv(
                region1,
                region2=region2,
                ewindow=ewindow,
                scale=scale,
                view=gvord(mydata, "view", False),
            )
    print(gvord(mydata, "view", False))

    # write the extracted signal below the data group, same file
    groupname = groupname + "/" + mydata["target"]
    check_libre(filename, groupname)
    extr.save_state_hdf5(filename, groupname, comment=inputtext)
def
split_hdf5_address
(
dataadress
):
...
...
@@ -440,233 +371,188 @@ but : was not found
return
filename
,
groupname
def read_HF(mydata, name="hfspectrum_address"):
    """Restore a previously saved Hartree-Fock spectrum from HDF5.

    Looks up ``mydata[name]`` (a ``"file.h5:/group"`` style address),
    splits it into file and group parts, and loads an ``HFspectrum``
    object from there.
    """
    hf_address = mydata[name]
    fname, gname = split_hdf5_address(hf_address)
    # build an empty shell (initialise=False skips any computation),
    # then populate it from the saved HDF5 state
    result = theory.HFspectrum(None, None, None, initialise=False)
    result.load_state_hdf5(fname, gname)
    return result
def HFspectrum(mydata):
    """
    **HFspectrum**

    function for building S(q,w) from tabulated Hartree-Fock Compton profiles to use in
    the extraction algorithm.

    EXAMPLE ::

       dataadress : "hdf5filename:full_nameof_signals_group"  # where load_scans wrote data
       formulas   : ['O']      # list of strings of chemical sum formulas of which the sample is made up
       concentrations : [1.0]  # list of concentrations of how the different chemical formulas are mixed (sum should be 1)
       correctasym    : [[0.0,0.0,0.0]]  # single value or list of scaling values for the HR-correction to
                                         # the 1s, 2s, and 2p shells. one value per element in the list of formulas
       hfspectrum_address : "nameofgroup" # Target group for writing Relative to dataadress (and in the same file)!!!!
    """
    reader, filename, groupname = read_reader(mydata, name="dataadress")
    hf = theory.HFspectrum(
        reader,
        mydata["formulas"],
        mydata["concentrations"],
        mydata["correctasym"],
    )
    # target group is relative to the data group, written into the same file
    groupname = groupname + "/" + mydata["hfspectrum_address"]
    check_libre(filename, groupname)
    hf.save_state_hdf5(filename, groupname, comment=inputtext)
# def load_scans(mydata):
# """
# **load_scans**
def load_scans(mydata):
    """
    **load_scans**

    This command harvest the selected signals.
    the instructions on the scans to be taken must be in the form( as submembers of load_scans ) ::

       load_scans :
           roiaddress : "hdf5filename:nameofroigroup"  # the same given in create_rois
           expdata    : "absolutepathtoaspecfile"      # this points to a spec file

           elastic_scans : [623]
           fine_scans    : [626,630,634,638,642]
           n_loop        : 4
           long_scan     : 624

           signaladdress : "nameofsignalgroup"  # Target group for writing Relative to ROI (and in the same file)!!!!

           #############################################################
           # OPTIONALS
           #
           order : [0,1,2,3,4,5]  # list of integers (0-5) which describes the order of modules in which the
                                  # ROIs were defined (default is VD, VU, VB, HR, HL, HB; i.e. [0,1,2,3,4,5])
           rvd : -41    # mean tth angle of HL module (default is 0.0)
           rvu : 85     # mean tth angle of HR module (default is 0.0)
           rvb : 121.8  # mean tth angle of HB module (default is 0.0)
           rhl : 41.0   # mean tth angle of VD module (default is 0.0)
           rhr : 41.0   # mean tth angle of VU module (default is 0.0)
           rhb : 121.8  # mean tth angle of VB module (default is 0.0)
    """
    roiaddress = None
    roiaddress = mydata["roiaddress"]
    filename, groupname = split_hdf5_address(roiaddress)

    # read the ROI definitions back from the HDF5 file
    file = h5py.File(filename, "r")
    rois = {}
    shape = xrs_rois.load_rois_fromh5(file[groupname], rois)
    file.close()

    roiob = xrs_rois.roi_object()
    roiob.load_rois_fromMasksDict(rois, newshape=shape, kind="zoom")

    reader = xrs_read.read_id20(mydata["expdata"], monitorcolumn='kapraman')
    reader.set_roiObj(roiob)

    elastic_scans = mydata["elastic_scans"][:]
    fine_scans = mydata["fine_scans"][:]
    n_loop = mydata["n_loop"]
    long_scan = mydata["long_scan"]

    reader.loadelasticdirect(elastic_scans)
    reader.loadloopdirect(fine_scans, n_loop)
    print(" LUNGO ")
    reader.loadlongdirect(long_scan)

    reader.getspectrum()
    reader.geteloss()
    reader.gettths(
        rvd=gvord(mydata, "rvd", 0.0),
        rvu=gvord(mydata, "rvu", 0.0),
        rvb=gvord(mydata, "rvb", 0.0),
        rhl=gvord(mydata, "rhl", 0.0),
        rhr=gvord(mydata, "rhr", 0.0),
        rhb=gvord(mydata, "rhb", 0.0),
        order=gvord(mydata, "order", [0, 1, 2, 3, 4, 5]),
    )

    # write the harvested signals below the ROI group, in the same file
    groupname = groupname + "/" + mydata["signaladdress"]
    check_libre(filename, groupname)
    reader.save_state_hdf5(filename, groupname, comment=inputtext)
def volume_from_2Dimages(mydata):
    """
    Assemble a 3D volume from the 2D images of consecutive scans.

      imagesaddress : "test_imaging.hdf5:/ROI_A/images"  # where the data have been saved

      scan_interval : [372,375]  # OPTIONAL : can be shorter then the scans effectively present in the file
      roi_n         : 0          # OPTIONAL. if not given, the first non empty found roi. Starts from 0

      imagesaddress : "myfile.hdf5:/path/to/hdf5/data"   # OPTIONAL. the target destination for volume.
                                                         # if not given mayavi is launched on the fly.
    """
    reader = xrs_imaging.oneD_imaging("bidon", "bidon", "bidon", "bidon")

    imagesaddress = mydata["imagesaddress"]
    filename, groupname = split_hdf5_address(imagesaddress)
    reader.load_state_hdf5(filename, groupname)

    scan_names = list(reader.twoDimages.keys())
    # BUGFIX: the original used map(int, ...), which on Python 3 is a lazy
    # iterator; np.argsort over it and the scan_ids[idx] indexing below both
    # require a materialized list.
    scan_ids = [int(name[4:]) for name in scan_names]
    order = np.argsort(scan_ids)

    if 'scan_interval' not in mydata:
        scan_names = [scan_names[idx] for idx in order]
    else:
        scan_interval = mydata['scan_interval']
        print(order)
        print(scan_names)
        print(scan_interval)
        # keep only scans whose id falls inside [start, stop)
        scan_names = [scan_names[idx] for idx in order
                      if scan_interval[0] <= scan_ids[idx] < scan_interval[1]]

    first_name = scan_names[0]
    roi_n = 0
    if 'roi_n' not in mydata:
        # scan for the first ROI whose matrix is non-empty
        while 1:
            shape = reader.twoDimages[first_name][roi_n].matrix.shape
            if shape != (0,):
                break
            roi_n += 1
    else:
        roi_n = mydata["roi_n"]
        shape = reader.twoDimages[first_name][roi_n].matrix.shape

    # stack one 2D image per scan along the third axis
    Volume = np.zeros((shape[0], shape[1], len(scan_names)))
    for i, scanname in enumerate(scan_names):
        Volume[:, :, i] = reader.twoDimages[scanname][roi_n].matrix

    if 'volumeaddress' in mydata:
        filename, groupname = split_hdf5_address(mydata['volumeaddress'])
        h5 = h5py.File(filename, 'a')
        # NOTE(review): check_libre receives the open file object here while
        # other call sites pass the file name — confirm this is intended.
        check_libre(h5, groupname)
        h5[groupname] = Volume
        h5.close()
        h5 = None
    else:
        # no target given: visualize on the fly
        view_Volume_myavi_(Volume)
def view_Volume_myavi(mydata):
    """
    Load a volume from HDF5 and display it with mayavi.

      volume_address : "myfile.hdf5:/path/to/hdf5/group"  # the target destination for volume.
    """
    fname, gname = split_hdf5_address(mydata['volume_address'])
    h5 = h5py.File(fname, 'r')
    Volume = h5[gname][:]
    h5.close()
    h5 = None

    # both keys are mandatory here (KeyError if missing)
    isolevel = mydata['isolevel']
    opacity = mydata['opacity']
    view_Volume_myavi_(Volume, isolevel, opacity)
def view_Volume_myavi_(V, isolevel, opacity):
    """Render *V* twice with mayavi: an iso-surface, then a volume rendering."""
    print(" IN view ")
    # iso-surface placed at min + isolevel * peak-to-peak
    field = mlab.pipeline.scalar_field(V)
    mlab.pipeline.iso_surface(field, contours=[V.min() + isolevel * V.ptp(), ], opacity=opacity)
    mlab.show()
    # second pass: full volume rendering with a fixed value window
    field = mlab.pipeline.scalar_field(V)
    mlab.pipeline.volume(field, vmin=1000.0, vmax=2000.0)
    mlab.show()
def
calculate_recenterings
(
mydata
):
...
...
@@ -683,6 +569,8 @@ def calculate_recenterings(mydata):
target: "recenterings.h5:/recenterings4rois"
#
"""
allowed_keys
=
[
"bariA"
,
"bariB"
,
"target"
,
]
check_allowed_keys
(
mydata
,
allowed_keys
)
bariA
=
mydata
[
"bariA"
]
bariA_filename
,
bariA_groupname
=
split_hdf5_address
(
bariA
)
...
...
@@ -736,234 +624,229 @@ def calculate_recenterings(mydata):
h5f
=
None
def
sum_scans2maps
(
mydata
):
#
def sum_scans2maps(mydata):
roiaddress
=
None
roiaddress
=
mydata
[
"mask_file"
]
#
roiaddress=None
#
roiaddress = mydata["mask_file"]
filename
,
groupname
=
split_hdf5_address
(
roiaddress
)
#
filename, groupname = split_hdf5_address( roiaddress)
file
=
h5py
.
File
(
filename
,
"r"
)
rois
=
{}
shape
=
xrs_rois
.
load_rois_fromh5
(
file
[
groupname
],
rois
)
file
.
close
()
#
file= h5py.File(filename,"r")
#
rois = {}
#
shape=xrs_rois.load_rois_fromh5(file[groupname],rois)
#
file.close()
specfile_name
=
mydata
[
"spec_file"
]
Scan_Variable
=
mydata
[
"Scan_Variable"
]
Motor_Variable
=
mydata
[
"Motor_Variable"
]
#
specfile_name = mydata["spec_file"]
#
Scan_Variable = mydata["Scan_Variable"]
#
Motor_Variable = mydata["Motor_Variable"]
specfile
=
SpecIO
.
Specfile
(
specfile_name
)
#
specfile = SpecIO.Specfile( specfile_name )
dirname
=
os
.
path
.
dirname
(
specfile_name
)
basename
=
os
.
path
.
basename
(
specfile_name
)
#
dirname = os.path.dirname( specfile_name )
#
basename = os.path.basename( specfile_name )
scans_infos
=
[]
signals
=
[]
#
scans_infos = []
#
signals = []
s1
=
int
(
mydata
[
"first_scan"
])
s2
=
int
(
mydata
[
"last_scan"
])
#
s1 = int(mydata["first_scan"])
#
s2 = int(mydata["last_scan"])
roi_names
=
list
(
rois
.
keys
())
roi_list
=
[
rois
[
k
]
for
k
in
roi_names
]
#
roi_names = list(rois.keys())
#
roi_list = [ rois[k] for k in roi_names ]
for
i
in
range
(
s1
,
s2
+
1
):
# print " SCAN lettura " , i
scan
=
specfile
.
select
(
str
(
i
))
scan_data
=
scan
.
data
()
#
for i in range(s1,s2+1):
#
# print " SCAN lettura " , i
#
scan = specfile.select(str(i))
#
scan_data = scan.data()
scan_themotor
=
scan
.
motorpos
(
Motor_Variable
)
scan_othermotors
=
[
scan
.
motorpos
(
name
)
for
name
in
scan
.
allmotors
()
if
name
!=
Motor_Variable
]
#
scan_themotor = scan.motorpos( Motor_Variable )
#
scan_othermotors = [ scan.motorpos( name ) for name in scan.allmotors() if name != Motor_Variable ]
othermotorsname
=
[
name
for
name
in
scan
.
allmotors
()
if
name
!=
Motor_Variable
]
#
othermotorsname = [name for name in scan.allmotors() if name != Motor_Variable]
labels
=
scan
.
alllabels
()
scan_variable
=
scan_data
[
labels
.
index
(
Scan_Variable
)
,
:]
scan_ccdnos
=
scan_data
[
labels
.
index
(
"ccdno"
)
,
:].
astype
(
"i"
)
signal
=
[]
for
no
in
scan_ccdnos
:
print
(
" opening image "
,
os
.
path
.
join
(
dirname
,
"edf"
,
basename
+
"_"
+
str
(
no
)
+
".edf"
))
data
=
fabio
.
open
(
os
.
path
.
join
(
dirname
,
"edf"
,
basename
+
"_"
+
str
(
no
)
+
".edf"
)
).
data
tok
=
[
(
data
[
corner
[
0
]:
corner
[
0
]
+
mask
.
shape
[
0
],
corner
[
1
]:
corner
[
1
]
+
mask
.
shape
[
1
]]
*
mask
).
sum
()
for
corner
,
mask
in
roi_list
]
signal
.
append
(
tok
)
# print " OK "
#
labels = scan.alllabels()
#
scan_variable = scan_data[ labels.index( Scan_Variable ) , :]
#
scan_ccdnos = scan_data[ labels.index( "ccdno" ) , :].astype("i")
#
signal = []
#
for no in scan_ccdnos:
#
print( " opening image ", os.path.join( dirname , "edf", basename+"_"+str(no)+".edf"))
#
data = fabio.open( os.path.join( dirname , "edf", basename+"_"+str(no)+".edf" ) ).data
#
tok = [ (data[corner[0]:corner[0]+mask.shape[0], corner[1]:corner[1]+mask.shape[1]]*mask).sum() for corner, mask in roi_list ]
#
signal.append(tok)
#
# print " OK "
# print signal
# print " Appendo " , signal
#
# print signal
#
# print " Appendo " , signal
signals
.
append
(
np
.
array
(
signal
))
# print "OK "
scans_infos
.
append
(
[
scan_themotor
,
scan_othermotors
,
scan_variable
]
)
#
signals.append(np.array(signal))
#
# print "OK "
#
scans_infos.append( [scan_themotor, scan_othermotors, scan_variable ] )
# print " DONE scan " , i
#
# print " DONE scan " , i
done
=
np
.
zeros
(
len
(
scans_infos
)
,
"i"
)
DONE
=
{}
synthes
=
{}
for
kscan
in
range
(
len
(
scans_infos
)
):
# print " kscan " , kscan
DONE
[
kscan
]
=
0
if
done
[
kscan
]:
continue
else
:
res
=
np
.
array
(
signals
[
kscan
])
#
done = np.zeros( len(scans_infos) ,"i")
#
DONE={}
#
synthes = {}
#
for kscan in range(len(scans_infos) ):
#
# print " kscan " , kscan
#
DONE[kscan]=0
#
if done[kscan]:
#
continue
#
else:
#
res = np.array(signals[kscan])
done
[
kscan
]
=
1
kinfos
=
scans_infos
[
kscan
]
kM
,
kOM
,
kV
=
kinfos
#
done[kscan]=1
#
kinfos = scans_infos[kscan]
#
kM, kOM, kV = kinfos
for
oscan
in
range
(
len
(
scans_infos
))
:
# print " oscan " , oscan
if
done
[
oscan
]:
continue
else
:
oinfos
=
scans_infos
[
oscan
]
#
for oscan in range(len(scans_infos)) :
#
# print " oscan " , oscan
#
if done[oscan]:
#
continue
#
else:
#
oinfos = scans_infos[oscan]
oM
,
oOM
,
oV
=
oinfos
if
kM
==
oM
: