Commit 5b8ae239 authored by Alessandro Mirone's avatar Alessandro Mirone
Browse files

da testare : debinning per avere migliori immagini

parent 1919c3e2
......@@ -43,6 +43,9 @@ written in the code for the fonctionality itself. You can also write mathematic
and even include graphics.
"""
# from mayavi import mlab
import string
import numpy
import numpy as np
......@@ -88,7 +91,7 @@ import xrs_read
import theory
import extraction
import xrs_imaging
import superr
#################################################################
## THIS redefinition of yaml is used to keep the entry ordering
......@@ -550,7 +553,7 @@ def view_Volume_myavi(mydata):
def view_Volume_myavi_(V) :
from mayavi import mlab
src = mlab.pipeline.scalar_field(V)
mlab.pipeline.iso_surface(src, contours=[V.min()+0.05*V.ptp(), ], opacity=0.6)
mlab.pipeline.iso_surface(src, contours=[V.min()+0.1*V.ptp(), ], opacity=0.6)
mlab.show()
src = mlab.pipeline.scalar_field(V)
mlab.pipeline.volume(src,vmin=1000.0, vmax=2000.0)
......@@ -748,8 +751,78 @@ def create_rois(mydata):
h5.close()
def superR_getVolume(mydata):
    """
    Reconstruct the sample volume from precomputed scalar products.

    Here an example of the input file dedicated section ::

        superR_getVolume :
            scalprods_address : "scalprods.hdf5:scal_prods/"
            target_filename : "volume.hdf5"
            debin : [2,1]
            niter : 20
            beta : 1.0e-8
            eps : 0.000002

    scalprods_address points to the scalDS, scalDD, scalSS scalar products
    calculated by superR_scal_deltaXimages.
    The volume will be written in file target_filename (which must not exist
    already), in the datagroup Volume.
    The parameter debin defaults to [1,1].
    It is used to increase a dimension Z, Y or both, to make it match with X.
    The parameters for the Fista optimisation cycle are :

        - niter : the number of fista cycles
        - beta : the factor of the Total Variation penalisation term
        - eps : a parameter for the convergence of the Chambolle-Pock
          TV denoising phase
    """
    scalprods_address = mydata["scalprods_address"]
    scalprods_filename, scalprods_groupname = split_hdf5_address(scalprods_address)

    target_filename = mydata["target_filename"]
    if os.path.exists(target_filename):
        # Refuse to overwrite: the user must remove the result file explicitly.
        sys.stdout.write("Error : file %s exists already. Remove it yourself\n" % target_filename)
        sys.stderr.write("Error : file %s exists already. Remove it yourself\n" % target_filename)
        sys.exit(1)

    niter = mydata["niter"]
    beta = mydata["beta"]
    eps = mydata["eps"]  # NOTE(review): read but not forwarded to superr, which hard-codes its eps

    # 'debin' is optional; the 'in' test works on both python 2 and 3,
    # unlike the original dict.has_key() which is python-2 only.
    if 'debin' in mydata:
        debin = mydata['debin']
    else:
        debin = [1, 1]

    # Only reading here, so open the scalar-products file read-only.
    h5f = h5py.File(scalprods_filename, "r")
    h5 = h5f[scalprods_groupname]
    scalDS = h5["scalDS"][:]
    scalDD = h5["scalDD"][:]
    scalSS = h5["scalSS"][:]
    h5f.close()

    DIMZ, DIMY, DIMX = scalDS.shape
    if debin != [1, 1]:
        # Replicate every Z slice debin[0] times and every Y slice debin[1]
        # times (via broadcasting into the 5-D buffer), then collapse the
        # replication axes back into Z and Y.  Without the final reshape the
        # array would stay 5-dimensional and superr() would fail when
        # unpacking scalDS.shape into three dimensions.
        nuovoDS = numpy.zeros([DIMZ, debin[0], DIMY, debin[1], DIMX], "d")
        scalDS.shape = DIMZ, 1, DIMY, 1, DIMX
        nuovoDS[:] = scalDS
        scalDS = nuovoDS.reshape(DIMZ * debin[0], DIMY * debin[1], DIMX)

    Volume = superr.superr(scalDD, scalDS, scalSS, niter=niter, beta=beta)

    h5 = h5py.File(target_filename, "w")
    h5["Volume"] = Volume
    h5.close()
def superR_scal_deltaXimages(mydata):
""" This step supposes that you have:
......@@ -795,7 +868,9 @@ def superR_scal_deltaXimages(mydata):
###################################
# optional
nbin : 5 # defaults to 1
nbin : 5 # defaults to 1
# it will bin 5 xpixels in one
roi_keys : [60, 64, 35, 69, 34, 24, 5, 6, 71, 70, 39, 58, 56, 33]
# roi_keys default to all the keys present in delta_address
......@@ -803,7 +878,7 @@ def superR_scal_deltaXimages(mydata):
# defaults to None. If given the integrated image and the average line will be written
# to check the superposability between the foil scans and the sample scans
If nbin is given the dimension XDIM of the superresolution axis, will be reduced,
If nbin is given the dimensions of the superresolution axis, will be reduced or increased,
by binning together the foil PSFs.
What the program will produce, under *target_address* datagroup, is
......@@ -824,6 +899,7 @@ def superR_scal_deltaXimages(mydata):
of the foil. The sum over the ROIs is implicitely done.
"""
delta_address = mydata["delta_address"]
delta_filename, delta_groupname = split_hdf5_address(delta_address)
......@@ -836,8 +912,8 @@ def superR_scal_deltaXimages(mydata):
h5f = h5py.File(delta_filename,"r")
h5 = h5f[delta_groupname]
if not mydata.has_key(''):
width = 1
if not mydata.has_key('nbin'):
width = [1,1,1]
else:
width = mydata['nbin']
......@@ -861,8 +937,10 @@ def superR_scal_deltaXimages(mydata):
integrated_images [t]=[ m.sum(axis=0) ,0, 0, None, None] # the second entry is left free for the sample integrated image, the third for the orig
# the 4th the cornerpos, to be initialised by sample data, the fifth cornerpos by origdelta images ( if origdelta is read)
if width != 1 : # REBINNING
m=m[:(m.shape[0]/width)*width].reshape(-1, width, m.shape[1],m.shape[2]).sum(axis=1)/width
if width[2] != [1,1,1] : # REBINNING
nbin = width[2]
assert(nbin>1)
m=m[:(m.shape[0]/nbin)*nbin].reshape(-1, nbin, m.shape[1],m.shape[2]).sum(axis=1)/nbin
sonde [t] = m
......@@ -899,7 +977,6 @@ def superR_scal_deltaXimages(mydata):
h5 = h5f[sample_groupname]
if not mydata.has_key('scan_interval'):
roi_keys = list(h5.keys())
zscan_keys = sorted( list(h5.keys()) , key = lambda x: int(filter(str.isdigit, str(x) )) )
else:
zscan_keys =[ "Scan%d"%i for i in range(*list(mydata['scan_interval']))]
......@@ -998,7 +1075,7 @@ def superR_scal_deltaXimages(mydata):
h5 = h5f[target_groupname]
h5["scalSS"] = scalSS
h5.create_dataset("scalDS", ( ZDIM,YDIM,XDIM ), dtype='d')
h5.create_dataset("scalDS", ( width[0]*ZDIM , width[1]*YDIM , XDIM ), dtype='d')
h5.create_dataset("scalDD", ( 1, ), dtype='d')
h5["scalDS"][:]=0
h5["scalDD"][:]=0
......@@ -1231,7 +1308,8 @@ swissknife_operations={
"view_Volume_myavi" : view_Volume_myavi,
"superR_scal_deltaXimages" : superR_scal_deltaXimages,
"superR_fit_responses" : superR_fit_responses,
"superR_recreate_rois" : superR_recreate_rois
"superR_recreate_rois" : superR_recreate_rois,
"superR_getVolume" : superR_getVolume
}
parallelised_operations = [ "loadscan_2Dimages" , "superR_scal_deltaXimages" , "superR_fit_responses" , "superR_recreate_rois" ]
......
......@@ -5,6 +5,7 @@ import scipy.stats
import scipy.ndimage.filters as filt
import sys
import pickle
import os
from scipy.optimize import minimize
scipymin=1
......@@ -391,6 +392,7 @@ def Fista( data , solution , s2d, d2s, solution_shape , parallel = 0 , ni
def trajectory_error( XYXY , O_spots, opticalPSF, nref, reponse_pixel , retrieve_spots, suggerimento=None):
if myrank==0: print XYXY
Nspots = O_spots.shape[0]
......@@ -529,9 +531,12 @@ def refine_trajectory(O_spots, opticalPSF, trajectory , nref , reponse_pixel
if myrank==0:
print indent +"IMPROVING TRAJECTORY "
res = minimize(trajectory_error,np.array([X1,X2]),( O_spots, opticalPSF, nref , reponse_pixel ,0 ,suggerimento ), method='Powell',
# method='Nelder-Mead',
options={'disp': False, 'maxiter': None, 'return_all': False, 'maxfev': None, 'xtol': 0.001, 'ftol': 0.001})
res = minimize(trajectory_error,np.array([X1,X2]),( O_spots, opticalPSF, nref , reponse_pixel ,0 ,suggerimento ), # method='Powell',
method='Nelder-Mead',
options={'disp': False, 'maxiter': 30,
'return_all': False,
'maxfev': None, 'xtol': 0.001,
'ftol': 0.001 } )
if myrank==0:
print " ... MINIMO IN ", res.x,
print " INIZIALE ", [X1,X2]
......@@ -541,12 +546,13 @@ def refine_trajectory(O_spots, opticalPSF, trajectory , nref , reponse_pixel
else:
dodebug = False
if myrank==0:
print indent + "IMPROVING TRAJECTORY "
res = minimize(trajectory_error,np.array([X1,Y1,X2,Y2]),( O_spots, opticalPSF, nref , reponse_pixel ,0 ), method='Powell',
# method='Nelder-Mead',
options={'disp': False, 'maxiter': None, 'return_all': False, 'maxfev': None, 'xtol': 0.001, 'ftol': 0.001})
dodebug = True
res = minimize(trajectory_error,np.array([X1,Y1,X2,Y2]),( O_spots, opticalPSF, nref , reponse_pixel ,0 ), # method='Powell',
method='Nelder-Mead',
options={'disp': dodebug, 'maxiter': 50, 'return_all': False, 'maxfev': None, 'xtol': None, 'ftol': 0.001})
# res = minimize(trajectory_error,np.array([X1,Y1,X2,Y2]),( O_spots, opticalPSF, nref ))
if myrank==0:
print " ... MINIMO IN ", res.x,
......@@ -561,7 +567,6 @@ def refine_trajectory(O_spots, opticalPSF, trajectory , nref , reponse_pixel
def fit_reponse(trajectory, O_spots , nref, reponse_pixel, beta=0.1, niter=500 ) :
solution = np.zeros( [O_spots[0].shape[0]*nref, O_spots[0].shape[1]*nref ] , "f" )
total_I = []
total_J = []
......@@ -706,11 +711,11 @@ def get_spots_list( filename , groupname ):
if m.shape!=(0,):
stat = m.sum(axis=-1).sum(axis=-1)
### if stat.min()>stat.max()/2.0 and stat.min() >250.0:
res.append(m)
nomi_scan.append( sn )
stats.append(stat)
origini[sn] = h5[groupname+"/"+sn+"/cornerpos"][:]
if stat.min()>stat.max()/2.0 and stat.min() >250.0:
res.append(m)
nomi_scan.append( sn )
stats.append(stat)
origini[sn] = h5[groupname+"/"+sn+"/cornerpos"][:]
h5.close()
......@@ -1003,6 +1008,7 @@ def DOFIT(filename=None, groupname=None, nref=5, niter_optical=500, beta_optical
h5 = h5py.File(target_file,"w")
h5["response"] = newreponse
else:
os.system("touch %s" %target_file )
h5 = h5py.File(target_file,"a")
for iterm, (O_spots, name, stat) in enumerate(zip(O_spots_list, nomi_rois, stats)):
......@@ -1018,9 +1024,10 @@ def DOFIT(filename=None, groupname=None, nref=5, niter_optical=500, beta_optical
h5group["Yslope" ] = trajectory.Y.slope
h5group["stat" ] = stat
h5group["nref" ] = nref
h5.flush()
h5.close()
h5 = None
if myrank == 0:
tl = [trajectory_dic[i] for i in range( len(trajectory_list) )]
file = open( "trajectories.pickle" ,"w")
......
import numpy as np
import h5py
import math
myrank=0
import skimage.restoration
def superr( scalDD, scalDS, scalSS, niter=15, beta=1.0e-8):
    """
    Run the FISTA reconstruction on precomputed scalar products.

    - scalDS which is an array [ZDIM,YDIM,XDIM] , type "d" .
    - scalDD which is the total sum of the squared datas.
    - scalSS which is an array [XDIM,XDIM] , type "d" .

    Returns the reconstructed Volume as a float32 array of the same
    [ZDIM,YDIM,XDIM] shape as scalDS.
    """
    zdim, ydim, xdim = scalDS.shape
    # sanity check: the source/source products must be square in X
    assert( scalSS.shape == (xdim, xdim))
    # the solution buffer is filled in place by Fista
    Volume = np.zeros((zdim, ydim, xdim), "f")
    Fista(scalDD, scalDS.astype("f"), scalSS.astype("f"), Volume,
          niter=niter, beta=beta)
    return Volume
def calculate_grad( scalDD, scalDS , scalSS, solution, grad) :
    """
    In-place gradient of the quadratic data term; returns half the error.

    grad is overwritten with solution contracted against scalSS over the
    last axis, minus scalDS when the data products are supplied.  When
    scalDS is None only the contraction is applied (scalDD is then ignored
    as well), which is how the caller applies the plain S^T S operator.
    """
    # apply S^T S : contract the last (X) axis of solution with scalSS
    grad[:] = np.tensordot(solution, scalSS, axes=[[-1], [-1]])
    err = (solution * grad).sum()
    if scalDS is None:
        return err / 2
    # full quadratic error: <S^T S v, v> - 2 <scalDS, v> + scalDD
    err = err - 2 * (solution * scalDS).sum() + scalDD
    grad[:] = grad - scalDS
    return err / 2
def Fista( scalDD, scalDS , scalSS, solution , niter=500, beta=0.1 ):
    """
    FISTA iterations with Total Variation regularisation; *solution* is
    updated in place.

    - scalDD : total sum of the squared data (constant term of the error).
    - scalDS : array shaped like *solution*, the data/source scalar products.
    - scalSS : [XDIM,XDIM] source/source scalar products.
    - solution : the volume being reconstructed, modified in place.
    - niter : number of cycles; a negative value runs abs(niter) cycles with
      the momentum parameter t reset to 1 each step (ISTA-like behaviour).
    - beta : weight of the Total Variation penalisation term.
    """
    # work buffers, all shaped like the solution
    grad = np.zeros_like(solution)
    grad2 = np.zeros_like(solution)
    x_old = np.zeros_like(solution)
    y = np.zeros_like(solution)
    err = 0.0
    err=calculate_grad( scalDD, scalDS , scalSS, solution, grad)
    # Power-iteration-style loop: repeatedly apply S^T S (calculate_grad
    # with the data terms disabled) to estimate the Lipschitz constant of
    # the gradient.  The 100000 factors rescale inside norm() — presumably
    # to guard against overflow/underflow; TODO confirm.
    for i in range(20):
        calculate_grad(None, None , scalSS, grad, grad2)
        Lip = math.sqrt( np.linalg.norm(grad2/100000.0) )*100000
        grad[:] = grad2/ Lip
    if myrank==0:
        print "LIP ", Lip
    Lip = Lip*1.2  # safety margin on the estimated Lipschitz constant
    t=1.0
    y[:] = solution
    x_old[:] = solution
    for iter in range(abs(niter)):
        # gradient step taken at the extrapolated point y
        err = calculate_grad(scalDD, scalDS , scalSS, y, grad)
        solution[:] = y - grad/Lip
        # proximal step: Chambolle TV denoising with weight beta
        # (note: eps is hard-coded here, not taken from the input file)
        solution[:]=skimage.restoration.denoise_tv_chambolle(solution, weight=beta, eps=0.000002)
        ## solution[:] = np.maximum(solution, 0)
        # FISTA momentum update
        tnew = ( 1+math.sqrt(1.0+4*t*t) )/2
        y[:] = solution +(t-1)/tnew *( solution - x_old )
        t = tnew
        if niter<0:
            # negative niter disables the momentum (t stays 1)
            t=1
        x_old[:] = solution
        if myrank==0:
            print " errore est %e mod_grad est %e\n" % ( err, grad.std())
......@@ -26,6 +26,8 @@ temperature = LinearSegmentedColormap('temperature',cdict, 65536)
filea = "demo_foilroi.h5"
fileb = "demo_newrois_scan.h5"
filer = "demo_responses_0.0_0.2_Tfreezed.h5"
filer = "demo_responses_0.0_2.0_Tfreezed.h5"
totf = fabio.open("FIGS/cumulata.edf","r").data
......@@ -140,10 +142,6 @@ def mostra(chiave,data_a, data_b , data_b_ori, sez, pointer, totf, shor, inset_b
)
fig.savefig('FIGS/tutteroi.png', dpi=200, bbox_inches='tight')
fig = plt.figure()
inset = totf[ ob[0]-3*0: ob[0]+shor[1]+3*0, ob[1]-3*0: ob[1]+shor[2]+3*0 ]
......@@ -170,7 +168,6 @@ def mostra(chiave,data_a, data_b , data_b_ori, sez, pointer, totf, shor, inset_b
else:
fig.savefig(d+'insetsmall.png', dpi=200, bbox_inches='tight')
fig = plt.figure()
inset = totf[ ob[0]-3*0: ob[0]+shor[1]+3*0, ob[1]-3*0: ob[1]+shor[2]+3*0 ]
......@@ -190,12 +187,11 @@ def mostra(chiave,data_a, data_b , data_b_ori, sez, pointer, totf, shor, inset_b
fig.savefig(d+'insetcollage.png', dpi=200, bbox_inches='tight')
fig = plt.figure()
data = h5resp[str(chiave)]["data"][:]
imshow(data, interpolation='nearest')
data = h5resp[str(chiave)]["data"][:]+1
data[0,0]=0
imshow(data+1.0, interpolation='nearest', norm=LogNorm(vmin=1.1 ))
if SHOW :
show()
else:
......@@ -204,17 +200,21 @@ def mostra(chiave,data_a, data_b , data_b_ori, sez, pointer, totf, shor, inset_b
x = arange(len(statsa))+sez
xx = arange(len(statsbb))
plot(x,statsa)
plot(xx,statsbb)
fig = plt.figure()
plot(x,statsa )
plot(xx,statsbb)
# plt.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off', left='off', labelleft='off')
if SHOW :
show()
else:
fig.savefig(d+'largeplot.png', dpi=200, bbox_inches='tight')
plot(x,statsa)
plot(x,statsb)
fig = plt.figure()
plot(x,statsa )
plot(x,statsb )
# plt.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off', left='off', labelleft='off')
if SHOW :
show()
else:
......
......@@ -4,10 +4,6 @@ import math
myrank=0
import skimage.restoration
def main2():
file = "demo.hdf5"
group_delta = "ROI_B_FIT8/scanXX/scans/Scan273/"
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment