Commit eb5b85f9 authored by operator ID01

make flatfield cxi compatible. + some cleaning

parent b27c5016
@@ -2,7 +2,7 @@
#TODO - ask whether to overwrite rather than skipping, i.e. if a mistake is made
#TODO - delete a scan from an hdf5 file cleanly - use 'del' function
#TODO - varying count rates between images
"""
# make a flatfield of Eiger data
# generate the data
@@ -14,8 +14,8 @@ h5file = 'align.h5' # output file
imgdir = None#'/data/visitor/hc3211/id01/mpx/e17089/'
#speclist = glob.glob(specdir+'e16014.spec')
specfile = '/data/visitor/hc3198/id01/spec/%s.spec'%sample # source file
scanno = ("16.1", "17.1", "18.1", "19.1", "20.1", "21.1", "22.1", "23.1", "24.1", "25.1", "26.1", "27.1") # None for all, must be tuple i.e. ("1.1",) for single scanno
specfile = '/data/id01/inhouse/IHR/EBScomm0306/%s.spec'%sample # source file
scanno = ("29.1",) # None for all, must be tuple i.e. ("1.1",) for single scanno
with id01h5.ID01File(h5file,'a') as h5f: # the safe way
s = h5f.addSample(sample)
@@ -25,19 +25,22 @@ with id01h5.ID01File(h5file,'a') as h5f: # the safe way
imgroot=imgdir,
overwrite=False, # skip if scan exists
compr_lvl=6)
"""
from id01lib.flatfield import Flatfieldv2
import h5py as h5
import sys
import numpy as np
data=h5.File('align.h5','r')
data=h5.File('EBScomm0306.h5','r')
#mask=h5.File('/data/id01/inhouse/leake/beamReconstructions/EigerMask.h5','r')['/image_data'].value
#create a flatfield
ff_path = ''
ff_data0 = data['/flatfield/29.1/data_0'][:,:,:]
"""
# remove bad frames, the Eiger got some bad packets
remove=[3,25,26,44,54,55,60,66,73,76,77,90,91,92,94,96,102,116,123,131,132,136,137,142,152,157,165,180,181,189,192]
@@ -53,9 +56,11 @@ ff_data0=np.delete(ff_data0,np.arange(0,42,1),axis=0)
ff_data0=np.delete(ff_data0,[54,87,88,89],axis=0)
ff_data0[ff_data0>=1000000]=0
"""
ff_path=''
ff_init = Flatfieldv2(ff_data0, ff_path,detector='eiger2M', auto = False) #,mask=mask,auto = False)
ff_init = Flatfieldv2(ff_data0, ff_path,detector='mpx4', tolerance=98, auto = False) #,mask=mask,auto = False)
#ff_data1 = h5.get_scan_images(h5fn,2)[:]
#ff_data2 = h5.get_scan_images(h5fn,3)[:]
#ff_data3 = h5.get_scan_images(h5fn,4)[:]
@@ -70,6 +75,8 @@ ff_init.calc_I_bar_sigma()
ff_init.dead_pixel_mask = np.invert((np.isnan(ff_init.I_bar)) | \
(np.isnan(np.sum(ff_init.data_dev,axis=0))) | \
(ff_init.I_bar>(np.median(np.round(ff_init.I_bar))*100)))
ff_init.tot_mask_cxi[np.invert(ff_init.dead_pixel_mask)] += 128 # BAD PIXEL
# plot the integrated intensity in the detector as a function of image
ff_init.apply_mask2data(np.invert(ff_init.dead_pixel_mask))
@@ -82,7 +89,7 @@ ff_init.plot_int_det_cts(mask=ff_init.dead_pixel_mask)
#ff_init.set_I_min_max()
print("set I min/max")
ff_init.I_lims=[30,153]
ff_init.I_lims=[30,15000]
ff_init.mask_1 = ((ff_init.I_bar>= ff_init.I_lims[0]) & \
(ff_init.I_bar <= ff_init.I_lims[1])) #& \ (ff_init.dead_pixel_mask == False)
@@ -111,7 +118,7 @@ ff_init.plot_bad_pxl_mask(ff_init.tot_mask,id='final')
# look at the standard deviation across the detector
ff_init.apply_mask2data(np.invert(ff_init.tot_mask))
ff_init.plot_SD_image(clim=[0.0,0.2])
ff_init.plot_SD_image()
print("plot SD image")
ff_init.gen_ff()
print("generate ff")
@@ -125,6 +132,8 @@ print("plot worst pixel")
#ff_init.calc_ff_ID01()
ff_init.make_ff_h5()
ff_init.make_ff_cxi()
#ff,ff_unc = ff_init.read_ff_h5()
@@ -4,6 +4,7 @@ import pylab as pl
import os,datetime
import h5py
from scipy.optimize import curve_fit
import pdb
"""
Flatfield class - SJL 20131103
@@ -634,6 +635,18 @@ class Flatfieldv2:
self.check_I_lims = True
self.check_tolerance = True
self.tot_mask = np.ones(self.data[0,:,:].shape)
self.tot_mask_cxi =np.zeros(self.data[0,:,:].shape,dtype = np.int32)
#CXI definition
# CXI_PIXEL_IS_INVALID =1
# CXI_PIXEL_IS_SATURATED =2
# CXI_PIXEL_IS_HOT =4
# CXI_PIXEL_IS_DEAD =8
# CXI_PIXEL_IS_SHADOWED =16
# CXI_PIXEL_IS_BAD =128
# CXI_PIXEL_IS_MISSING =512
# CXI_PIXEL_IS_NOISY =1024
# CXI_PIXEL_HAS_SIGNAL =4096
self.K = 0.0
time = datetime.datetime.utcnow()
self.ff_path = path+id+'ff_'+time.strftime("%Y%m%d-%H%M/")
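The flag values listed above follow the CXI convention of one bit per defect class, so several flags can be combined in a single int32 per pixel and tested independently. Below is an illustrative sketch of setting and querying such flags with bitwise operations; it is not part of this commit, and the array shape and indices are arbitrary.
import numpy as np
CXI_PIXEL_IS_DEAD = 8
CXI_PIXEL_IS_BAD = 128
flags = np.zeros((516, 516), dtype=np.int32)   # per-pixel flag image, shape illustrative
flags[10, 20] |= CXI_PIXEL_IS_DEAD             # mark one pixel as dead
flags[10, 20] |= CXI_PIXEL_IS_BAD              # the same pixel can also carry the BAD flag
is_dead = (flags & CXI_PIXEL_IS_DEAD) > 0      # boolean image of dead pixels
is_bad = (flags & CXI_PIXEL_IS_BAD) > 0        # boolean image of bad pixels
Setting flags with |= is idempotent; the in-place += 128 used in this commit produces the same bit pattern only as long as each flag is added at most once per pixel.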
@@ -702,6 +715,8 @@ class Flatfieldv2:
self.mask_1 = ((self.I_bar>= self.I_lims[0]) & \
(self.I_bar <= self.I_lims[1])) & \
(self.dead_pixel_mask == False)
self.tot_mask_cxi[self.mask_1] += 128 # BAD PIXEL
self.plot_bad_pxl_mask(self.mask_1,id='1')
print("plot bad pixel mask")
@@ -734,11 +749,18 @@ class Flatfieldv2:
#self.get_params()
def deadpixels2nan(self):
self.data[self.data==0]=np.nan
"""
avoid divide-by-zero errors later; a zero count is a dead pixel under flatfield illumination
"""
self.data[self.data==0] = np.nan
tmpdata = self.data.sum(axis=0)
self.tot_mask_cxi[tmpdata==0] += 8 # DEAD PIXEL
print(self.tot_mask_cxi.sum(),(self.data==0).sum())
def calc_I_bar_sigma(self,):
"""
calculate the mean intensity I_bar and its standard deviation I_sigma
"""
self.deadpixels2nan()
self.I_bar = np.nanmean(self.data,axis=0)
@@ -750,11 +772,15 @@ class Flatfieldv2:
return self.I_bar, self.I_sigma
def plot_int_det_cts(self,mask):
"""
plot intensity variation as a function of image number
"""
pl.figure(1)
tmp = self.data.copy()
for i in np.arange(tmp.shape[0]):
tmp[i,:,:][mask==False]=np.nan
data2plot = np.nansum(np.nansum(tmp,axis=1),axis=1)
print("total photons per pixel:",data2plot.sum()/(516**2),"\n No. images:,",data2plot.shape[0])
pl.plot(((data2plot-np.nanmean(data2plot))/np.nanmean(data2plot))*100)
pl.xlabel('Image no.')
pl.ylabel('Total Intensity Variation (%)')
@@ -763,6 +789,9 @@ class Flatfieldv2:
pl.clf()
def scale_data(self,mask):
"""
apply correction factor to data
"""
pl.figure(1)
tmp = self.data.copy()
for i in np.arange(tmp.shape[0]):
@@ -921,6 +950,8 @@ class Flatfieldv2:
sn_sigma = coeff[2]
self.K= sn_peak + 4*sn_sigma
self.mask_2 = self.I_sigma<(np.sqrt(self.I_bar)*self.K)
self.tot_mask_cxi[self.mask_2] += 128 # BAD PIXEL
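mask_2 above compares each pixel's measured spread I_sigma with the Poisson expectation sqrt(I_bar), widened by the fitted factor K = sn_peak + 4*sn_sigma. A toy worked example of that criterion, with invented numbers, looks like this:
import numpy as np
I_bar = 100.0                            # mean counts of one pixel
I_sigma = 11.0                           # measured standard deviation of that pixel
K = 1.3                                  # e.g. sn_peak + 4*sn_sigma from the fit
passes = I_sigma < np.sqrt(I_bar) * K    # 11.0 < 13.0 -> True, pixel passes the cut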
def set_tolerance(self,):
while self.check_tolerance:
@@ -966,6 +997,8 @@ class Flatfieldv2:
pl.savefig(self.ff_path+'stats_4_tol_as_ims.pdf')
pl.clf()
self.tot_mask_cxi[tmp_data>=self.tolerance] += 128 # BAD PIXEL
return (tmp_data>=self.tolerance)
def final_mask(self,list_masks):
@@ -979,14 +1012,20 @@ class Flatfieldv2:
pl.savefig(self.ff_path+'image-ff_final_mask.pdf')
pl.clf()
def plot_SD_image(self,clim=[0,0.05]):
def plot_SD_image(self,clim=None):
# hist of SD error and image of SD error
pl.figure(1)
pl.subplot(221)
print("mask sum:",self.tot_mask.sum())
tmp = self.I_sigma*self.tot_mask/(np.nansum((self.I_bar*self.tot_mask))/np.nansum(self.tot_mask))#self.I_sigma/self.I_bar.sum()*(np.prod(self.I_bar.shape)-(self.I_bar==0).sum()))
mean_I_sigma = np.nanmean(tmp.flatten())
I_sigma_sigma = np.nanstd(tmp.flatten())
if clim == None:
clim = [mean_I_sigma-I_sigma_sigma*3,mean_I_sigma+I_sigma_sigma*3]
#print(clim)
n, bins, patches = pl.hist(tmp.flatten()[np.isfinite(tmp.flatten())], np.arange(0,np.sqrt(mean_I_sigma),np.sqrt(mean_I_sigma)*2/100), normed=0, facecolor='green', alpha=0.5)
pl.title('normalised SD distribution')
pl.xlim([mean_I_sigma-I_sigma_sigma*5,mean_I_sigma+I_sigma_sigma*5])
pl.xlabel('SD')
pl.ylabel('Frequency')
pl.subplot(222)
@@ -1012,11 +1051,15 @@ class Flatfieldv2:
self.ff = np.where(self.tot_mask,self.I_bar_sf/self.I_bar,0)
self.ff_unc = np.where(self.tot_mask,np.sqrt(1.0/self.data.shape[2])*self.I_sigma/self.I_bar,0)
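gen_ff() defines the flatfield as I_bar_sf/I_bar on unmasked pixels and 0 elsewhere, with ff_unc holding its relative uncertainty. A short sketch of how such a correction would typically be applied to a raw frame; the stand-in arrays below are not from the commit.
import numpy as np
raw = np.random.poisson(100.0, size=(516, 516)).astype(float)  # stand-in raw detector frame
ff = np.ones_like(raw)                                          # stand-in for the generated flatfield
ff_unc = np.full_like(raw, 0.01)                                # stand-in relative uncertainty of ff
corrected = raw * ff                 # per-pixel gain correction; masked pixels (ff == 0) go to 0
corrected_err = corrected * ff_unc   # uncertainty contribution from the flatfield alone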
def plot_ff(self,clim=[0.95,1.05],clim_unc=[0,0.025]):
def plot_ff(self,clim=None,clim_unc=None):
pl.figure(1)
pl.subplot(221)
pl.title('Flatfield')
pl.imshow(self.ff,cmap='spectral')
if clim == None:
ffbar = np.nanmean(self.ff)
ffsigma = np.nanstd(self.ff)
clim=[ffbar-ffsigma*3,ffbar+ffsigma*3]
pl.clim(clim)
pl.colorbar()
pl.subplot(222)
@@ -1024,6 +1067,10 @@ class Flatfieldv2:
pl.imshow(self.ff_unc,cmap='spectral')
#pl.clim((np.compress(self.ff_unc.flatten()>0,self.ff_unc.flatten())).min(),(np.compress(self.ff_unc.flatten()>0,self.ff_unc.flatten())).max())
pl.colorbar()
if clim_unc == None:
ffubar = np.nanmean(self.ff_unc)
ffusigma = np.nanstd(self.ff_unc)
clim_unc=[ffubar-ffusigma*3,ffubar+ffusigma*3]
pl.clim(clim_unc)
pl.savefig(self.ff_path+"stats_6_ff.pdf")
pl.clf()
@@ -1031,47 +1078,44 @@ class Flatfieldv2:
def plot_worst_pixel(self,):
# worst pixel versus a typical pixel as a function of image number
pl.figure(1)
pl.subplot(211)
pl.subplot(311)
x,y = np.where(self.I_sigma*self.tot_mask==np.nanmax(self.I_sigma*self.tot_mask))
tmp_data1 = self.data[:,x,y]
#tmp_data1 = self.data_dev[x[0],y[0]]**2
n, bins, patches = pl.hist(tmp_data1.flatten()[np.isfinite(tmp_data1.flatten())],np.arange(np.nanmin(tmp_data1),np.nanmax(tmp_data1),(np.nanmax(tmp_data1)-np.nanmin(tmp_data1))/10) , normed=0, facecolor='green', alpha=0.5)
n, bins, patches = pl.hist(tmp_data1.flatten()[np.isfinite(tmp_data1.flatten())],np.arange(np.nanmin(tmp_data1),np.nanmax(tmp_data1),(np.nanmax(tmp_data1)-np.nanmin(tmp_data1))/10) , normed=0, facecolor='green', alpha=0.5,label="worst pixel")
#pl.xlim(self.I_lims[0]-10,self.I_lims[1]+10)
mn1=np.nanmean(tmp_data1)
std1=np.nanstd(tmp_data1)
pl.xlim(mn1-3*std1,mn1+3*std1)
pl.ylabel('Frequency')
pl.title('Worst Pixel')
pl.subplot(212)
a = find_nearest(self.I_sigma*self.tot_mask,np.nanmean(self.I_sigma*self.tot_mask))
x,y = np.where(self.I_sigma*self.tot_mask==a)
print(x,y)
#print(x,y)
#tmp_data2 = (self.data_dev[x,y]/self.data[x,y])**2
#tmp_data2 = self.I_sigma*self.tot_mask
#tmp_data2 = self.data_dev[x,y]**2
tmp_data2 = self.data[:,x[0],y[0]]
n, bins, patches = pl.hist(tmp_data2.flatten()[np.isfinite(tmp_data2.flatten())],np.arange(np.nanmin(tmp_data2),np.nanmax(tmp_data2),(np.nanmax(tmp_data2)-np.nanmin(tmp_data2))/10) , normed=0, facecolor='black', alpha=0.5)
pl.title('Average Pixel')
n, bins, patches = pl.hist(tmp_data2.flatten()[np.isfinite(tmp_data2.flatten())],np.arange(np.nanmin(tmp_data2),np.nanmax(tmp_data2),(np.nanmax(tmp_data2)-np.nanmin(tmp_data2))/10) , normed=0, facecolor='black', alpha=0.5,label="average pixel")
#pl.xlim(self.I_lims[0]-10,self.I_lims[1]+10)
mn2=np.nanmean(tmp_data1)
std2=np.nanstd(tmp_data1)
pl.xlim(mn2-3*std2,mn2+3*std2)
pl.xlabel('Photon counts')
pl.ylabel('Frequency')
pl.savefig(self.ff_path+'stats_7_avg_wPix_hist.pdf')
pl.clf()
pl.legend(loc='best')
pl.figure(1)
pl.subplot(212)
pl.plot(tmp_data1.flatten(),'g', label = 'Worst Pixel')
pl.plot(tmp_data2.flatten(),'k', label = 'Average Pixel')
pl.ylim(self.I_lims[0],self.I_lims[1])
pl.xlabel('Exposure Number')
pl.ylabel('Photon counts')
pl.legend(loc='best')
pl.savefig(self.ff_path+'stats_7_avg_wPix_time.pdf')
pl.savefig(self.ff_path+'stats_7_avg_wPix_hist.pdf')
pl.clf()
def plot_rnd_ff_im(self,n_std=1):
@@ -1105,9 +1149,10 @@ class Flatfieldv2:
pl.subplot(224)
pl.title('mean w/ ff corr')
data2plot = self.I_bar*self.tot_mask/self.I_bar_sf
pl.imshow(data2plot*self.ff,cmap='spectral')
pl.clim([np.nanmean(data2plot)-np.nanstd(data2plot)*n_std,np.nanmean(data2plot)+np.nanstd(data2plot)*n_std])
data2plot = (self.I_bar*self.tot_mask/self.I_bar_sf)*self.ff
pl.imshow(data2plot,cmap='spectral')
clim=[np.nanmean(data2plot)-np.nanstd(data2plot)*n_std,np.nanmean(data2plot)+np.nanstd(data2plot)*n_std]
pl.clim(clim)
pl.colorbar()
pl.savefig(self.ff_path+'image-ff_im_correction.pdf')
@@ -1188,6 +1233,38 @@ class Flatfieldv2:
return ff,ff_unc
def make_ff_cxi(self, fn='ff.cxi', save_raw_ims=False):
# check the file doesn't exist
try:
os.listdir(self.ff_path).index(fn)
print("File exists")
q=input("would you like to overwrite it? [y/n]")
if q=='y':
ff_h5 = h5py.File(self.ff_path+fn,'w')
else:
new_fn = input("New filename: ")
ff_h5 = h5py.File(self.ff_path+new_fn,'w')
except ValueError:
print("File doesn't exist: Creating file")
ff_h5 = h5py.File(self.ff_path+fn,'w')
# add data to the file
ff_h5.create_group('/entry_1/instrument_1/detector_1')
if save_raw_ims:
ff_h5.create_group('/entry_1/data_1')
ff_h5.create_dataset('/entry_1/data_1/data', data = self.data,compression='gzip', compression_opts=9)
ff_h5.create_dataset('/entry_1/instrument_1/detector_1/flatfield', data = self.ff,compression='gzip', compression_opts=9)
ff_h5.create_dataset('/entry_1/instrument_1/detector_1/flatfield_rel_unc', data = self.ff_unc,compression='gzip', compression_opts=9)
ff_h5.create_dataset('/entry_1/instrument_1/detector_1/mask', data = self.tot_mask_cxi,compression='gzip', compression_opts=9)
for attr in list(self.__dict__.keys()):
try:
code ="ff_h5['ff/ff'].attrs['%s'] = self.%s"%(attr,attr)
exec(code)
print(code)
except:
pass
ff_h5.close()
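make_ff_cxi() stores the flatfield, its relative uncertainty and the CXI pixel mask under /entry_1/instrument_1/detector_1. A minimal read-back sketch follows; only the dataset paths and the default filename ff.cxi come from the code above, and the file is assumed to sit in the current working directory rather than in the timestamped ff_path.
import h5py
f = h5py.File('ff.cxi', 'r')
det = f['/entry_1/instrument_1/detector_1']
ff = det['flatfield'][()]                  # per-pixel correction factors
ff_unc = det['flatfield_rel_unc'][()]      # relative uncertainty of ff
cxi_mask = det['mask'][()]                 # integer defect flags per pixel
f.close()
defective = cxi_mask != 0                  # any flag set
dead = (cxi_mask & 8) > 0                  # DEAD bit, assuming flags were set bitwise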
# useful functions