Commit 744b01b8 authored by myron's avatar myron

Merge branch 'nextversion'

# Conflicts:
#	PyHST/Cspace/gputomo.cu
parents e1648203 86c3e6ab
This diff is collapsed.
......@@ -86,6 +86,7 @@ typedef struct {
int ff_file_list_lenght;
char **ff_file_list;
float *ff_indexes_coefficients;
int NFF;
int * my_proj_num_list;
int * tot_proj_num_list;
int * proj_num_offset_list;
......@@ -416,7 +417,9 @@ typedef struct {
float DETECTOR_DISTANCE ;
float SOURCE_X ;
float SOURCE_Y ;
float DECT_PSI;
float DECT_TILT;
float DZPERPROJ;
float DXPERPROJ;
......@@ -963,7 +966,7 @@ struct Gpu_Context_struct {
int dimrecx, dimrecy;
float * d_SLICE ;
float * d_cos_s, * d_sin_s, * d_axis_s ;
// set by user
int num_x ;
int num_y ;
......@@ -1017,6 +1020,10 @@ struct Gpu_Context_struct {
float DETECTOR_DISTANCE ;
float SOURCE_X ;
float SOURCE_Y ;
float tilt_psi;
float tilt_tilt;
int * CONICITY_MARGIN_UP ;
int * CONICITY_MARGIN_DOWN ;
......
......@@ -60,6 +60,14 @@ int cspace_iproc=0;
#include "numpy/arrayobject.h"
#define DEBUG(a) if(cspace_iproc==0) printf("%s \n", a);
#undef NDEBUG
#include<hdf5.h>
/*
 * Error callback installed with H5Eset_auto1(): print a diagnostic on
 * stderr and terminate the process immediately so HDF5 failures cannot
 * be silently ignored.
 *
 * unused: opaque client-data pointer supplied by the HDF5 error stack
 *         (not used here).
 * Never actually returns; the herr_t return type only satisfies the
 * callback signature expected by H5Eset_auto1().
 */
herr_t my_hdf5_error_handler (void *unused)
{
  (void) unused;  /* silence -Wunused-parameter */
  fprintf (stderr, "An HDF5 error was detected. Bye.\n");
  exit (1);
  return -1;      /* unreachable; keeps compilers quiet about the non-void return */
}
/*
* The error object to expose
......@@ -395,6 +403,11 @@ Cspace_new( PyObject *self, PyObject *args)
char * CCD_FILTER_ptr;
char * RING_FILTER_ptr;
H5Eset_auto1 (my_hdf5_error_handler, NULL);
res= (Cspace*) PyObject_NEW(Cspace , &Cspacetype);
res->myCCspace = (CCspace*) malloc(sizeof( CCspace));
res->myCCspace->ff2_status = -1;
......@@ -638,8 +651,10 @@ Cspace_new( PyObject *self, PyObject *args)
res->myCCspace->params.SOURCE_DISTANCE = cpyutils_o2f (cpyutils_getattribute(res->OParameters ,"SOURCE_DISTANCE" ) );
res->myCCspace->params.DETECTOR_DISTANCE = cpyutils_o2f (cpyutils_getattribute(res->OParameters ,"DETECTOR_DISTANCE" ) );
res->myCCspace->params.SOURCE_X = cpyutils_o2f (cpyutils_getattribute(res->OParameters ,"SOURCE_X" ) );
// res->myCCspace->params.SOURCE_X = res->myCCspace->params.ROTATION_AXIS_POSITION ;
res->myCCspace->params.SOURCE_Y = cpyutils_o2f (cpyutils_getattribute(res->OParameters ,"SOURCE_Y" ) );
res->myCCspace->params.DECT_PSI = cpyutils_o2f (cpyutils_getattribute(res->OParameters ,"DECT_PSI" ) );
res->myCCspace->params.DECT_TILT = cpyutils_o2f (cpyutils_getattribute(res->OParameters ,"DECT_TILT" ) );
res->myCCspace->params.DZPERPROJ = cpyutils_o2f (cpyutils_getattribute(res->OParameters ,"DZPERPROJ" ) );
res->myCCspace->params.DXPERPROJ = cpyutils_o2f (cpyutils_getattribute(res->OParameters ,"DXPERPROJ" ) );
......@@ -1142,9 +1157,18 @@ Cspace_configure_readings(PyObject *self_a, PyObject *args)
INITIALISEARRAY2D( self->myCCspace->reading_infos.ff_indexes_coefficients , "ff_indexes_coefficients",
self->myCCspace->reading_infos.proj_file_list_lenght, 6, NPY_FLOAT32);
{
int NFF=0;
int i;
for( i=0; i< self->myCCspace->reading_infos.proj_file_list_lenght; i++) {
NFF = max(NFF,self->myCCspace->reading_infos.ff_indexes_coefficients[6*i+0] ) ;
NFF = max(NFF,self->myCCspace->reading_infos.ff_indexes_coefficients[6*i+1] ) ;
}
self->myCCspace->reading_infos.NFF = NFF+1 ;
}
if(self->myCCspace->params.CORRECT_FLATFIELD) {
if( self->myCCspace->params.STEAM_INVERTER==0) {
......
This diff is collapsed.
......@@ -95,6 +95,18 @@ void read_data_from_h5( char * filename,
);
void read_projSequence_from_h5( char * filename,
char * dsname,
int Ninterval,
int *nproj_intervals,
float *target , // target_float
long int pos0 , long int pos1 ,
long int size0 , long int size1,
int rotation_vertical,
int binning
) ;
void read_data_from_edf_eli( char * filename,
float *dataptr , // target_float
int filedatatype, //
......@@ -121,6 +133,29 @@ void read_data_from_edf_eli( char * filename,
);
void read_data_from_h5_eli( char * filename,
char * dsname,
int nproj,
float *target , // target_float
long int pos0 , long int pos1 ,
long int size0 , long int size1,
int rotation_vertical,
int binning,
float DZPERPROJ,
float *Ddark,
int correct_ff,
float *ff_infos,
float **FFs,
int doubleffcorrection,
int takelog,
float *ffcorr,
sem_t * semaforo_ptr
) ;
void write_data_to_edf( float *SLICE, int num_y,int num_x, const char * nomeout ) ;
......
This diff is collapsed.
......@@ -392,15 +392,15 @@ def get_proj_reading(preanalisi=0):
file_list = [ P.FILE_PREFIX for i in file_proj_num_list]
########################################################
# proj_num_list will address the projections pointed to by a list
if P.DZPERPROJ!=0:
my_proj_num_list = numpy.array( seqnum_list )
else:
my_proj_num_list = numpy.arange(len(seqnum_list ))
# con elicoidale sara accompagnato da proj_num_offset_list
########################################################
# proj_num_list will addresses projections pointed by a list
if P.DZPERPROJ!=0:
my_proj_num_list = numpy.array( seqnum_list )
else:
my_proj_num_list = numpy.arange(len(seqnum_list ))
# con elicoidale sara accompagnato da proj_num_offset_list
if Parameters.VERBOSITY>2 :print( " DEBUG ", " my_proj_num_list " , my_proj_num_list[0], " ... ", my_proj_num_list[-1])
if Parameters.VERBOSITY>2 :print( " DEBUG ", " my_proj_num_list " , my_proj_num_list[0], " ... ", my_proj_num_list[-1])
if P.CORR_PROJ_FILE is not None:
......@@ -473,7 +473,6 @@ def get_proj_reading(preanalisi=0):
ff_file_list = []
ff_indexes_coefficients =[[-1 ,-1 , 0.0, 0.0,-1,-1]]*len(file_proj_num_list) # *len(my_proj_num_list)
## each proch irank will processes proj_mpi_numpjs[iproc] projections
# starting from Nproj = proj_mpi_offsets[irank]
......@@ -723,7 +722,7 @@ class Parameters:
Remove or not edf decompressed (from jp2) files at the end.
"""
PROJ_DS_NAME="setthisfieldifyouusehdf5forprojections"
PROJ_DS_NAME="data"
"""
the data set name for projections inside the FILE_PREFIX in case of hdf5.
The asked hdf5 datatype is native float.
......@@ -829,7 +828,7 @@ class Parameters:
This is the prefix used to compose the FF filename. if it ends with .h5 it will be considered hdf5.
In the latter case set also FF_DS_NAME which is the name of the 3D dataset from which FF will be read.
"""
FF_DS_NAME = "setthisfieldifyouusehdf5forffs"
FF_DS_NAME = "data"
"""
the data set name for flat field inside the FILE_PREFIX in case of hdf5.
The asked hdf5 datatype is native float : see discussion above for FILE_PREFIX
......@@ -877,7 +876,7 @@ class Parameters:
If it ends by .h5 it is considered as hdf5. Set in this case BACKGROUND_DS_NAME.
"""
BACKGROUND_DS_NAME = None
BACKGROUND_DS_NAME = "data"
"""
the data set name for BACKGROUND inside the FILE_PREFIX in case of hdf5
The asked hdf5 datatype is native float : see discussion above for FILE_PREFIX
......@@ -1885,6 +1884,9 @@ Units are pixels per projection.
"""
DECT_PSI = 0.0
DECT_TILT = 0.0
STEAM_INVERTER = 0
"""
When STEAM INVERTER=1 PyHST goes from volume to projections.
......@@ -2263,8 +2265,13 @@ def memory_estimator( P, NSLICESATONCE, n_projs_max, n_ff_max, PIECE_MARGE_touse
## --------------------- C -------------------------------
## request.data
sum=sum+ P.NUM_IMAGE_1*P.numpjs_span * (NSLICESATONCE *1.0/nprocs+2*c_margin )
sum=sum+ P.NUM_IMAGE_1*P.numpjs_span * (NSLICESATONCE *1.0/nprocs+2*c_margin )*2
## accounting for extra buffers
sum = sum + P.NUM_IMAGE_1*sizeV*4* P.NPBUNCHES
if P.CONICITY:
sum=sum+ (1+P.END_VOXEL_1-P.START_VOXEL_1)*(1+P.END_VOXEL_1-P.START_VOXEL_1)*(NSLICESATONCE*1.0/nprocs+4)
sum=sum+ P.NUM_IMAGE_1*P.numpjs_span*(2*c_margin*P.NPBUNCHES+NSLICESATONCE*1.0/nprocs+4)
......@@ -2285,14 +2292,17 @@ if not Parameters.DO_V3D_UNSHARP and Parameters.NSLICESATONCE is None and (sys.
f_tmp = (P.SOURCE_DISTANCE+P.DETECTOR_DISTANCE)/(float(P.SOURCE_DISTANCE)) # ??? /P.IMAGE_PIXEL_SIZE_2
VOXEL_SIZE = P.IMAGE_PIXEL_SIZE_1/f_tmp
Fact = (P.SOURCE_DISTANCE+P.DETECTOR_DISTANCE)/(float(P.SOURCE_DISTANCE))*VOXEL_SIZE*1.0/P.IMAGE_PIXEL_SIZE_2
DFa = +(abs(P.DECT_PSI)+abs(P.DECT_TILT) )*3.2/180.0*2
DR1 = (P.END_VOXEL_1 - P.START_VOXEL_1)/2.0
DR2 = (P.END_VOXEL_2 - P.START_VOXEL_2)/2.0
RADIUS = math.sqrt( DR1*DR1+DR2*DR2 )
alpha = (max (abs(P.START_VOXEL_3),abs(P.END_VOXEL_3) ) *VOXEL_SIZE/1.0e6)/(P.SOURCE_DISTANCE)
C_MARGIN = int( RADIUS*
alpha*Fact
C_MARGIN = int( RADIUS*(
alpha*(Fact)+DFa )
+0.01)+1
if Parameters.VERBOSITY>1 : print( " C_MARGIN " , C_MARGIN)
......@@ -2726,8 +2736,11 @@ if (sys.argv[0][-12:]!="sphinx-build") and not P.DO_V3D_UNSHARP :
f_tmp = (P.SOURCE_DISTANCE+P.DETECTOR_DISTANCE)/(float(P.SOURCE_DISTANCE)) # ??? /P.IMAGE_PIXEL_SIZE_2
P.VOXEL_SIZE = P.IMAGE_PIXEL_SIZE_1/f_tmp
Fact = (P.SOURCE_DISTANCE+P.DETECTOR_DISTANCE)/(float(P.SOURCE_DISTANCE))*P.VOXEL_SIZE*1.0/P.IMAGE_PIXEL_SIZE_2
Fact = (P.SOURCE_DISTANCE+P.DETECTOR_DISTANCE)/(float(P.SOURCE_DISTANCE))*P.VOXEL_SIZE*1.0/P.IMAGE_PIXEL_SIZE_2
DFa = +(abs(P.DECT_PSI)+abs(P.DECT_TILT) )*3.2/180.0*2
DR1 = (P.END_VOXEL_1 - P.START_VOXEL_1)/2.0
DR2 = (P.END_VOXEL_2 - P.START_VOXEL_2)/2.0
RADIUS = math.sqrt( DR1*DR1+DR2*DR2 )
......@@ -2741,8 +2754,8 @@ if (sys.argv[0][-12:]!="sphinx-build") and not P.DO_V3D_UNSHARP :
else:
alpha = (abs(P.first_slices[i])*P.VOXEL_SIZE/1.0e6)/(P.SOURCE_DISTANCE)
P.CONICITY_MARGIN_DOWN[i] = int( RADIUS*
alpha*Fact
P.CONICITY_MARGIN_DOWN[i] = int( RADIUS*(
alpha*(Fact) +DFa)
+0.01)+1
if P.DZPERPROJ!=0:
......@@ -2751,8 +2764,8 @@ if (sys.argv[0][-12:]!="sphinx-build") and not P.DO_V3D_UNSHARP :
alpha = (abs(P.last_slices[i])*P.VOXEL_SIZE/1.0e6)/(P.SOURCE_DISTANCE)
P.CONICITY_MARGIN_UP[i] = int( RADIUS*
alpha*Fact
P.CONICITY_MARGIN_UP[i] = int( RADIUS*(
alpha*(Fact) +DFa)
+0.01)+1
......@@ -2857,7 +2870,7 @@ def create_arrays_space_as_dictionary(proj_reading_dict):
if Parameters.VERBOSITY>2 : print( " rawdatatokens " )
if Parameters.VERBOSITY>2 : print( dims)
rawdatatokens = [ numpy.zeros( dims ,"f") for i in range(ntokensraw) ]
rawdatatokens = [ numpy.zeros( dims ,"f") for i in range(ntokensraw) ]
ff_rawdatatokens = [ numpy.zeros(ff_dims ,"f") for i in range(ntokensraw) ]
dims = numpy.max(P.size_s, axis=0)
......
......@@ -30,5 +30,5 @@
# is a problem for you.
#############################################################################*/
version = "2019b"
version = "h5fast"
import h5py
import sys
import numpy as np
import os
def create_virtual_file( vsource, pro_indxs, datasource_shape, mydtype, filename ):
    """Write *filename* containing a single virtual dataset named "data".

    The virtual dataset stacks, in order, the frames of *vsource* selected
    by *pro_indxs*; its shape is (len(pro_indxs),) + datasource_shape[1:],
    its dtype is *mydtype*, and unmapped values are filled with -5.
    """
    out_shape = tuple([len(pro_indxs)] + list(datasource_shape)[1:])
    layout = h5py.VirtualLayout(shape=out_shape, dtype=mydtype)
    for dest_row, src_row in enumerate(pro_indxs):
        layout[dest_row] = vsource[src_row]
    with h5py.File(filename, "w", libver="latest") as out:
        out.create_virtual_dataset("data", layout=layout, fillvalue=-5)
# ---------------------------------------------------------------------------
# Command-line driver: split a NeXus-style scan into separate virtual HDF5
# files (projections, background/darks, flat-field blocks) and print the
# corresponding PyHST2 input-file fragment on stdout.
#
#   argv[1]  fn   path of the source .h5 file
#   argv[2]  dn   name of the entry group inside fn
#   argv[3]  rep  output directory for the generated virtual files
# ---------------------------------------------------------------------------
fn = sys.argv[1]
dn = sys.argv[2]
rep = sys.argv[3]

pyhst2_input = """
"""

entry = h5py.File(fn, "r")[dn]

pro_indxs = []   # frame indexes of projections (image_key == 0)
ff_indxs = []    # one list of frame indexes per flat-field block (image_key == 1)
ff_poss = []     # projection count at which each flat-field block occurs
bck_indxs = []   # frame indexes of dark/background images (image_key == 2)

keys = entry["instrument/detector"]["image_key"]

old_ff_pos = -1
ffblock = None
ipro = 0
for i, k in enumerate(list(keys)):
    if k == 2:
        bck_indxs.append(i)
    if k == 0:
        pro_indxs.append(i)
        ipro += 1
    if k == 1:
        if ipro == old_ff_pos:
            # continuation of the current flat-field block
            ffblock.append(i)
        else:
            # a new flat-field block starts here
            # bug fix: the original created an empty list and lost the
            # first frame (index i) of every flat-field block
            ffblock = [i]
            ff_indxs.append(ffblock)
            ff_poss.append(ipro)
            old_ff_pos = ipro

datasource_shape = entry["instrument/detector/data"].shape
mydtype = entry["instrument/detector/data"].dtype
vsource = h5py.VirtualSource(fn, dn + "/instrument/detector/data", shape=datasource_shape)

# Projections: always written.
filename = os.path.join(rep, "pyhst2_projs.h5")
create_virtual_file(vsource, pro_indxs, datasource_shape, mydtype, filename)
pyhst2_input += """
FILE_PREFIX={}
PROJ_DS_NAME = "data"
""".format(filename)

# Background (darks): only when present.
if len(bck_indxs):
    filename = os.path.join(rep, "pyhst2_background.h5")
    create_virtual_file(vsource, bck_indxs, datasource_shape, mydtype, filename)
    # bug fix: this fragment previously pointed BACKGROUND_FILE at
    # pyhst2_projs.h5 instead of the background file just created
    pyhst2_input += """
BACKGROUND_FILE={}
BACKGROUND_DS_NAME = "data"
""".format(filename)

# Flat fields: one virtual file per block, plus the projection positions at
# which each block applies.  s_files / s_intervals are accumulated as Python
# list literals; the trailing comma is stripped with [:-1] and the closing
# "]" is supplied by the template below.
s_files = """["""
s_intervals = """["""
if len(ff_indxs):
    for ffblock, pos in zip(ff_indxs, ff_poss):   # bug fix: was ff_pos (NameError)
        s_intervals += """ {} ,""".format(pos)
        fname = "pyhst2_ff%05d.h5" % pos          # bug fix: was %ff_pos (undefined name)
        fname = os.path.join(rep, fname)
        s_files += """ "{}",""".format(fname)     # bug fix: was .format(fname= — syntax error
        create_virtual_file(vsource, ffblock, datasource_shape, mydtype, fname)
    pyhst2_input += """
FF_PREFIX={ffprefix}]
FF_DS_NAME = "data"
FF_INTERVALS={intervals}]
""".format(ffprefix=s_files[:-1], intervals=s_intervals[:-1])

print(pyhst2_input)
......@@ -327,7 +327,7 @@ def do_pyhst():
cython_c_ext = ".pyx"
else:
cython_c_ext = ".cpp"
from distutils.command.build_ext import build_ext
from distutils.command.build_ext import build_ext, Extension
# We subclass the build_ext class in order to handle compiler flags
......@@ -436,14 +436,14 @@ def do_pyhst():
module = Extension(name='PyHST2_'+version+'.Cspace',
sources=c_sorgenti ,
depends=depends,
sources=c_sorgenti ,
depends=depends,
library_dirs= mpilibs_dirs,
libraries=["fftw3f_threads", "fftw3f",hdf5_lib, "mpi"],
libraries=["fftw3f_threads", "fftw3f",hdf5_lib, "mpi"],
extra_link_args=['-fopenmp'] ,
extra_compile_args={'gcc': ["-fPIC",'-fopenmp' ]},
define_macros=define_macros,
include_dirs=[ CUDA['include'], numpy.get_include()] + mpi_dirs + hdf5_dirs )
extra_compile_args={'gcc': ["-fPIC",'-fopenmp',"-g" ]},
define_macros=define_macros,
include_dirs=[ CUDA['include'], numpy.get_include()] + mpi_dirs + hdf5_dirs )
return module
......
"""A simple example of building a virtual dataset.
This makes four 'source' HDF5 files, each with a 1D dataset of 100 numbers.
Then it makes a single 4x100 virtual dataset in a separate file, exposing
the four sources as one dataset.
"""
import h5py
import numpy as np
# create some sample data
data = np.arange(0, 100).reshape(1, 100) + np.arange(1, 9).reshape(8, 1)
# Create source files (0.h5 to 3.h5)
for n in range(4):
with h5py.File(f"{n}.h5", "w") as f:
d = f.create_dataset("data", (2,100,), "i4", data[2*n:2*n+2])
# Assemble virtual dataset
layout = h5py.VirtualLayout(shape=(4, 100), dtype="i4")
for n in range(4):
filename = "{}.h5".format(n)
vsource = h5py.VirtualSource(filename, "data", shape=(2,100,))[1]
layout[n] = vsource
# Add virtual dataset to output file
with h5py.File("VDS.h5", "w", libver="latest") as f:
f.create_virtual_dataset("vdata", layout, fillvalue=-5)
# f.create_dataset("data", data=data, dtype="i4")
# read data back
# virtual dataset is transparent for reader!
with h5py.File("VDS.h5", "r") as f:
print("Virtual dataset:")
print(f["vdata"][:, :10])
# print("Normal dataset:")
# print(f["data"][:, :10])
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment