Commit 8ec7fb02 authored by myron

added the fit by components in C++

parent b2d5e8dd
cmake_minimum_required (VERSION 3.8)
option (USE_STATIC_LIBRARIES "Link with static libraries if available" OFF)
option (TRY_CUDA "compile Cuda files" OFF)
# c++ as a basis
project (FRSV LANGUAGES CXX C)
# optionally we can use cuda
# to compile conditionally
include(CheckLanguage)
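# Illustrative sketch (not wired into this build yet): the TRY_CUDA option above
# could be honoured with the standard CheckLanguage pattern, e.g.
#   if(TRY_CUDA)
#     check_language(CUDA)
#     if(CMAKE_CUDA_COMPILER)
#       enable_language(CUDA)
#     endif()
#   endif()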
find_package(MPI)
if(MPI_C_FOUND)
  message(STATUS "YES : MPI support: configured variable MPI_C_COMPILER = ${MPI_C_COMPILER}")
  message(STATUS "  MPI_C_COMPILE_FLAGS   = ${MPI_C_COMPILE_FLAGS}")
  message(STATUS "  MPI_C_INCLUDE_PATH    = ${MPI_C_INCLUDE_PATH}")
  message(STATUS "  MPI_C_LINK_FLAGS      = ${MPI_C_LINK_FLAGS}")
  message(STATUS "  MPI_C_LIBRARIES       = ${MPI_C_LIBRARIES}")
  message(STATUS "  MPI_CXX_COMPILER      = ${MPI_CXX_COMPILER}")
  message(STATUS "  MPI_CXX_COMPILE_FLAGS = ${MPI_CXX_COMPILE_FLAGS}")
  message(STATUS "  MPI_CXX_INCLUDE_PATH  = ${MPI_CXX_INCLUDE_PATH}")
  message(STATUS "  MPI_CXX_LINK_FLAGS    = ${MPI_CXX_LINK_FLAGS}")
  message(STATUS "  MPI_CXX_LIBRARIES     = ${MPI_CXX_LIBRARIES}")
endif()
find_package(Boost 1.50 REQUIRED COMPONENTS filesystem system)
message(STATUS "Boost version: ${Boost_VERSION}")
if(Boost_FOUND)
  message(STATUS " FOUND Boost")
  message(STATUS "  Boost_INCLUDE_DIRS        = ${Boost_INCLUDE_DIRS}")
  message(STATUS "  Boost_LIBRARY_DIR_RELEASE = ${Boost_LIBRARY_DIR_RELEASE}")
endif()
find_package(Yaml-cpp)
if(Yaml-cpp_FOUND)
message(" FOUND YAML-CPP")
message(STATUS "YAML_CPP_INCLUDE_DIR ${YAML_CPP_INCLUDE_DIR} " )
message(STATUS "YAML_CPP_LIBRARIES ${YAML_CPP_LIBRARIES} " )
message(STATUS " YAML_CPP_VERSION = ${YAML_CPP_VERSION} " )
message(STATUS " YAML_CPP_LIBRARY_DIR = ${YAML_CPP_LIBRARY_DIR} " )
endif()
include(CheckCXXCompilerFlag)
# The version number.
set (FRSV_VERSION_MAJOR 1)
set (FRSV_VERSION_MINOR 0)
# Set minimum C++ to 2011 standards
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
# configure a header file to pass some of the CMake settings
# to the source code
configure_file (
"${PROJECT_SOURCE_DIR}/FRSVConfig.h.in"
"${PROJECT_BINARY_DIR}/FRSVConfig.h"
)
# add the binary tree to the search path for include files
# so that we will find FRSVConfig.h
include_directories("${PROJECT_BINARY_DIR}")
# default to RELEASE unless the user specifies a build type
if(NOT CMAKE_BUILD_TYPE)
  set(CMAKE_BUILD_TYPE RELEASE)
endif()
## set(CMAKE_CXX_FLAGS "-Wextra -Wall -mfpmath=sse -Ofast -flto -mcpu=native ${CMAKE_CXX_FLAGS} " )
set(CMAKE_CXX_FLAGS_DEBUG   "-g -Wextra -Wall -fopenmp ${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS_RELEASE "-g -Wextra -Wall -fopenmp -Ofast -mtune=native ${CMAKE_CXX_FLAGS}")
## set(CMAKE_CXX_FLAGS_PIPPO "-g -Wextra -Wall -Ofast -flto -mtune=native ${CMAKE_CXX_FLAGS}" )
set(CMAKE_CXX_FLAGS_PIPPO "-g -Wextra -Wall ${CMAKE_CXX_FLAGS}" )
# set(CMAKE_CXX_FLAGS "-g -Wextra -Wall -mfpmath=sse -Ofast -mtune=native ${CMAKE_CXX_FLAGS}" )
add_executable (frsv frsv.cc)
target_link_libraries( frsv PRIVATE yaml-cpp Boost::filesystem Boost::system )
target_include_directories ( frsv PRIVATE "${PROJECT_SOURCE_DIR}")
// the configured options and settings for FRSV
#define FRSV_VERSION_MAJOR @FRSV_VERSION_MAJOR@
#define FRSV_VERSION_MINOR @FRSV_VERSION_MINOR@
/* #define HASCUDA @HASCUDA@ */
/* #define HASAF @HASAF@ */
/* #cmakedefine USE_MYMATH */
import numpy as np
import h5py
import glob
import json
import os
BATCH_PARALLELISM = 4
cenom = np.array(
[ 0, 12.91684471961497 ,
1, 12.91601322225362 ,
2, 12.915539860755496 ,
3, 12.916480273530496 ,
4, 12.915731392088265 ,
5, 12.914869749579399 ,
6, 12.916264935543595 ,
7, 12.91573965859977 ,
8, 12.914987140018741 ,
9, 12.917537037788902 ,
10, 12.916944061630726 ,
11, 12.917218345706756 ,
12, 12.915891035359477 ,
13, 12.916352608181027 ,
14, 12.917177018098796 ,
15, 12.914005418194852 ,
16, 12.915154657961107 ,
17, 12.916191453728914 ,
18, 12.914335816165174 ,
19, 12.915077949987177 ,
20, 12.915914957907148 ,
21, 12.915288356811502 ,
22, 12.91598889195904 ,
23, 12.91695899376287 ,
24, 12.916900296092969 ,
25, 12.91604539858451 ,
26, 12.914871297189173 ,
27, 12.916231311661814 ,
28, 12.915245568912306 ,
29, 12.914471665531154 ,
30, 12.916385293003266 ,
31, 12.91497576912035 ,
32, 12.914105451420639 ,
33, 12.917157692493532 ,
34, 12.915718809628475 ,
35, 12.915646376430596 ,
36, 12.91406510719366 ,
37, 12.914584874090105 ,
38, 12.914253022577238 ,
39, 12.914765752067101 ,
40, 12.913952660702538 ,
41, 12.914056540248373 ,
42, 12.914686992985423 ,
43, 12.914358771568466 ,
44, 12.914322364160563 ,
45, 12.914834493680436 ,
46, 12.91468849314193 ,
47, 12.914392397943747 ,
48, 12.91526519846475 ,
49, 12.916546273857113 ,
50, 12.91754238416663 ,
51, 12.91462504324549 ,
52, 12.915558554542379 ,
53, 12.916489766393653 ,
54, 12.914555294751848 ,
55, 12.91532484033084 ,
56, 12.916251843583483 ,
57, 12.916091870082658 ,
58, 12.91604809986116 ,
59, 12.917105964460655 ,
60, 12.916403610900606 ,
61, 12.916009142769497 ,
62, 12.915288063762233 ,
63, 12.916156971954502 ,
64, 12.91517887482311 ,
65, 12.914460401828784 ,
66, 12.916150384998303 ,
67, 12.915275296447305 ,
68, 12.914661911784687 ,
69, 12.917387263528335 ,
70, 12.916532490728155 ,
71, 12.915658283253318 ])
cenom=np.reshape(cenom,[-1,2])
Enominal = np.median(cenom[:,1])
cenom[:,1] -= Enominal
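# cenom holds (analyser ROI index, centre-of-mass energy) pairs for the 72 ROIs;
# after the reshape it is a (72, 2) array and the second column becomes an offset
# with respect to the median energy Enominal.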
first_scann = 651 # 891
last_scann = 720 # 1015
thickness = 10 ### 25
roi_scann = 246 # 245 247 ####
other_rois_for_ref = [245,247]
#first_scann = 24
#last_scann = 26
n_energies = 7 # 6
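# Layout check: n_energies * thickness = 7 * 10 = 70 scans, i.e. scans
# 651 .. 651 + 70 - 1 = 720, consistent with last_scann above.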
def process_input(s, go=0, exploit_slurm_mpi=0, stop_omp=False):
    # write the YAML input for XRS_swissknife and launch it through mpirun
    with open("input_tmp_%d.par" % go, "w") as f:
        f.write(s)
background_activator = ""
if (go % BATCH_PARALLELISM ):
background_activator = "&"
prefix=""
if stop_omp:
prefix = prefix +"export OMP_NUM_THREADS=1 ;"
if ( exploit_slurm_mpi==0 ):
os.system(prefix +"mpirun -n 1 XRS_swissknife input_tmp_%d.par %s"%(go, background_activator))
elif ( exploit_slurm_mpi>0 ):
os.system(prefix + "mpirun XRS_swissknife input_tmp_%d.par %s"%(go, background_activator) )
else:
os.system(prefix + "mpirun -n %d XRS_swissknife input_tmp_%d.par %s"%(abs( exploit_slurm_mpi ), go, background_activator) )
def select_rois(roi_scan_num=6):
inputstring = """
create_rois:
expdata : '/data/id20/inhouse/data/run3_20/run3_es949/hydra'
scans : [{roi_scan_num}]
roiaddress : roi_{roi_scan_num}.h5:/extracted/ROI_AS_SELECTED
filter_path : mask.h5:/FILTER_MASK/filter
"""
s=inputstring.format(roi_scan_num = roi_scan_num )
process_input(s, exploit_slurm_mpi = 0 )
def extract_sample_givenrois(roi_scan_num=roi_scann, nick_name="org", Start=first_scann, End=(first_scann+thickness*n_energies), Thickness = thickness ):
inputstring = """
loadscan_2Dimages :
expdata : '/data/id20/inhouse/data/run3_20/run3_es949/hydra'
roiaddress : roi_{roi_scan_num}.h5:/extracted/ROI_AS_SELECTED
monitor_column : izero/0.000001
scan_interval : [{start}, {end}]
energy_column : sty
signaladdress : signals.h5:/{where}/_{start}_{end}
sumto1D : 0
monitorcolumn : izero/0.000001
"""
for start in range(Start,End, Thickness):
s=inputstring.format(start=str(start), end=str(start+Thickness) , where= nick_name ,roi_scan_num = roi_scan_num )
process_input(s, exploit_slurm_mpi = 1)
class InterpInfo:
def __init__(self, cenom, interp_file, source, custom_ene_list = None):
volum_list = list(interp_file[source].keys())
scan_num_list = np.array([ int( t.split("_") [1]) for t in volum_list])
ene_list = np.array([ interp_file[source][vn]["scans"]["Scan%03d"%sn ]["motorDict"]["energy"][()] for vn,sn in zip(volum_list, scan_num_list ) ])
print ( " ecco la scannumlist " , scan_num_list)
print (" ecco ene_list", ene_list)
self.volum_list = volum_list
self.scan_num_list = scan_num_list
self.ene_list = ene_list
order = np.argsort( self.ene_list )
self.ene_list = self.ene_list [order]
if custom_ene_list is None:
self.custom_ene_list = self.ene_list
else:
self.custom_ene_list = custom_ene_list
self.scan_num_list = self.scan_num_list [order]
self.volum_list = [ self.volum_list [ii] for ii in order ]
self.interp_file=interp_file
self.source= source
self.cenom=cenom
def interpola(self):
print ( " ECCO I DATI ")
print ( self.ene_list )
print ( self.cenom )
info_dict = {}
for i_intervallo in range(len(self.custom_ene_list)):
info_dict[str(i_intervallo)] = {}
info_dict[str(i_intervallo)]["E"] = self.custom_ene_list[ i_intervallo ]
info_dict[str(i_intervallo)]["coefficients"]={}
for t_vn, t_sn, t_ene in list(zip(self.volum_list, self.scan_num_list, self.ene_list )):
info_dict[str(i_intervallo)]["coefficients" ][ t_vn ]={}
for i_intervallo in range(len(self.custom_ene_list)-1):
cE1 = self.custom_ene_list[ i_intervallo ]
cE2 = self.custom_ene_list[ i_intervallo+1 ]
for t_vn, t_sn, t_ene in list(zip(self.volum_list, self.scan_num_list, self.ene_list ))[0:]:
for roi_num, de in enumerate( self.cenom ):
if t_ene+de < cE1 or t_ene+de > cE2:
continue
alpha = (cE2-(t_ene+de) )/(cE2-cE1)
info_dict[str(i_intervallo)]["coefficients" ][ str(t_vn) ][ str(roi_num) ] = alpha
info_dict[str(i_intervallo+1)]["coefficients"][ str(t_vn) ][ str(roi_num) ] = 1-alpha
print( info_dict)
return info_dict
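    # Worked example (illustrative numbers): if a ROI's effective energy t_ene + de
    # falls between two custom energies cE1 = 13.2530 and cE2 = 13.2540, say at
    # 13.2533, then alpha = (cE2 - 13.2533) / (cE2 - cE1) = 0.7, so that ROI
    # contributes with weight 0.7 to interval i_intervallo and 0.3 to i_intervallo+1.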
def get_reference(roi_scan_num=roi_scann):
inputstring = """
loadscan_2Dimages :
expdata : '/data/id20/inhouse/data/run3_20/run3_es949/hydra'
roiaddress : roi_{roi_scan_num}.h5:/extracted/ROI_AS_SELECTED
monitor_column : izero/0.000001
scan_interval : [{roi_scan_num},{roi_scan_num_plus1} ]
signaladdress : calibration_scan
isolateSpot : 6
save_also_roi : True
sumto1D : 0
energycolumn : 'stx'
monitorcolumn : izero/0.000001
"""
s=inputstring.format( roi_scan_num= roi_scan_num, roi_scan_num_plus1= roi_scan_num+1 )
process_input( s , exploit_slurm_mpi = 1)
def resynthetise_scan(
old_scan_address="roi_%d.h5:/extracted/ROI_AS_SELECTED/calibration_scan/scans/Scan%03d"%(roi_scann, roi_scann),
response_file = "reponse.h5" ,
target_filename = "newrois.h5:/ROIS/"
):
inputstring = """
superR_recreate_rois :
### we have calculated the responses in responsefilename
### and we want to enlarge the scan by a margin of 3 times
### the original scan on the right and on the left
    ### ( so for a total expansion factor of 7 )
responsefilename : {response_file}
nex : 0
## the old scan covered by the old rois
old_scan_address : {old_scan_address}
    ## where the new rois and the new scan are written
target_filename : {target_filename}
filter_rois : 0
"""
s=inputstring.format( response_file = response_file , target_filename = target_filename, old_scan_address=old_scan_address )
process_input( s , exploit_slurm_mpi = 0, stop_omp = True)
def synthetise_response(scan_address=None, target_file=None):
inputstring = """
superR_fit_responses :
foil_scan_address : "{scan_address}"
nref : 7 # the number of subdivision per pixel dimension used to
# represent the optical response function at higher resolution
niter_optical : 100 # the number of iterations used in the optimisation of the optical
# response
beta_optical : 0.1 # The L1 norm factor in the regularisation
# term for the optical functions
pixel_dim : 1 # The pixel response function is represented with a
# pixel_dim**2 array
niter_pixel : 10 # The number of iterations in the pixel response optimisation
# phase. A negative number stands for ISTA, positive for FISTA
beta_pixel : 0.0 # L1 factor for the pixel response regularisation
    ## The used trajectories are always written with the calculated response.
    ## They can be reloaded and used as initialisation (and frozen with do_refine_trajectory : 0 )
    ## Uncomment the following line if you want to reload a set of trajectories;
    ## without this option the trajectories are initialised from the spot drifts
##
# reload_trajectories_file : "response.h5"
filter_rois : 0
######
    ## The method first finds an estimate of the foil scan trajectory on each roi,
    ## then, based on this, obtains a fit of the optical response function
    ## assuming a flat pixel response. Finally the pixel response is optimised.
##
## There is a final phase where a global optimisation
## is done in niter_global steps.
##
## Each step is composed of optical response fit, followed by a pixel response fit.
## If do_refine_trajectory is different from zero, the trajectory is reoptimised at each step
##
niter_global : 3
    ## if do_refine_trajectory=1 the start and end points of the trajectory are free
    ## if =2 the start and end points are forced to a trajectory obtained
    ## from a reference scan : the foil scan may be short, so one can use the scan of
    ## another object to provide it : key *trajectory_reference_scan_address*
##
do_refine_trajectory : 1
## optional: only if do_refine_trajectory = 2
trajectory_reference_scansequence_address : "demo_newrois.h5:/ROI_FOIL/images/scans/"
trajectory_threshold : 0.1
    ## whether the pixel response function is forced to be symmetrical
simmetrizza : 1
## where the found responses are written
target_file : {target_file}
# target_file : "fitted_responses.h5"
"""
s=inputstring.format( scan_address=scan_address , target_file=target_file )
process_input( s , exploit_slurm_mpi = 1, stop_omp = True)
def get_scalars( Start = first_scann, Thickness = thickness , roi_scan_num=roi_scann , nick = None, ref_file=None):
inputstring = """
superR_scal_deltaXimages_Esynt :
# sample_address : interpolated_signals.h5:/{nick}/_{start}_{end}/scans
sample_address : signals.h5:/{nick}/_{start}_{end}/scans
delta_address : {ref_file}/scans/Scan{roi_scan_num}
# delta_address : roi_{roi_scan_num}.h5:/extracted/ROI_AS_SELECTED/calibration_scan/scans/Scan{roi_scan_num}
nbin : 1
optional_solution :
target_address : volumes.h5:/{nick}/_{start}_{end}/scal_prods
# roi_keys : "7"
"""
# volumes.h5:/{nick}/_{start}_{end}/volume
# volumes.h5:/{nick}/_{start}_{end}/volume
# inputstring = """
# superR_scal_deltaXimages :
# sample_address : {signals_file}:/{nick}/_{start}_{end}/scans
# delta_address : roi_{roi_scan_num}.h5:/extracted/ROI_AS_SELECTED/calibration_scan/scans/Scan00{roi_scan_num}
# nbin : 2
# optional_solution : volumes.h5:/{nick}/_{start}_{end}/volume
# target_address : volumes.h5:/{nick}/_{start}_{end}/scal_prods
# """
s=inputstring.format(start=Start, end=Start+Thickness , roi_scan_num = roi_scan_num, nick=nick , ref_file=ref_file )
process_input(s, exploit_slurm_mpi = 1)
def get_volume(nick = "reinterp_org" ):
inputstring = """
superR_getVolume_Esynt :
scalprods_address : volumes.h5:/{nick}
target_address : volumes.h5:/{nick}/volumes
dict_interp : interpolation.json
debin : [1, 1]
output_prefix : volumes/test0_
"""
s=inputstring.format( nick=nick )
print ( " INPUT ", s)
process_input(s, exploit_slurm_mpi = 1)
def myOrder(tok):
if("volume" not in tok):
tokens = tok.split("_")
print( tokens)
return int(tokens[1])*10000+ int(tokens[2])
else:
return 0
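# Illustrative example: a group named "_651_661" sorts with key 651*10000 + 661,
# while any key containing "volume" (e.g. "concatenated_volume") sorts first with key 0.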
def reshuffle( volumefile = "volumes.h5", nick = None ):
h5file_root = h5py.File( volumefile ,"r+" )
h5file = h5file_root[nick]
scankeys = list( h5file.keys())
scankeys.sort(key=myOrder)
print( scankeys)
volumes = []
for k in scankeys:
if k[:1]!="_":
continue
print( k)
if "volume" in h5file[k]:
volumes.append( h5file[k]["volume"] )
# volume = np.concatenate(volumes,axis=0)
volume = np.array(volumes)
if "concatenated_volume" in h5file:
del h5file["concatenated_volume"]
h5file["concatenated_volume"]=volume
h5file_root.close()
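# Note (assuming every per-interval group stores a "volume" dataset of the same shape):
# reshuffle stacks them along a new leading axis, so e.g. 7 intervals with volumes of
# shape (Z, Y, X) end up in "concatenated_volume" with shape (7, Z, Y, X).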
## THE FOLLOWING PART IS THE RELEVANT ONE
if(0): # ROI selection and reference scan
select_rois(roi_scan_num=roi_scann)
if(0): # SAMPLE extraction
extract_sample_givenrois(roi_scan_num=roi_scann, nick_name="org", Start = first_scann, End = (first_scann+thickness*n_energies), Thickness = thickness )
if(0): # INTERPOLATION
interp_file_source = h5py.File("signals.h5","r+")
# i_info = InterpInfo( cenom[:,1] , interp_file_source, "org", custom_ene_list = [ 12.913005 + 2.0e-4 , 13.253006, 13.25551, 13.258008, 13.260506 , 13.263004, 13.265505 -2.0e-4 ] )
i_info = InterpInfo( cenom[:,1] , interp_file_source, "org", custom_ene_list = np.arange(13.253006, 13.265505 , 0.01/10) )
info_dict = i_info.interpola()
interp_file_source.close()
json.dump(info_dict,open("interpolation.json","w"))
if(0): # of course we need the REFERENCE SCAN
clip1= 90
clip2= 180
## get_reference(roi_scan_num=247)
get_reference(roi_scan_num=roi_scann)
for other in other_rois_for_ref:
os.system("cp roi_%d.h5 roi_%d.h5"%(roi_scann, other) )
if clip1 is not None:
ftarget = h5py.File( "roi_%d.h5" % roi_scann ,"r+")
target_group = ftarget["extracted/ROI_AS_SELECTED/calibration_scan/scans/Scan%03d"% roi_scann ]
for k in target_group.keys():
if k != "motorDict":
print(" SHRINKING scan for ROI %s in file roi_%d.h5 " %( k, roi_scann ))
for dsn in ["matrix", "monitor", "xscale"]:
mat = target_group[k][dsn][()]
del target_group[k][dsn]
target_group[k][dsn] = mat[clip1:clip2]
ftarget.close()
for other in other_rois_for_ref:
get_reference(roi_scan_num=other)
ftarget = h5py.File( "roi_%d.h5" % roi_scann ,"r+")
fsource = h5py.File( "roi_%d.h5" % other , "r")
source_group = fsource["extracted/ROI_AS_SELECTED/calibration_scan/scans/Scan%03d"% other ]
target_group = ftarget["extracted/ROI_AS_SELECTED/calibration_scan/scans/Scan%03d"% roi_scann ]
for k in target_group.keys():
if k != "motorDict":
print(" ADDING data for ROI %s from file roi_%d.h5 " %( k, other ))
mat = source_group[k]["matrix"][()]
if clip1 is not None:
mat = mat[clip1:clip2]
target_group[k]["matrix"][:] += mat
print( " SUCCESS ")
if(0):
    ## re-synthesis : fit of the response
synthetise_response(
scan_address="roi_%d.h5:/extracted/ROI_AS_SELECTED/calibration_scan/scans/Scan%03d"%(roi_scann, roi_scann),
target_file = "reponse.h5:/FIT"
)
if(0):
    ## re-synthesis : re-synthesis of the scan
resynthetise_scan(
old_scan_address="roi_%d.h5:/extracted/ROI_AS_SELECTED/calibration_scan/scans/Scan%03d"%(roi_scann, roi_scann),
response_file = "reponse.h5:/FIT" ,
target_filename = "newrois.h5:/ROIS"
)
if(0): ## The scalar products, which define the equations to invert
for start in range(first_scann,(first_scann+n_energies*thickness),thickness):
get_scalars( Start = start, Thickness = thickness , roi_scan_num=roi_scann ,nick="org" , ref_file = "newrois.h5:/ROIS" )
if(1): # inversion of the equations
get_volume(nick = "org")
if(0):
fl = glob.glob("volumes_*.h5")
target = h5py.File("volumes.h5","r+" )
for fn in fl:
source = h5py.File( fn ,"r" )
keylist = list( source.keys() )
for k in keylist:
keylist2 = list( source[k].keys() )
for k2 in keylist2:
print(" copiando ", k,k2, " da ", fn)
if k2 +"/volume" in target[k]: