Commit d6977ca5 authored by Damien Naudet

Using QSpaceH5.

parent 0e0c44e9
......@@ -46,7 +46,7 @@ def viewWidgetFromProjectEvent(project, event):
index = event.index
processId = index.data(ModelRoles.XsocsProcessId)
eventData = event.data
print(index, processId, eventData)
widgetCls = None
xsocsType = index.data(ModelRoles.XsocsNodeType)
if xsocsType == h5py.ExternalLink:
......@@ -72,15 +72,6 @@ def viewWidgetFromProjectEvent(project, event):
widget.setPlotData(*plotData)
return widget
# # show raw data
# plotData = event.plotData()
# x, y, data = plotData
# widget = RealSpaceWidget(index)
# widget.setPlotData(x, y, data)
# elif processLevel == XsocsProject.XsocsQSpace:
# # show qspace data
# pass
# else:
print('Nothing to DO')
return None
......
......@@ -28,3 +28,64 @@ from __future__ import absolute_import
__authors__ = ["D. Naudet"]
__license__ = "MIT"
__date__ = "15/09/2016"
import h5py
from .ProjectDef import ProcessId
from .HybridItem import HybridItem
from ...io.QSpaceH5 import QSpaceH5
from .ProjectItem import ProjectItem
from .ProjectDef import ItemClassDef
@ItemClassDef('QSpaceItem')
class QSpaceItem(ProjectItem):
QSpaceFilePath = 'File'
AcqParamsPath = 'AcqParams'
SumPath = 'Sum'
def __init__(self, *args, **kwargs):
super(QSpaceItem, self).__init__(*args, **kwargs)
self.__qspaceFile = None
@property
def qspaceFile(self):
""" The name of the input data file. """
if self.__qspaceFile is None:
with self._get_file() as h5f:
path = self.path + '/' + QSpaceItem.QSpaceFilePath
group = h5f.get(path)
if group:
self.__qspaceFile = group.file.filename
del group
return self.__qspaceFile
@qspaceFile.setter
def qspaceFile(self, qspace_f):
# TODO : make sure file exists and is readable
if self.qspaceFile is not None:
raise ValueError('Q-space file is already set.')
# adding a link to the q-space file
qspaceH5 = QSpaceH5(qspace_f)
self.__qspaceFile = qspace_f
qspaceRoot = '/'.join([self.path, QSpaceItem.QSpaceFilePath])
self.add_file_link(qspaceRoot, qspace_f, '/')
intensityGrp = HybridItem(self.filename,
QSpaceItem.SumPath,
processLevel=ProcessId.QSpace)
sumPath = qspaceRoot + '/' + QSpaceH5.qspace_sum_path
xPath = qspaceRoot + '/' + QSpaceH5.sample_x_path
yPath = qspaceRoot + '/' + QSpaceH5.sample_y_path
sumLink = h5py.SoftLink(sumPath)
xLink = h5py.SoftLink(xPath)
yLink = h5py.SoftLink(yPath)
with qspaceH5:
intensityGrp.setScatter(xLink,
yLink,
sumLink)
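For context, a minimal sketch of how this new item might be driven from a project file; the file names, the item path and the (filename, path) constructor signature below are assumptions inferred from the HybridItem call above, not confirmed by this diff:

    # Hypothetical usage sketch -- 'project.h5', '/QSpace' and the constructor
    # arguments are illustrative assumptions.
    item = QSpaceItem('project.h5', '/QSpace')
    item.qspaceFile = 'qspace_results.h5'   # adds the file link and the summed-intensity scatter
    print(item.qspaceFile)                  # resolves back to the linked q-space file name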
......@@ -57,7 +57,7 @@ class SourceItem(ProjectItem):
""" The name of the input data file. """
if self.__xsocsFile is None:
with self._get_file() as h5f:
path = self.path + SourceItem.XSocsFilePath
path = self.path + '/' + SourceItem.XSocsFilePath
group = h5f.get(path)
if group:
self.__xsocsFile = group.file.filename
......@@ -74,15 +74,15 @@ class SourceItem(ProjectItem):
raise ValueError('Xsocs input file is already set.')
# adding a link to the source file
xsocsH5 = h5f = XsocsH5(xsocs_f)
xsocsH5 = XsocsH5(xsocs_f)
self.__xsocsFile = xsocs_f
path = '/'.join([self.path, SourceItem.XSocsFilePath])
self.add_file_link(path, xsocs_f, '/')
# adding parameter values to the source folder
entries = h5f.entries()
entries = xsocsH5.entries()
# TODO : make sure that all parameters are consistent
scan_params = h5f.scan_params(entries[0])
scan_params = xsocsH5.scan_params(entries[0])
path_tpl = '{0}/{1}/{{0}}'.format(self.path, SourceItem.AcqParamsPath)
for key, value in scan_params.items():
......@@ -111,14 +111,14 @@ class SourceItem(ProjectItem):
path_tpl.format(entry_stripped,
'intensity'),
processLevel=ProcessId.Input)
data = h5f.image_cumul(entry)
pos_0, pos_1 = h5f.scan_positions(entry)
data = xsocsH5.image_cumul(entry)
pos_0, pos_1 = xsocsH5.scan_positions(entry)
# intensity as a scatter plot
dataGrp.setScatter(pos_0, pos_1, data)
# intensity as an image
scan_params = h5f.scan_params(entry)
scan_params = xsocsH5.scan_params(entry)
# xSlice = np.s_[0:scan_params['motor_0_steps']:1]
# ySlice = np.s_[0::scan_params['motor_0_steps']]
# dataGrp.setImageFromScatter(xSlice, ySlice)
......
......@@ -30,20 +30,148 @@ __license__ = "MIT"
__date__ = "15/09/2016"
import weakref
from collections import OrderedDict
from contextlib import contextmanager
import h5py as _h5py
import numpy as _np
from .XsocsH5Base import XsocsH5Base
class QspaceH5(XsocsH5Base):
class QSpaceH5(XsocsH5Base):
qspace_path = 'Data/qspace'
qx_path = 'Data/qx'
qy_path = 'Data/qy'
qz_path = 'Data/qz'
histo_path = 'Data/histo'
sample_x_path = 'Data/sample_x'
sample_y_path = 'Data/sample_y'
qspace_sum_path = 'Data/qspace_sum'
def __init__(self, entry, h5_f, mode='r'):
super(QspaceH5, self).__init__(h5_f, mode=mode)
def __init__(self, h5_f, mode='r'):
super(QSpaceH5, self).__init__(h5_f, mode=mode)
self.__entry = entry
@contextmanager
def qspace_dset_ctx(self):
"""
Context manager for the image dataset.
WARNING: only to be used as a context manager!
WARNING: the data set must exist. see also QSpaceH5Writer.init_cube
"""
with self._get_file() as h5_file:
qspace_dset = h5_file[QSpaceH5.qspace_path]
yield weakref.proxy(qspace_dset)
del qspace_dset
# def
\ No newline at end of file
@contextmanager
def qspace_sum_dset_ctx(self):
"""
Context manager for the image dataset.
WARNING: only to be used as a context manager!
WARNING: the data set must exist. see also QSpaceH5Writer.init_cube
"""
with self._get_file() as h5_file:
qspace_sum_dset = h5_file[QSpaceH5.qspace_sum_path]
yield weakref.proxy(qspace_sum_dset)
del qspace_sum_dset
qspace = property(lambda self: self._get_array_data(QSpaceH5.qspace_path))
qx = property(lambda self: self._get_array_data(QSpaceH5.qx_path))
qy = property(lambda self: self._get_array_data(QSpaceH5.qy_path))
qz = property(lambda self: self._get_array_data(QSpaceH5.qz_path))
sample_x = property(lambda self:
self._get_array_data(QSpaceH5.sample_x_path))
sample_y = property(lambda self:
self._get_array_data(QSpaceH5.sample_y_path))
histo = property(lambda self: self._get_array_data(QSpaceH5.histo_path))
qspace_sum = property(lambda self:
self._get_array_data(QSpaceH5.qspace_sum_path))
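As a usage note, the read side of the new wrapper can be exercised roughly as follows (a sketch only; 'qspace_results.h5' is a placeholder file name):

    # Sketch: read q-space results back through the new QSpaceH5 reader.
    qspace_h5 = QSpaceH5('qspace_results.h5')                   # read-only by default
    with qspace_h5:
        qx, qy, qz = qspace_h5.qx, qspace_h5.qy, qspace_h5.qz   # q coordinates along each axis
        intensities = qspace_h5.qspace_sum                      # summed intensity per sample position
        with qspace_h5.qspace_dset_ctx() as dset:
            first_cube = dset[0]                                 # load a single q-space cube lazily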
class QSpaceH5Writer(QSpaceH5):
cube_dtype = _np.float32
histo_dtype = _np.int32
position_dtype = _np.float32
q_bins_dtype = _np.float64
def __init__(self, h5_f, mode='a'):
self.mode = mode
super(QSpaceH5Writer, self).__init__(h5_f, mode=mode,)
self.__cube_init = False
def init_file(self,
n_positions,
qspace_shape,
qspace_chunks=None,
qspace_sum_chunks=None,
compression='lzf'):
# TODO : move this to XsocsH5Base ('init_dataset')
if not self.__cube_init:
with self._get_file() as h5f:
shapes = [(n_positions,) + qspace_shape,
qspace_shape[0:1],
qspace_shape[1:2],
qspace_shape[2:3],
qspace_shape,
(n_positions,),
(n_positions,),
(n_positions,)]
paths = [QSpaceH5.qspace_path,
QSpaceH5.qx_path,
QSpaceH5.qy_path,
QSpaceH5.qz_path,
QSpaceH5.histo_path,
QSpaceH5.sample_x_path,
QSpaceH5.sample_y_path,
QSpaceH5.qspace_sum_path]
dtypes = [QSpaceH5Writer.cube_dtype,
QSpaceH5Writer.q_bins_dtype,
QSpaceH5Writer.q_bins_dtype,
QSpaceH5Writer.q_bins_dtype,
QSpaceH5Writer.histo_dtype,
QSpaceH5Writer.position_dtype,
QSpaceH5Writer.position_dtype,
QSpaceH5Writer.cube_dtype]
chunks = [qspace_chunks,
None, None, None, None, None, None,
qspace_sum_chunks]
params = zip(shapes, paths, dtypes, chunks)
for shape, path, dtype, chunk in params:
print(shape, path, dtype, chunk)
h5f.require_dataset(path,
shape=shape,
dtype=dtype,
compression=compression,
chunks=chunk)
def set_qx(self, qx):
self._set_array_data(QSpaceH5.qx_path, qx)
def set_qy(self, qy):
self._set_array_data(QSpaceH5.qy_path, qy)
def set_qz(self, qz):
self._set_array_data(QSpaceH5.qz_path, qz)
def set_sample_x(self, sample_x):
self._set_array_data(QSpaceH5.sample_x_path, sample_x)
def set_sample_y(self, sample_y):
self._set_array_data(QSpaceH5.sample_y_path, sample_y)
def set_histo(self, histo):
self._set_array_data(QSpaceH5.histo_path, histo)
def set_qspace_sum(self, qspace_sum):
self._set_array_data(QSpaceH5.qspace_sum_path, qspace_sum)
def set_position_data(self, pos_idx, qspace, qspace_sum):
with self._get_file() as h5f:
h5f[QSpaceH5.qspace_path][pos_idx] = qspace
h5f[QSpaceH5.qspace_sum_path][pos_idx] = qspace_sum
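A rough sketch of how the writer side is meant to be driven, with illustrative sizes and a placeholder output file name; the actual caller (the q-space conversion code) is not part of this diff:

    import numpy as np

    # Sketch: create all datasets once, then fill one sample position at a time.
    n_pos, qspace_shape = 4, (8, 8, 8)                       # illustrative sizes
    writer = QSpaceH5Writer('qspace_results.h5')             # opened in 'a' mode by default
    writer.init_file(n_pos, qspace_shape)                    # require_dataset creates every array up front
    writer.set_qx(np.linspace(-1., 1., qspace_shape[0]))
    writer.set_qy(np.linspace(-1., 1., qspace_shape[1]))
    writer.set_qz(np.linspace(-1., 1., qspace_shape[2]))
    for idx in range(n_pos):
        cube = np.zeros(qspace_shape, dtype=np.float32)      # placeholder q-space volume
        writer.set_position_data(idx, cube, cube.sum())      # one cube plus its summed intensity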
......@@ -29,7 +29,7 @@ __authors__ = ["D. Naudet"]
__license__ = "MIT"
__date__ = "15/09/2016"
import weakref
from functools import partial
from contextlib import contextmanager
......@@ -147,6 +147,22 @@ class XsocsH5Base(object):
with self._get_file() as h5_file:
h5_file[in_path] = _h5py.ExternalLink(file_name, ext_path)
@contextmanager
def item_context(self, item_path, **kwargs):
"""
Context manager for the image dataset.
WARNING: only to be used as a context manager!
WARNING: the data set must exist. see also QSpaceH5Writer.init_cube
"""
no_proxy = kwargs.get('no_proxy') is not None
with self._get_file() as h5_file:
item = h5_file[item_path]
if no_proxy:
yield item
else:
yield weakref.proxy(item)
del item
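For illustration, a small sketch of how this generic context might be used, assuming qspace_h5 is an open QSpaceH5 instance as in the examples above (QSpaceH5 is just one possible caller):

    # Sketch: access arbitrary items through the new generic context manager.
    with qspace_h5.item_context(QSpaceH5.histo_path) as histo_dset:
        n_filled = (histo_dset[:] > 0).sum()                 # the weakref proxy behaves like the dataset
    with qspace_h5.item_context(QSpaceH5.qspace_path, no_proxy=True) as dset:
        print(dset.shape, dset.dtype)                        # no_proxy yields the h5py object itself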
def copy_group(self, src_h5f, src_path, dest_path):
"""
Recursively copies an object from one HDF5 file to another.
......
......@@ -28,7 +28,6 @@ __authors__ = ["D. Naudet"]
__date__ = "01/06/2016"
__license__ = "MIT"
import os
import time
import ctypes
import multiprocessing as mp
......@@ -39,6 +38,7 @@ import numpy as np
from scipy.optimize import leastsq
#from silx.math import curve_fit
from ..io import QSpaceH5
disp_times = False
......@@ -105,22 +105,19 @@ def peak_fit(qspace_f,
if fit_type == FitTypes.CENTROID:
fit_fn = _qspace_centroid
with h5py.File(qspace_f, 'r') as qspace_h5:
with QSpaceH5.QSpaceH5(qspace_f) as qspace_h5:
with qspace_h5.qspace_dset_ctx() as dset:
qdata_shape = dset.shape
q_x = qspace_h5['bins_edges/x'][:]
q_y = qspace_h5['bins_edges/y'][:]
q_z = qspace_h5['bins_edges/z'][:]
qdata = qspace_h5['data/qspace']
n_points = qdata.shape[0]
n_points = qdata_shape[0]
if indices is None:
indices = range(n_points)
n_indices = len(indices)
x_pos = qspace_h5['geom/x'][indices]
y_pos = qspace_h5['geom/y'][indices]
x_pos = qspace_h5.sample_x[indices]
y_pos = qspace_h5.sample_y[indices]
shared_res = mp_sharedctypes.RawArray(ctypes.c_double, n_indices * 9)
# TODO : find something better
......@@ -128,9 +125,32 @@ def peak_fit(qspace_f,
success = np.ndarray((n_indices,), dtype=np.bool)
success[:] = True
# this has to be done otherwise h5py complains about not being
# able to open compressed datasets from other processes
del qdata
# with h5py.File(qspace_f, 'r') as qspace_h5:
#
# q_x = qspace_h5['bins_edges/x'][:]
# q_y = qspace_h5['bins_edges/y'][:]
# q_z = qspace_h5['bins_edges/z'][:]
# qdata = qspace_h5['data/qspace']
#
# n_points = qdata.shape[0]
#
# if indices is None:
# indices = range(n_points)
#
# n_indices = len(indices)
#
# x_pos = qspace_h5['geom/x'][indices]
# y_pos = qspace_h5['geom/y'][indices]
#
# shared_res = mp_sharedctypes.RawArray(ctypes.c_double, n_indices * 9)
# # TODO : find something better
# shared_success = mp_sharedctypes.RawArray(ctypes.c_bool, n_indices)
# success = np.ndarray((n_indices,), dtype=np.bool)
# success[:] = True
#
# # this has to be done otherwise h5py complains about not being
# # able to open compressed datasets from other processes
# del qdata
results = np.ndarray((n_indices, 11), dtype=np.double)
results[:, 0] = x_pos
......@@ -256,17 +276,29 @@ def _fit_process(th_idx):
results = np.frombuffer(shared_res)
results.shape = result_shape
success = np.frombuffer(shared_success, dtype=bool)
qspace_h5 = QSpaceH5.QSpaceH5(qspace_f)
#TODO : timeout to check if it has been canceled
#read_lock.acquire()
with h5py.File(qspace_f, 'r') as qspace_h5:
q_x = qspace_h5['bins_edges/x'][:]
q_y = qspace_h5['bins_edges/y'][:]
q_z = qspace_h5['bins_edges/z'][:]
q_shape = qspace_h5['data/qspace'].shape
q_dtype = qspace_h5['data/qspace'].dtype
mask = np.where(qspace_h5['histo'][:] > 0)
weights = qspace_h5['histo'][:][mask]
# with h5py.File(qspace_f, 'r') as qspace_h5:
# q_x = qspace_h5['bins_edges/x'][:]
# q_y = qspace_h5['bins_edges/y'][:]
# q_z = qspace_h5['bins_edges/z'][:]
# q_shape = qspace_h5['data/qspace'].shape
# q_dtype = qspace_h5['data/qspace'].dtype
# mask = np.where(qspace_h5['histo'][:] > 0)
# weights = qspace_h5['histo'][:][mask]
with qspace_h5 as qspace_h5:
q_x = qspace_h5.qx
q_y = qspace_h5.qy
q_z = qspace_h5.qz
with qspace_h5.qspace_dset_ctx() as dset:
q_shape = dset.shape
q_dtype = dset.dtype
histo = qspace_h5.histo
mask = np.where(histo > 0)
weights = histo[mask]
#read_lock.release()
#print weights.max(), min(weights)
cube = np.ascontiguousarray(np.zeros(q_shape[1:]),
......@@ -287,10 +319,14 @@ def _fit_process(th_idx):
print('Processing cube {0}/{1}.'.format(i_cube, result_shape[0]))
t0 = time.time()
with h5py.File(qspace_f, 'r') as qspace_h5:
qspace_h5['data/qspace'].read_direct(cube,
source_sel=np.s_[i_cube],
dest_sel=None)
# with h5py.File(qspace_f, 'r') as qspace_h5:
# qspace_h5['data/qspace'].read_direct(cube,
# source_sel=np.s_[i_cube],
# dest_sel=None)
with qspace_h5.qspace_dset_ctx() as dset:
dset.read_direct(cube,
source_sel=np.s_[i_cube],
dest_sel=None)
t_read += time.time() - t0
t0 = time.time()
......