Commit e16f4398 authored by Sebastien Petitdemange, committed by Vincent Michel

pep8

parent 1caf9975
-import os, errno
+import os
+import errno
 import h5py

-from bliss.scanning.chain import AcquisitionDevice,AcquisitionMaster
+from bliss.scanning.chain import AcquisitionDevice, AcquisitionMaster


 class FileOrganizer(object):
-    def __init__(self,root_path,
+    def __init__(self, root_path,
                  windows_path_mapping=None,
-                 detector_temporay_path=None,**keys):
+                 detector_temporay_path=None, **keys):
         """ A default way to organize file structure

         windows_path_mapping -- transform unix path to windows
@@ -17,35 +19,40 @@ class FileOrganizer(object):
         self._windows_path_mapping = windows_path_mapping or dict()
         self._detector_temporay_path = detector_temporay_path or dict()


 class Hdf5Organizer(FileOrganizer):
-    def __init__(self,root_path,**keys):
-        FileOrganizer.__init__(self,root_path,**keys)
+    def __init__(self, root_path, **keys):
+        FileOrganizer.__init__(self, root_path, **keys)
         self.file = None

     def _acq_device_event(self, event_dict=None, signal=None, sender=None):
         print 'received', signal, 'from', sender, ":", event_dict

-    def prepare(self,scan_recorder,scan_info,devices_tree):
-        path_suffix = scan_recorder.node.db_name().replace(':',os.path.sep)
-        full_path = os.path.join(self._root_path,path_suffix)
+    def prepare(self, scan_recorder, scan_info, devices_tree):
+        path_suffix = scan_recorder.node.db_name().replace(':', os.path.sep)
+        full_path = os.path.join(self._root_path, path_suffix)
         try:
             os.makedirs(full_path)
         except OSError as exc:  # Python >2.5
             if exc.errno == errno.EEXIST and os.path.isdir(path):
                 pass
-            else: raise
+            else:
+                raise

-        self.file = h5py.File(os.path.join(full_path,'data.h5'))
-        scan_entry = h5py.Group(self.file,scan_recorder.name,create=True)
+        self.file = h5py.File(os.path.join(full_path, 'data.h5'))
+        scan_entry = h5py.Group(self.file, scan_recorder.name, create=True)
         scan_entry.attrs['NX_class'] = 'NXentry'
-        measurement = h5py.Group(scan_entry,'measurement',create=True)
+        measurement = h5py.Group(scan_entry, 'measurement', create=True)

         master_id = 0
-        for dev,node in scan_recorder.nodes.iteritems():
-            if isinstance(dev,AcquisitionMaster):
-                master_entry = h5py.Group(measurement,'master%d' % master_id,create=True)
+        for dev, node in scan_recorder.nodes.iteritems():
+            if isinstance(dev, AcquisitionMaster):
+                master_entry = h5py.Group(
+                    measurement, 'master%d' % master_id, create=True)
                 master_id += 1
             for slave in dev.slaves:
-                if isinstance(slave,AcquisitionDevice):
-                    for signal in ('start', 'end', 'new_ref','new_data'):
-                        dispatcher.connect(self._acq_device_event, signal,dev)
+                if isinstance(slave, AcquisitionDevice):
+                    for signal in ('start', 'end', 'new_ref', 'new_data'):
+                        dispatcher.connect(
+                            self._acq_device_event, signal, dev)
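Editor's note: one pre-existing bug in this hunk is untouched by the pep8 pass. The except branch tests os.path.isdir(path), but no variable named path exists in prepare(), so the EEXIST case would itself raise a NameError; the intended variable is presumably full_path. A minimal sketch of the intended idiom, under that assumption (makedirs_exist_ok is a hypothetical helper name):

import os
import errno

def makedirs_exist_ok(full_path):
    # Create the whole tree; tolerate the directory already existing.
    try:
        os.makedirs(full_path)
    except OSError as exc:  # Python >2.5
        # Must check the same variable that was passed to os.makedirs.
        if exc.errno == errno.EEXIST and os.path.isdir(full_path):
            pass
        else:
            raise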
@@ -30,7 +30,8 @@ def wrap_methods(from_object, target_object):
         if inspect.ismethod(getattr(from_object, name)):
             if hasattr(target_object, name) and inspect.ismethod(getattr(target_object, name)):
                 continue
-            setattr(target_object, name, types.MethodType(WrappedMethod(from_object, name), target_object, target_object.__class__))
+            setattr(target_object, name, types.MethodType(WrappedMethod(
+                from_object, name), target_object, target_object.__class__))


 def add_conversion_function(obj, method_name, function):
@@ -58,15 +59,19 @@ def add_property(inst, name, method):

 def grouped(iterable, n):
     "s -> (s0,s1,s2,...sn-1), (sn,sn+1,sn+2,...s2n-1), (s2n,s2n+1,s2n+2,...s3n-1), ..."
-    return itertools.izip(*[iter(iterable)]*n)
+    return itertools.izip(*[iter(iterable)] * n)


 def all_equal(iterable):
     g = itertools.groupby(iterable)
     return next(g, True) and not next(g, False)


 """
 functions to add custom attributes and commands to an object.
 """


 def add_object_method(obj, method, pre_call, name=None, args=[], types_info=(None, None)):
     if name is None:
@@ -98,27 +103,31 @@ def object_method(method=None, name=None, args=[], types_info=(None, None), filt
     # Returns a method where _object_method_ attribute is filled with a
     # dict of elements to characterize it.
-    method._object_method_ = dict(name=name, args=args, types_info=types_info, filter=filter)
+    method._object_method_ = dict(
+        name=name, args=args, types_info=types_info, filter=filter)
     return method


 def object_method_type(method=None, name=None, args=[], types_info=(None, None), type=None):
-    f = lambda x: isinstance(x, type)
+    def f(x): return isinstance(x, type)
     return object_method(method=method, name=name, args=args, types_info=types_info, filter=f)


 def add_object_attribute(obj, name=None, fget=None, fset=None, args=[], type_info=None, filter=None):
     obj._add_custom_attribute(name, fget, fset, type_info)


 """
 decorators for set/get methods to access to custom attributes
 """


 def object_attribute_type_get(get_method=None, name=None, args=[], type_info=None, type=None):
-    f = lambda x: isinstance(x, type)
+    def f(x): return isinstance(x, type)
     return object_attribute_get(get_method=get_method, name=name, args=args, type_info=type_info, filter=f)


 def object_attribute_get(get_method=None, name=None, args=[], type_info=None, filter=None):
     if get_method is None:
         return functools.partial(object_attribute_get, name=name, args=args,
@@ -130,19 +139,22 @@ def object_attribute_get(get_method=None, name=None, args=[], type_info=None, fi
     if attr_name.startswith("get_"):
         attr_name = attr_name[4:]  # removes leading "get_"
-    get_method._object_method_ = dict(name=name, args=args, types_info=("None", type_info), filter=filter)
+    get_method._object_method_ = dict(
+        name=name, args=args, types_info=("None", type_info), filter=filter)

     if not hasattr(get_method, "_object_attribute_"):
         get_method._object_attribute_ = dict()
-    get_method._object_attribute_.update(name=attr_name, fget=get_method, args=args, type_info=type_info, filter=filter)
+    get_method._object_attribute_.update(
+        name=attr_name, fget=get_method, args=args, type_info=type_info, filter=filter)
     return get_method


 def object_attribute_type_set(set_method=None, name=None, args=[], type_info=None, type=None):
-    f = lambda x: isinstance(x, type)
+    def f(x): return isinstance(x, type)
     return object_attribute_set(set_method=set_method, name=name, args=args, type_info=type_info, filter=f)


 def object_attribute_set(set_method=None, name=None, args=[], type_info=None, filter=None):
     if set_method is None:
         return functools.partial(object_attribute_set, name=name, args=args,
@@ -154,11 +166,13 @@ def object_attribute_set(set_method=None, name=None, args=[], type_info=None, fi
     if attr_name.startswith("set_"):
         attr_name = attr_name[4:]  # removes leading "set_"
-    set_method._object_method_ = dict(name=name, args=args, types_info=(type_info, "None"), filter=filter)
+    set_method._object_method_ = dict(
+        name=name, args=args, types_info=(type_info, "None"), filter=filter)

     if not hasattr(set_method, "_object_attribute_"):
         set_method._object_attribute_ = dict()
-    set_method._object_attribute_.update(name=attr_name, fset=set_method, args=args, type_info=type_info, filter=filter)
+    set_method._object_attribute_.update(
+        name=attr_name, fset=set_method, args=args, type_info=type_info, filter=filter)
     return set_method
@@ -237,7 +251,8 @@ def with_custom_members(klass):
         access_mode = 'r' if fget else ''
         access_mode += 'w' if fset else ''
         if fget is None and fset is None:
-            raise RuntimeError("impossible case: must have fget or fset...")
+            raise RuntimeError(
+                "impossible case: must have fget or fset...")
         custom_attrs[name] = type_info, access_mode

     klass._get_custom_methods = _get_custom_methods
@@ -250,7 +265,6 @@ def with_custom_members(klass):
     return klass


 class Null(object):
     __slots__ = []
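Editor's note: the repeated lambda-to-def rewrites in this file follow PEP 8's E731 rule (do not assign a lambda expression to a name). For the two small helpers touched above, a quick usage sketch in the codebase's Python 2 (matching its use of itertools.izip):

import itertools

def grouped(iterable, n):
    "s -> (s0,s1,...sn-1), (sn,...s2n-1), ..."
    return itertools.izip(*[iter(iterable)] * n)

def all_equal(iterable):
    # groupby collapses equal runs; at most one group means all items equal.
    g = itertools.groupby(iterable)
    return next(g, True) and not next(g, False)

print list(grouped([1, 2, 3, 4, 5, 6], 2))  # [(1, 2), (3, 4), (5, 6)]
print all_equal([7, 7, 7])                  # True
print all_equal([7, 8])                     # False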
@@ -14,21 +14,22 @@ from bliss.config.conductor import client
 from bliss.config import channels
 import gevent


 class LimaDataNode(DataNode):

     class DataChannel(object):

-        def __init__(self,dataset):
+        def __init__(self, dataset):
             self._dataset = dataset
             self._device_proxy = None
             self._image_mode = {
-                0 : numpy.uint8,
-                1 : numpy.uint16,
-                2 : numpy.uint32,
-                4 : numpy.int8,
-                5 :numpy.int16 ,
-                6 : numpy.int32 ,
+                0: numpy.uint8,
+                1: numpy.uint16,
+                2: numpy.uint32,
+                4: numpy.int8,
+                5: numpy.int16,
+                6: numpy.int32,
             }

-        def get(self,from_index,to_index = None):
+        def get(self, from_index, to_index=None):
             cnx = self._dataset._data._cnx()
             url = self._dataset._data.url_server
             if url is None:  # data is no more available
@@ -36,8 +37,8 @@ class LimaDataNode(DataNode):
             current_lima_acq = int(cnx.get(url))

-            (lima_acq_nb,acq_nb_buffer,
-             LastImageAcquired,LastCounterReady,
+            (lima_acq_nb, acq_nb_buffer,
+             LastImageAcquired, LastCounterReady,
              LastImageSaved) = [int(x) for x in cnx.hmget(self.db_name,
                                                           'lima_acq_nb',
                                                           'acq_nb_buffer',
@@ -46,11 +47,11 @@ class LimaDataNode(DataNode):
                                                           'LastImageSaved')]
             if to_index is None:
-                #first we try to get image directly from the server
+                # first we try to get image directly from the server
                 if current_lima_acq == lima_acq_nb:  # current acquisition
                     if LastImageAcquired < from_index:  # image is not yet available
                         raise RuntimeError('image is not yet available')
-                    #should be still in server memory
+                    # should be still in server memory
                     if acq_nb_buffer > LastImageAcquired - from_index:
                         try:
                             if self._device_proxy is None:
@@ -66,26 +67,28 @@ class LimaDataNode(DataNode):
             else:
                 raise NotImplementedError('Not yet done')

-    def _tango_unpack(self,msg):
+    def _tango_unpack(self, msg):
         struct_format = '<IHHIIHHHHHHHHHHHHHHHHHHIII'
         header_size = struct.calcsize(struct_format)
         values = struct.unpack(msg[:header_size])
         if values[0] != 0x44544159:
             raise RuntimeError('Not a lima data')
         header_offset = values[2]
-        data = numpy.fromstring(msg[header_offset:],data=self._image_mode.get(values[4]))
-        data.shape = values[8],values[7]
+        data = numpy.fromstring(
+            msg[header_offset:], data=self._image_mode.get(values[4]))
+        data.shape = values[8], values[7]
         return data

-    def _read_from_file(self,from_index):
+    def _read_from_file(self, from_index):
         #@todo should read file from any format?????
         for saving_parameters in self._dataset._saving_params:
             pass

-    def __init__(self,name,**keys):
-        DataNode.__init__(self,'lima',name,**keys)
+    def __init__(self, name, **keys):
+        DataNode.__init__(self, 'lima', name, **keys)
         saving_params_name = '%s_saving_params' % self.db_name()
-        self._saving_params = QueueObjSetting(saving_params_name, connection=self.db_connection)
+        self._saving_params = QueueObjSetting(
+            saving_params_name, connection=self.db_connection)
         self._storage_task = None

     def channel_name(self):
@@ -93,13 +96,13 @@ class LimaDataNode(DataNode):
     #@brief update image status
     #
-    def update_status(self,image_status):
+    def update_status(self, image_status):
         cnx = self._data._cnx()
         db_name = self.db_name()

         pipeline = cnx.pipeline()
-        for key,value in image_status.iteritems():
-            pipeline.hset(db_name,key,value)
+        for key, value in image_status.iteritems():
+            pipeline.hset(db_name, key, value)
         pipeline.execute()

     def _end_storage(self):
@@ -115,8 +118,8 @@ class LimaDataNode(DataNode):
             local_dict = self._new_image_status
             self._new_image_status = dict()
             if local_dict:
-                self.db_connection.hmset(self.db_name(),local_dict)
-            if self._stop_flag :
+                self.db_connection.hmset(self.db_name(), local_dict)
+            if self._stop_flag:
                 break
             gevent.idle()
@@ -137,35 +140,34 @@ class LimaDataNode(DataNode):
         self._new_image_status.update(local_dict)
         self._new_image_status_event.set()

     #@brief set the number of buffer for this acquisition
-    def set_nb_buffer(self,acq_nb_buffer):
+    def set_nb_buffer(self, acq_nb_buffer):
         self._data.acq_nb_buffer = acq_nb_buffer

     #@brief set the server url and
-    #calculate an unique id for this acquisition
-    def set_server_url(self,url):
+    # calculate an unique id for this acquisition
+    def set_server_url(self, url):
         self._data.url_server = url

         cnx = self._data._cnx()
         self._data.lima_acq_nb = cnx.incr(url)

-    def set_acq_parameters(self,acq_params):
-        self.set_info('acq_params',acq_params)
+    def set_acq_parameters(self, acq_params):
+        self.set_info('acq_params', acq_params)

     #@brief saving parameters
-    def add_saving_parameters(self,parameters):
+    def add_saving_parameters(self, parameters):
         self._saving_params.append(parameters)
         if self._ttl > 0:
             self._saving_params.ttl(self._ttl)

     #@brief this methode should retrives all files
-    #references for this data set
+    # references for this data set
     def get_file_references(self):
-        #take the last in list because it's should be the final
+        # take the last in list because it's should be the final
         final_params = self._saving_params[-1]
         acq_params = self._info['acq_params']
-        #in that case only one reference will be return
+        # in that case only one reference will be return
         overwritePolicy = final_params['overwritePolicy'].lower()
         if overwritePolicy == 'multiset':
             last_file_number = final_params['nextNumber'] + 1
@@ -189,8 +191,7 @@ class LimaDataNode(DataNode):
             references.append(full_path)
         return references

-    #@brief for now lima has only on data channel
-    #we will provide in a second time all counters (roi_counters,roi_spectrum...)
-    def get_channel(self,**keys):
+    # we will provide in a second time all counters (roi_counters,roi_spectrum...)
+    def get_channel(self, **keys):
         return DatasetLima.DataChannel(self)
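Editor's note: two pre-existing problems in _tango_unpack survive the reformatting: struct.unpack(msg[:header_size]) is missing its format argument, and numpy.fromstring(..., data=...) is not a valid call (the keyword is dtype). A hedged sketch of what the method presumably intends, written as a free function with the _image_mode dict passed in for self-containment:

import struct
import numpy

def tango_unpack(msg, image_mode):
    # Fixed-size binary header preceding the image payload (layout from the diff).
    struct_format = '<IHHIIHHHHHHHHHHHHHHHHHHIII'
    header_size = struct.calcsize(struct_format)
    # The original call omitted struct_format entirely.
    values = struct.unpack(struct_format, msg[:header_size])
    if values[0] != 0x44544159:
        raise RuntimeError('Not a lima data')
    header_offset = values[2]
    # numpy.fromstring takes dtype=, not data=; values[4] selects the pixel type.
    data = numpy.fromstring(msg[header_offset:],
                            dtype=image_mode.get(values[4]))
    data.shape = values[8], values[7]  # rows, columns from the header
    return data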
@@ -12,44 +12,50 @@ import pickle
 from bliss.data.node import DataNode


-def _transform_dict_obj(dict_object) :
+def _transform_dict_obj(dict_object):
     return_dict = dict()
-    for key,value in dict_object.iteritems():
+    for key, value in dict_object.iteritems():
         return_dict[key] = _transform(value)
     return return_dict


 def _transform_iterable_obj(iterable_obj):
     return_list = list()
     for value in iterable_obj:
         return_list.append(_transform(value))
     return return_list


 def _transform_obj_2_name(obj):
-    return obj.name if hasattr(obj,'name') else obj
+    return obj.name if hasattr(obj, 'name') else obj


 def _transform(var):
-    if isinstance(var,dict):
+    if isinstance(var, dict):
         var = _transform_dict_obj(var)
-    elif isinstance(var,(tuple,list)):
+    elif isinstance(var, (tuple, list)):
         var = _transform_iterable_obj(var)
     else:
         var = _transform_obj_2_name(var)
     return var


 def pickle_dump(var):
     var = _transform(var)
     return pickle.dumps(var)


 class Scan(DataNode):
-    def __init__(self,name,create=False,**keys):
-        DataNode.__init__(self,'scan',name,create=create,**keys)
+    def __init__(self, name, create=False, **keys):
+        DataNode.__init__(self, 'scan', name, create=create, **keys)
         self.__create = create
         if create:
             start_time_stamp = time.time()
             start_time = datetime.datetime.fromtimestamp(start_time_stamp)
             self._data.start_time = start_time
-            self._data.start_time_str = start_time.strftime("%a %b %d %H:%M:%S %Y")
+            self._data.start_time_str = start_time.strftime(
+                "%a %b %d %H:%M:%S %Y")
             self._data.start_time_stamp = start_time_stamp
         self._info._write_type_conversion = pickle_dump
@@ -61,6 +67,7 @@ class Scan(DataNode):
         self._data.end_time_str = end_time.strftime("%a %b %d %H:%M:%S %Y")
         self._data.end_time_stamp = end_time_stamp


 def get_data(scan):
     """
     Return a numpy structured arrays
@@ -74,18 +81,19 @@ def get_data(scan):
     max_channel_len = 0
     connection = scan.node.db_connection
     pipeline = connection.pipeline()
-    for device,node in scan.nodes.iteritems():
+    for device, node in scan.nodes.iteritems():
         if node.type() == 'zerod':
             for channel_name in node.channels_name():
-                chan = node.get_channel(channel_name,check_exists=False,cnx=pipeline)
+                chan = node.get_channel(
+                    channel_name, check_exists=False, cnx=pipeline)
                 chanlist.append(channel_name)
-                chan.get(0,-1)  # all data
-                dtype.append((channel_name,'f8'))
+                chan.get(0, -1)  # all data
+                dtype.append((channel_name, 'f8'))
     result = pipeline.execute()
     max_channel_len = max((len(values) for values in result))
-    data = numpy.zeros(max_channel_len,dtype=dtype)
-    for channel_name,values in zip(chanlist,result):
+    data = numpy.zeros(max_channel_len, dtype=dtype)
+    for channel_name, values in zip(chanlist, result):
         a = data[channel_name]
         nb_data = len(values)
         a[0:nb_data] = values[0:nb_data]
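Editor's note: per its docstring, get_data flattens every zerod channel of a scan into one numpy structured array, one 'f8' field per channel, zero-padded up to the longest channel. A hedged usage sketch (the channel names are illustrative, not from the source):

data = get_data(scan)      # scan: a finished Scan node wrapper
print data.dtype.names     # e.g. ('elapsed_time', 'diode')
print data['diode'][:5]    # first five points of one channel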
@@ -9,12 +9,14 @@ import collections
 from bliss.config.settings import QueueSetting
 from bliss.data.node import DataNode


 class Dataset0D(DataNode):

     class DataChannel(object):

-        def __init__(self,channel_db_name,cnx) :
+        def __init__(self, channel_db_name, cnx):
             self._queue = QueueSetting(channel_db_name,
                                        connection=cnx)

-        def get(self,from_index,to_index = None):
+        def get(self, from_index, to_index=None):
             if to_index is None:
                 return self._queue[from_index]
             else:
@@ -23,22 +25,24 @@ class Dataset0D(DataNode):
         def __len__(self):
             return self._queue.__len__()

-    def __init__(self,name,**keys):
-        DataNode.__init__(self,'zerod',name,**keys)
+    def __init__(self, name, **keys):
+        DataNode.__init__(self, 'zerod', name, **keys)
         cnx = self.db_connection
-        self._channels_name = QueueSetting('%s_channels' % self.db_name(),connection=cnx)
+        self._channels_name = QueueSetting(
+            '%s_channels' % self.db_name(), connection=cnx)
         self._channels = {}
         for channel_name in self._channels_name:
-            self._channels[channel_name] = QueueSetting('%s_%s' % (self.db_name(),channel_name),
+            self._channels[channel_name] = QueueSetting('%s_%s' % (self.db_name(), channel_name),
                                                         connection=cnx)

-    def channels_name(self) :
+    def channels_name(self):
         return list(self._channels_name)

-    def store(self,signal,event_dict) :
+    def store(self, signal, event_dict):
         if signal == "new_data":
             channel_data = event_dict.get("channel_data")
             if channel_data is None:
-                #warning
+                # warning
                 return
             for channel_name, data in channel_data.iteritems():
                 if data.size == 0:
@@ -46,7 +50,7 @@ class Dataset0D(DataNode):
                 queue = self._channels.get(channel_name)
                 if queue is None:
                     self._channels_name.append(channel_name)
-                    queue = QueueSetting('%s_%s' % (self.db_name(),channel_name),
+                    queue = QueueSetting('%s_%s' % (self.db_name(), channel_name),
                                          connection=self.db_connection)
                     self._channels[channel_name] = queue
                 try:
@@ -57,24 +61,26 @@ class Dataset0D(DataNode):
                     queue.extend(data)

     #@brief get data channel object
-    def get_channel(self,channel_name = None,check_exists = True,cnx = None) :
+    def get_channel(self, channel_name=None, check_exists=True, cnx=None):
         if channel_name is None:
             channel_name = self._channels_name[0]
         elif check_exists and channel_name not in self._channels_name:
             raise ValueError("Unknown channel %s" % channel_name)

-        channel_db_name = '%s_%s' % (self.db_name(),channel_name)
-        return Dataset0D.DataChannel(channel_db_name,self.db_connection if cnx is None else cnx)
+        channel_db_name = '%s_%s' % (self.db_name(), channel_name)
+        return Dataset0D.DataChannel(channel_db_name, self.db_connection if cnx is None else cnx)

     def get_all_channels(self):
         """
         return all channels for this node
         the return is a dict {channel_name:DataChannel}
         """
-        return dict(((chan_name,self.get_channel(chan_name))
+        return dict(((chan_name, self.get_channel(chan_name))
                      for chan_name in self._channels_name))

     def _get_db_names(self):
         db_names = DataNode._get_db_names(self)
         db_names.append(self._channels_name._name)
-        db_names.extend((channel._name for channel in self._channels.itervalues()))
+        db_names.extend(
+            (channel._name for channel in self._channels.itervalues()))
         return db_names
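Editor's note: the get_all_channels docstring above pins down the access pattern; a small hedged usage sketch, where node stands in for an existing Dataset0D instance:

for name, channel in node.get_all_channels().iteritems():
    print name, len(channel), channel.get(0, -1)  # full history per channel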
@@ -6,9 +6,11 @@ import numpy
 import gevent
 import sys


 class CalcAcquisitionDevice(AcquisitionDevice):

     def __init__(self, name, src_acq_devices_list, func, output_channels_list, type="zerod"):
-        AcquisitionDevice.__init__(self, None, name, type, trigger_type=AcquisitionDevice.HARDWARE)
+        AcquisitionDevice.__init__(
+            self, None, name, type, trigger_type=AcquisitionDevice.HARDWARE)
         self.src_acq_devices_list = src_acq_devices_list
         self.func = func
         self.channels.extend(output_channels_list)
@@ -25,7 +27,8 @@ class CalcAcquisitionDevice(AcquisitionDevice):
         output_channels_data_dict = self.func(sender, channel_data)

         if output_channels_data_dict:
-            dispatcher.send("new_data", self, { "channel_data": output_channels_data_dict })
+            dispatcher.send("new_data", self, {
+                            "channel_data": output_channels_data_dict})

     def start(self):
         return
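Editor's note: the new_data path above calls self.func(sender, channel_data) and re-emits whatever dict comes back, so func is the user-supplied calculation. A minimal hedged sketch of such a callback (the channel names are illustrative):

import numpy

def ratio_calc(sender, channel_data):
    # Derive a new channel from two measured ones; an empty dict emits nothing.
    if 'det' in channel_data and 'monitor' in channel_data:
        det = numpy.asarray(channel_data['det'], dtype=float)
        return {'ratio': det / channel_data['monitor']}
    return {}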
@@ -14,6 +14,7 @@ from ..chain import AcquisitionDevice, AcquisitionChannel
 from bliss.common.measurement import GroupedReadMixin
 from bliss.common.utils import all_equal


 class BaseCounterAcquisitionDevice(AcquisitionDevice):
     def __init__(self, counter, count_time, **keys):
         npoints = max(1, keys.pop('npoints', 1))
@@ -32,7 +33,8 @@ class BaseCounterAcquisitionDevice(AcquisitionDevice):
         self._nb_acq_points = 0

         if not isinstance(counter, GroupedReadMixin):
-            self.channels.append(AcquisitionChannel(counter.name, numpy.double, (1,)))
+            self.channels.append(AcquisitionChannel(
+                counter.name, numpy.double, (1,)))
             self.__counter_names.append(counter.name)

     @property
@@ -49,19 +51,22 @@ class BaseCounterAcquisitionDevice(AcquisitionDevice):
     def add_counter(self, counter):
         if not isinstance(self.device, GroupedReadMixin):
-            raise RuntimeError("Cannot add counter to single-read counter acquisition device")
+            raise RuntimeError(
+                "Cannot add counter to single-read counter acquisition device")
         self.__grouped_read_counters_list.append(counter)
         self.__counter_names.append(counter.name)
-        self.channels.append(AcquisitionChannel(counter.name, numpy.double, (1,)))
+        self.channels.append(AcquisitionChannel(
+            counter.name, numpy.double, (1,)))

     def _emit_new_data(self, data):
-        channel_data = dict([ (name, data[i]) for i, name in enumerate(self.counter_names) ])
+        channel_data = dict([(name, data[i])
+                             for i, name in enumerate(self.counter_names)])
         dispatcher.send("new_data", self, {"channel_data": channel_data})


 class SamplingCounterAcquisitionDevice(BaseCounterAcquisitionDevice):
-    SIMPLE_AVERAGE,TIME_AVERAGE,INTEGRATE = range(3)
+    SIMPLE_AVERAGE, TIME_AVERAGE, INTEGRATE = range(3)

     def __init__(self, counter, count_time=None, mode=SIMPLE_AVERAGE, **keys):
         """
@@ -78,7 +83,8 @@ class SamplingCounterAcquisitionDevice(BaseCounterAcquisitionDevice):