Commit 7d4251e8 authored by payno's avatar payno

[io] rework the frm_dict and to_dict functions to keep track of the processing history

parent d1adc360
......@@ -38,4 +38,5 @@ _logger = logging.getLogger(__name__)
def suite():
    """Assemble this module's unittest suite from the exec and
    reprocessing sub-suites."""
    all_tests = unittest.TestSuite()
    for sub_suite in (test_exec_suite(), test_reprocessing_suite()):
        all_tests.addTest(sub_suite)
    return all_tests
......@@ -81,7 +81,8 @@ def read_frm_file(file_path):
class XASReader(object):
"""Simple reader of a xas file"""
def read_frm_url(self, spectra_url, channel_url, dimensions=None,
@staticmethod
def read_frm_url(spectra_url, channel_url, dimensions=None,
config_url=None):
sp, en, conf = read_xas(spectra_url=spectra_url,
channel_url=channel_url,
......@@ -89,26 +90,27 @@ class XASReader(object):
dimensions=dimensions)
return XASObject(spectra=sp, energy=en, configuration=conf)
def read_from_file(self, file_path):
@staticmethod
def read_from_file(file_path):
"""
:param str file_path:
:return: `.XASObject`
"""
if file_path.endswith('.dat'):
return self.read_frm_url(
return XASReader.read_frm_url(
spectra_url=DataUrl(file_path=file_path,
scheme='PyMca'),
channel_url=DataUrl(file_path=file_path,
scheme='PyMca'))
elif file_path.endswith('.xmu'):
return self.read_frm_url(
return XASReader.read_frm_url(
spectra_url=DataUrl(file_path=file_path,
scheme='larch'),
channel_url=DataUrl(file_path=file_path,
scheme='larch'))
elif h5py.is_hdf5(file_path):
return self.read_frm_url(
return XASReader.read_frm_url(
spectra_url=DataUrl(file_path=file_path,
scheme='silx',
data_path=DEFAULT_SPECTRA_PATH),
......@@ -124,7 +126,6 @@ class XASReader(object):
__call__ = read_from_file
class XASWriter(object):
"""
class to write the output file. In this case we need a class in order to
......
......@@ -35,6 +35,7 @@ import shutil
import tempfile
import unittest
import h5py
import numpy
from silx.io.dictdump import h5todict
from silx.io.url import DataUrl
from est.core.process.roi import xas_roi, _ROI as XASROI
......@@ -115,7 +116,12 @@ class TestStreamSingleSpectrum(unittest.TestCase):
roi = XASROI(origin=(0, 2), size=(5, 1))
xas_obj = XASObject(spectra=spectra, energy=energy,
configuration={'roi': roi.to_dict()})
out = xas_roi(xas_obj.to_dict())
dict_xas_obj = xas_obj.to_dict()
self.assertTrue('spectra' in dict_xas_obj.keys())
self.assertTrue('energy' in dict_xas_obj.keys())
tmp_obj = XASObject.from_dict(dict_xas_obj)
numpy.testing.assert_array_equal(tmp_obj.energy, tmp_obj.spectra[0].energy)
out = xas_roi(dict_xas_obj)
out.configuration = {'EXAFS': self.exafs_configuration, 'SET_KWEIGHT': 0}
out = pymca_normalization(xas_obj=out)
out = pymca_exafs(xas_obj=out)
......@@ -263,7 +269,8 @@ class TestSaveFlowAuto(unittest.TestCase):
# check one configuration
stored_config = h5todict(self.h5_file,
path='/scan1/xas_process_2/configuration')
path='/scan1/xas_process_2/configuration',
asarray=False)
for key in ('KMin', 'KMax'):
self.assertTrue(configuration[key] == stored_config[key])
......
......@@ -123,6 +123,9 @@ class ROIProcess(Process):
for key in xas_obj.spectra_keys():
if isinstance(xas_obj.spectra[0][key], numpy.ndarray):
# there is no processing for the _larch_grp_members case
if key == '_larch_grp_members':
continue
volume = xas_obj._spectra_volume(spectra=xas_obj.spectra,
key=key,
dim_1=xas_obj.dim1,
......
......@@ -32,10 +32,12 @@ import os
import unittest
import tempfile
import h5py
from est.core.types import Spectrum, XASObject
import shutil
from est.core.types import Spectrum, XASObject, Dim
from est.core.utils import spectra as spectra_utils
from est.core.io import read as read_xas
from silx.io.url import DataUrl
import json
import silx.io.utils
try:
import PyMca5
......@@ -92,6 +94,9 @@ class TestXASObject(unittest.TestCase):
ddict = obj.to_dict()
obj2 = XASObject.from_dict(ddict)
self.assertEqual(obj2, obj)
# insure the XASObject is serializable
# import json
# json.dumps(obj2.to_dict())
def test_create_from_several_spectrums(self):
"""check that we can create a XASObject from numpy arrays"""
......@@ -125,9 +130,87 @@ class TestXASObject(unittest.TestCase):
self.assertEqual(obj2, self.xas_obj)
class TestXASObjectSerialization(unittest.TestCase):
def setUp(self) -> None:
self.energy, self.spectra = spectra_utils.create_dataset(
shape=(256, 20, 10))
self.output_dir = tempfile.mkdtemp()
self.spectra_path = '/data/NXdata/data'
self.channel_path = '/data/NXdata/Channel'
self.filename = os.path.join(self.output_dir, 'myfile.h5')
with h5py.File(self.filename, 'a') as f:
f[self.spectra_path] = self.spectra
f[self.channel_path] = self.energy
self.dimensions = (Dim.CHANNEL_ENERGY_DIM, Dim.Y_DIM, Dim.X_DIM)
self.url_spectra = DataUrl(file_path=self.filename,
data_path=self.spectra_path,
scheme='silx')
self.url_energy = DataUrl(file_path=self.filename,
data_path=self.channel_path,
scheme='silx')
self.process_flow_file = os.path.join(self.output_dir, 'process_flow.h5')
def tearDown(self) -> None:
shutil.rmtree(self.output_dir)
def test_serialization_url_auto(self):
"""Make sure the `to_dict` and `from_dict` functions are working
if no url are provided"""
xas_obj = XASObject(spectra=self.spectra, energy=self.energy,
dim1=20, dim2=10, keep_process_flow=False)
# if no h5 file defined, should fail to copy it to a dictionary
with self.assertRaises(ValueError):
xas_obj.to_dict()
xas_obj.link_to_h5(self.process_flow_file)
dict_xas_obj = xas_obj.to_dict()
# make sure it is serializable
json.dumps(dict_xas_obj)
# make sure we find a comparable xas object from it
xas_obj_2 = XASObject.from_dict(dict_xas_obj)
numpy.testing.assert_array_equal(xas_obj.energy, xas_obj_2.energy)
self.assertEqual(xas_obj, xas_obj_2)
# simple test without the process_details
dict_xas_obj = xas_obj.to_dict(with_process_details=False)
# make sure it is serializable
json.dumps(dict_xas_obj)
def test_serialization_url_provided(self):
"""Make sure the `to_dict` and `from_dict` functions are working
if url are provided"""
xas_obj = XASObject(spectra=self.spectra, energy=self.energy,
dim1=20, dim2=10, keep_process_flow=False,
energy_url=self.url_energy,
spectra_url=self.spectra_path)
# if no h5 file defined, should fail to copy it to a dictionary
with self.assertRaises(ValueError):
xas_obj.to_dict()
xas_obj.link_to_h5(self.process_flow_file)
dict_xas_obj = xas_obj.to_dict()
# make sure it is serializable
json.dumps(dict_xas_obj)
# make sure we find a comparable xas object from it
xas_obj_2 = XASObject.from_dict(dict_xas_obj)
numpy.testing.assert_array_equal(xas_obj.energy, xas_obj_2.energy)
self.assertEqual(xas_obj, xas_obj_2)
# simple test without the process_details
dict_xas_obj = xas_obj.to_dict(with_process_details=False)
# make sure it is serializable
json.dumps(dict_xas_obj)
def suite():
test_suite = unittest.TestSuite()
for ui in (TestSpectrum, TestXASObject, ):
for ui in (TestSpectrum, TestXASObject, TestXASObjectSerialization):
test_suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(ui))
return test_suite
......
......@@ -46,6 +46,7 @@ except ImportError:
else:
from larch.symboltable import Group
_Spectrum_Base = Group
from est.core.utils import larchutils
_has_larch = True
......@@ -77,10 +78,26 @@ class XASObject(object):
:param bool keep_process_flow: if True then will keep the trace of the set
of process applied to the XASObject into a
hdf5 file.
This is also used for the
:param spectra_url: path to the spectra data if any. Used when serializing
the XASObject. Won't read it from it.
:type: Union[None,str]
:param spectra_url_dimensions: dimensions of the stored spectra. WARNING:
this is different of the spectra dimension
which should be given in the
`DEFAULT_DIMENSIONS` order (Channel, Y, X)
:type: Union[tuple,None]
:param energy_url: path to the energy / channel data if any. Used when
serializing the XASObject. Won't read it from it.
:type: Union[None,str]
"""
def __init__(self, spectra=None, energy=None, configuration=None, dim1=None,
dim2=None, name='scan1', keep_process_flow=True):
DEFAULT_DIMENSIONS = (Dim.CHANNEL_ENERGY_DIM, Dim.Y_DIM, Dim.X_DIM)
def __init__(self, spectra=None, energy=None, configuration=None,
dim1=None, dim2=None, name='scan1', keep_process_flow=True,
spectra_url=None, spectra_url_dims=None,
energy_url=None):
self.__channels = None
self.__spectra = []
self.__energy = None
......@@ -89,6 +106,9 @@ class XASObject(object):
self.__processing_index = 0
self.__h5_file = None
self.__entry_name = name
self.__spectra_url = spectra_url
self.__spectra_url_dims = spectra_url_dims
self.__energy_url = energy_url
self.spectra = (energy, spectra, dim1, dim2)
self.configuration = configuration
if keep_process_flow is True:
......@@ -103,6 +123,35 @@ class XASObject(object):
def spectra(self):
return self.__spectra
@property
def spectra_url(self):
"""Url from where the spectra is available.
Used for object serialization"""
return self.__spectra_url
@spectra_url.setter
def spectra_url(self, url):
self.__spectra_url = url
@property
def spectra_url_dims(self):
"""used to interpret the spectra_url if any"""
return self.__spectra_url_dims
@spectra_url_dims.setter
def spectra_url_dims(self, dims):
self.__spectra_url_dims = dims
@property
def energy_url(self):
"""Url from where the energy is available.
Used for object serialization"""
return self.__energy_url
@energy_url.setter
def energy_url(self, url):
self.__energy_url = url
@spectra.setter
def spectra(self, energy_spectra):
energy, spectra, dim1, dim2 = energy_spectra
......@@ -119,8 +168,8 @@ class XASObject(object):
self.__dim2 = spectra.shape[2]
for y_i_spectrum in range(spectra.shape[1]):
for x_i_spectrum in range(spectra.shape[2]):
self.addSpectrum(Spectrum(energy=energy,
mu=spectra[:, y_i_spectrum, x_i_spectrum]))
self.add_spectrum(Spectrum(energy=energy,
mu=spectra[:, y_i_spectrum, x_i_spectrum]))
else:
if dim1 is None or dim2 is None:
raise ValueError(
......@@ -131,13 +180,13 @@ class XASObject(object):
self.__dim2 = dim2
for spectrum in spectra:
assert isinstance(spectrum, Spectrum)
self.addSpectrum(spectrum)
self.add_spectrum(spectrum)
self.energy = energy
def _setSpectra(self, spectra):
self.__spectra = spectra
def getSpectrum(self, dim1_idx, dim2_idx):
def get_spectrum(self, dim1_idx, dim2_idx):
"""Util function to access the spectrum at dim1_idx, dim2_idx"""
assert dim1_idx < self.dim1
assert dim2_idx < self.dim2
......@@ -146,7 +195,7 @@ class XASObject(object):
assert global_idx >= 0
return self.spectra[global_idx]
def addSpectrum(self, spectrum):
def add_spectrum(self, spectrum):
self.__spectra.append(spectrum)
@property
......@@ -185,6 +234,32 @@ class XASObject(object):
assert configuration is None or isinstance(configuration, dict)
self.__configuration = configuration or {}
    def _create_saving_pt(self):
        """Persist the current spectra and the channel/energy dataset into
        the linked hdf5 file under '<entry>/est_saving_pt', so that
        serialization can reference them by url instead of embedding arrays.

        :raises ValueError: if no hdf5 file has been linked to this XASObject
        """
        if not self.has_linked_file():
            raise ValueError('there is not where to create a saving pt'
                             '(no h5 file linked to the XASObject)')
        else:
            def get_list_spectra():
                # map spectrum index -> serialized spectrum (dict)
                res = {}
                for i_spectrum, spectrum in enumerate(self.spectra):
                    res[i_spectrum] = spectrum.to_dict()
                return res
            from est.io import write_spectrum_saving_pt
            # save spectra: one sub-entry per spectrum index
            entry = '/'.join((self.entry, 'est_saving_pt', 'spectra'))
            for i_spectrum, spectrum in get_list_spectra().items():
                path = '/'.join((entry, str(i_spectrum)))
                write_spectrum_saving_pt(h5_file=self.linked_h5_file,
                                         entry=path, obj=spectrum,
                                         overwrite=True)
            # save channel
            with h5py.File(self.linked_h5_file, 'a') as h5f:
                entry = '/'.join((self.entry, 'est_saving_pt', 'channel'))
                # overwrite any previously saved channel dataset
                if entry in h5f:
                    del h5f[entry]
                # hdf5 cannot store None: use the string 'None' as a sentinel
                h5f[entry] = 'None' if self.energy is None else self.energy
def to_dict(self, with_process_details=True):
"""convert the XAS object to a dict
......@@ -197,35 +272,42 @@ class XASObject(object):
processes.
:type: bool
"""
def get_list_spectra():
res = []
for spectrum in self.spectra:
res.append(spectrum.to_dict())
return res
def get_spectra_and_processing():
if not self.has_linked_file():
raise ValueError('To get process details you should have a'
'`process` link file')
else:
# store the current spectra with processing information
data_path = '/'.join((self.entry, 'est_saving_pt', 'spectra'))
return DataUrl(file_path=self.linked_h5_file,
data_path=data_path,
scheme='est').path()
def get_energy():
if not self.has_linked_file():
raise ValueError('To get process details you should have a'
'`process` link file')
else:
data_path = '/'.join((self.entry, 'est_saving_pt', 'channel'))
return DataUrl(file_path=self.linked_h5_file,
data_path=data_path,
scheme='silx').path()
self._create_saving_pt()
spectra_ = get_spectra_and_processing()
res = {
'configuration': self.configuration,
'spectra': XASObject._spectra_volume(spectra=self.spectra,
key='mu',
dim_1=self.dim1,
dim_2=self.dim2),
'energy': self.energy,
'spectra': spectra_,
'energy': get_energy(),
'dim1': self.dim1,
'dim2': self.dim2,
}
if with_process_details is True:
res['spectra'] = get_list_spectra()
res['linked_h5_file'] = self.linked_h5_file
res['current_processing_index'] = self.__processing_index
return res
def _spectra_to_dict(self):
spectra_dict = {}
for i_spectrum, spectrum in enumerate(self.spectra):
assert isinstance(spectrum, Spectrum)
spectra_dict[str(i_spectrum) + '_spectrum'] = spectrum.to_dict()
return spectra_dict
def absorbed_beam(self):
return XASObject._spectra_volume(spectra=self.spectra,
key='mu',
......@@ -289,6 +371,8 @@ class XASObject(object):
def load_frm_dict(self, ddict):
"""load XAS values from a dict"""
from est.io import load_data # avoid cyclic import
contains_config_spectrum = 'configuration' in ddict or 'spectra' in ddict
"""The dict can be on the scheme of the to_dict function, containing
the spectra and the configuration. Otherwise we consider it is simply
......@@ -297,7 +381,12 @@ class XASObject(object):
self.configuration = ddict['configuration']
if 'spectra' in ddict:
spectra = ddict['spectra']
if not isinstance(spectra, numpy.ndarray):
# if spectra is given from an url
if isinstance(spectra, str):
spectra = load_data(data_url=DataUrl(path=spectra),
name='spectra')
# if come from a list of spectrum
elif not isinstance(spectra, numpy.ndarray):
new_spectra = []
for spectrum in spectra:
assert isinstance(spectrum, dict)
......@@ -307,6 +396,9 @@ class XASObject(object):
spectra = None
if 'energy' in ddict:
energy = ddict['energy']
if isinstance(energy, str):
energy_url = DataUrl(path=energy)
energy = load_data(data_url=energy_url, name='energy')
else:
energy = None
if 'dim1' in ddict:
......@@ -357,7 +449,7 @@ class XASObject(object):
def dump(self, h5_file):
"""dump the XAS object to a file_path within the Nexus format"""
from est.core.io import XASWriter
from est.core.io import XASWriter # avoid cyclic import
writer = XASWriter()
# dump raw data
writer.output_file = h5_file
......@@ -370,7 +462,7 @@ class XASObject(object):
This is the case for orange widget for example
where some reprocessing can append and each
process need to keep a clear history of
processes, whith no knowledge of next
processes, with no knowledge of next
processing.
"""
# To have dedicated h5 file we have to create one new h5 file for
......@@ -727,7 +819,10 @@ class Spectrum(_Spectrum_Base):
if key in self.__key_mapper:
return self.__key_mapper[key].fget(self)
else:
return self.__other_parameters[key]
if _has_larch and key in self._members():
return self._members()[key]
else:
return self.__other_parameters[key]
def __setitem__(self, key, value):
"""Need for pymca compatibility"""
......@@ -742,6 +837,13 @@ class Spectrum(_Spectrum_Base):
def load_frm_dict(self, ddict):
assert isinstance(ddict, dict)
def value_is_none(value):
if hasattr(value, 'decode'):
value = value.decode('UTF-8')
return value == 'None'
else:
return value is None
larch_ddict = {}
if '_larch_grp_members' in ddict:
if _has_larch is False:
......@@ -749,13 +851,17 @@ class Spectrum(_Spectrum_Base):
'requires it, won\'t be able to load data '
'relative to larch')
for key in ddict['_larch_grp_members']:
larch_ddict[key] = ddict[key]
if hasattr(key, 'decode'):
key = key.decode('UTF-8')
larch_ddict[key] = None if value_is_none(ddict[key]) else ddict[key]
del ddict[key]
for key, value in ddict.items():
self[key] = value
if hasattr(value, 'decode'):
value = value.decode('UTF-8')
self[key] = None if value_is_none(value) else value
for key, value in larch_ddict.items():
setattr(self, key, value)
setattr(self, key, None if value_is_none(value) else value)
return self
def update(self, spectrum):
......@@ -773,13 +879,13 @@ class Spectrum(_Spectrum_Base):
self._MU_KEY: self.mu,
self._ENERGY_KEY: self.energy,
self._FT_KEY: self.ft.to_dict(),
self._NORMALIZED_MU_KEY: self.normalized_mu,
self._NORMALIZED_ENERGY_KEY: self.normalized_energy,
self._NORMALIZED_SIGNAL_KEY: self.post_edge,
self._NORMALIZED_BACKGROUND_KEY: self.pre_edge,
self._EDGE_KEY: self.e0,
self._EXAFS_SIGNAL_KEY: self.chi,
self._EXAFS_KVALUES_KEY: self.k,
self._NORMALIZED_MU_KEY: 'None' if self.normalized_mu is None else self.normalized_mu,
self._NORMALIZED_ENERGY_KEY: 'None' if self.normalized_energy is None else self.normalized_energy,
self._NORMALIZED_SIGNAL_KEY: 'None' if self.post_edge is None else self.post_edge,
self._NORMALIZED_BACKGROUND_KEY: 'None' if self.pre_edge is None else self.pre_edge,
self._EDGE_KEY: 'None' if self.e0 is None else self.e0,
self._EXAFS_SIGNAL_KEY: 'None' if self.chi is None else self.chi,
self._EXAFS_KVALUES_KEY: 'None' if self.k is None else self.k,
}
if _has_larch:
res.update(self._getLarchGroupMenbers())
......@@ -788,11 +894,15 @@ class Spectrum(_Spectrum_Base):
def _getLarchGroupMenbers(self):
"""Return larch group specific menbers"""
assert _has_larch is True
res = {}
res['_larch_grp_members'] = list(self._members().keys())
for key in self._members().keys():
res[key] = getattr(self, key)
res['_larch_grp_members'] = self._members().keys()
if isinstance(self[key], Group):
res[key] = larchutils.group_to_dict(self[key])
else:
res[key] = getattr(self, key)
return res
def __str__(self):
......@@ -824,7 +934,7 @@ class Spectrum(_Spectrum_Base):
_obj = obj
assert isinstance(_obj, dict)
for key, value in _obj.items():
self[key] = value
self[key] = None if value == 'None' else value
def get_missing_keys(self, keys):
"""Return missing keys on the spectrum"""
......
# coding: utf-8
# /*##########################################################################
#
# Copyright (c) 2016-2017 European Synchrotron Radiation Facility
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ###########################################################################*/
"""Define some utils relative to larch"""
__authors__ = ["H. Payno"]
__license__ = "MIT"
__date__ = "02/12/2019"
from larch.symboltable import Group
def group_to_dict(group):
    """Convert the larch group to a serializable dictionary

    Nested :class:`larch.symboltable.Group` members are converted
    recursively; every other member is kept as-is.

    :param group: the group to convert to a serializable dictionary
    :type: larch.symboltable.Group
    :returns: dictionary corresponding to the given larch.symboltable.Group
    :rtype: dictionary
    """
    res = {}
    members = group._members()
    for key, value in members.items():
        if isinstance(value, Group):
            res[key] = group_to_dict(value)
        else:
            res[key] = value
    # bug fix: the result dictionary was built but never returned,
    # so the function always yielded None
    return res
def dict_to_group(dict_, group):
    """Push every (key, value) pair of `dict_` into the given larch group.

    :param dict_: values to write into the group members
    :type: dict
    :param group: group to update
    :type: larch.symboltable.Group
    """
    for key, value in dict_.items():
        group._members()[key] = value
......@@ -331,7 +331,7 @@ class SpectrumViewer(qt.QMainWindow):
assert dim1_index >= 0
assert dim2_index >= 0
spectrum = self.xas_obj.getSpectrum(dim1_index, dim2_index)
spectrum = self.xas_obj.get_spectrum(dim1_index, dim2_index)
for operation in self._curveOperations:
curves = [operation(spectrum),]
if silx_plot_has_baseline_feature is True:
......
......@@ -32,7 +32,7 @@ from datetime import datetime
import h5py
import numpy
from silx.io import utils
from silx.io.dictdump import dicttoh5
from silx.io.dictdump import dicttoh5, h5todict
from silx.io.url import DataUrl
from silx.utils.enum import Enum
......@@ -58,6 +58,82 @@ class InputType(Enum):
xmu_spectrum = '*.xmu' # contains one spectrum
def load_data(data_url, name):
    """
    Load a specific data from an url. Manage the different scheme (silx, fabio,
    numpy, PyMca, xraylarch)

    :param data_url: silx DataUrl with path to the data
    :type: DataUrl
    :param str name: name of the data we want to load. Should be in
                     ('spectra', 'energy', 'configuration')
    :return: data loaded
    :rtype: Union[None,dict,numpy.ndarray]
    """
    if data_url is None:
        return None
    if data_url.scheme() in ('PyMca', 'PyMca5'):
        if has_pymca is False:
            # lazy %-formatting: arguments are only interpolated if emitted
            _logger.warning('Requires PyMca to load data from %s',
                            data_url.path())
            return None
        assert name in ('spectra', 'energy')
        energy, mu = pymca_read_spectrum(data_url.file_path())
        if name == 'spectra':
            # a .dat file holds a single spectrum: expose it as a
            # (n_channel, 1, 1) volume for homogeneity with maps
            return mu.reshape(mu.shape[0], 1, 1)
        else:
            return energy
    elif data_url.scheme() in ('larch', 'xraylarch'):
        if has_larch is False:
            _logger.warning('Requires larch to load data from %s',
                            data_url.path())
            return None
        assert name in ('spectra', 'energy')
        energy, mu = larch_read_ascii(xmu_file=data_url.file_path())
        if name == 'spectra':
            return mu.reshape(mu.shape[0], 1, 1)
        else:
            return energy
    elif data_url.scheme() == 'numpy':
        return numpy.load(data_url.file_path())
    elif data_url.scheme() == 'est':
        # 'est' scheme: spectra previously written as a saving point
        assert name == 'spectra'
        spectra = []
        with h5py.File(data_url.file_path(), 'r') as hdf5:
            # get all possible entries (groups holding an 'est_saving_pt')
            entries = filter(
                lambda x: (isinstance(hdf5[x], h5py.Group)
                           and 'est_saving_pt' in hdf5[x].keys()),
                hdf5.keys())
            entries = list(entries)
            if len(entries) == 0:
                # bug fix: message and file path were passed as two
                # positional arguments without a format placeholder
                _logger.error('no spectra dataset found in the file %s',
                              data_url.file_path())
                return None
            if len(entries) > 1:
                _logger.warning('several entry detected, only one will be '
                                'loaded: %s', entries[0])
            spectra_path = '/'.join((entries[0], 'est_saving_pt', 'spectra'))
            node_spectra = hdf5[spectra_path]
            # spectrum datasets are named after their integer index
            spectrum_indexes = sorted(map(int, node_spectra.keys()))
        from est.core.types import Spectrum  # avoid cyclic import
        for index in spectrum_indexes:
            spectrum_path = '/'.join((spectra_path, str(index)))
            dict_ = h5todict(h5file=data_url.file_path(),
                             path=spectrum_path, asarray=False)
            spectra.append(Spectrum().load_frm_dict(dict_))
        return spectra
    else:
        if data_url.is_valid():
            try:
                return utils.get_data(data_url)
            except ValueError as e:
                _logger.error(e)
        else:
            # bug fix: the original used print-style varargs
            # (msg, name, suffix) which makes the logging machinery raise
            # a formatting error and drop the message
            _logger.warning('invalid url for %s, will not load it', name)
        return None
def read_xas(spectra_url, channel_url, dimensions=None, config_url=None):
"""
Read the given spectra url and the config url if any
......@@ -90,42 +166,6 @@ def read_xas(spectra_url, channel_url, dimensions=None, config_url=None):
if not (_config_url is None or isinstance(_config_url, DataUrl)):