workflow / ewoksapps / est · Commits

Commit f5ccefa2, authored Oct 07, 2021 by payno

Merge branch 'integrate_ewoks_v2' into 'master'

Integrate ewoks v2

See merge request workflow/est!47

Parents: f3be7257, e3b4f2bb
Pipeline #56180 failed with stages in 8 minutes and 46 seconds
Changes: 40
.gitlab-ci.yml

@@ -16,10 +16,10 @@ check_style:
   stage: style
   image: docker-registry.esrf.fr/dau/est:python3.7_stretch_pyqt5_gtk3_wxpython
   before_script:
-    - pip install black flake8
+    - pip install flake8
   script:
-    - LC_ALL=C.UTF-8 black --check --safe .
-    - flake8
+    - flake8 est
+    - flake8 orangecontrib

 # build
@@ -52,7 +52,6 @@ doc:
   only:
     - master

 # test
 .build_template: &test_linux_template_pymca
   stage: test
@@ -145,6 +144,30 @@ test:python3.7-stretch-pyqt5_pymca_larch:
   image: docker-registry.esrf.fr/dau/est:python3.7_stretch_pyqt5_gtk3_wxpython
   <<: *test_linux_template_pymca_larch

+check_lint:
+  stage: test
+  image: docker-registry.esrf.fr/dau/ewoks:python_3.8
+  tags:
+    - linux
+  before_script:
+    - arch
+    - which python
+    - python --version
+    - python -m pip install pip --upgrade
+    - python -m pip install setuptools --upgrade
+    - python -m pip install flake8
+  script:
+    - python setup.py build
+    - python -m pip install .[doc]
+    - python setup.py build_sphinx
+    - mv build/sphinx/html artifacts/doc
+  artifacts:
+    paths:
+      - artifacts/doc/
+    when: on_success
+    expire_in: 2h
+  only:
+    - master

 test:test-est-tutorials_pymca:
   image: docker-registry.esrf.fr/dau/est:python3.7_stretch_pyqt5_gtk3_wxpython
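Note on the lint change above: check_style drops the black check and runs flake8 per package (est, orangecontrib) instead of repo-wide. A local equivalent, as a minimal sketch using flake8's documented legacy Python API (package paths as in the job; this snippet is not part of the commit):

    # Run the same checks as the CI check_style script, but from Python.
    # "est" and "orangecontrib" are the package directories the job lints.
    from flake8.api import legacy as flake8

    style_guide = flake8.get_style_guide()
    report = style_guide.check_files(["est", "orangecontrib"])
    raise SystemExit(report.total_errors)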
est/core/io.py

@@ -217,7 +217,7 @@ class XASWriter(object):
         if not self._output_file:
             _logger.warning(
-                "no output file defined, please give path to the "
-                "output file"
+                "no output file defined, please give path to the output file"
             )
             self._output_file = input()
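The warning text above was previously built from two adjacent string literals; Python concatenates those at compile time, so merging them into one literal is purely cosmetic. A quick standalone illustration of the mechanism and the pitfall the single-literal form avoids (not from the commit):

    msg = (
        "no output file defined, please give path to the "
        "output file"
    )
    # Adjacent literals are joined at compile time:
    assert msg == "no output file defined, please give path to the output file"

    # The classic pitfall: dropping the separating space glues words together.
    glued = "please give path to the" "output file"
    assert "theoutput" in glued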
est/core/process/energyroi.py

@@ -83,15 +83,16 @@ class EnergyROIProcess(
             xas_obj.spectra.energy >= roi.from_
         )
         xas_obj.spectra.energy = xas_obj.spectra.energy[mask]
-        for spectrum in xas_obj.spectra.data.flat:
+        n_s = len(xas_obj.spectra.data.flat)
+        for i_s, spectrum in enumerate(xas_obj.spectra.data.flat):
             spectrum.energy = spectrum.energy[mask]
             spectrum.mu = spectrum.mu[mask]
             for attached_key in ("I0", "I1", "I2", "mu_ref"):
                 if hasattr(spectrum, attached_key):
                     values = getattr(spectrum, attached_key)[mask]
                     setattr(spectrum, attached_key, values)
             # print(spectrum.__dict__)
+            self.progress = i_s / n_s * 100.0
         # print(xas_obj.__dict__)
         return xas_obj

     def run(self):
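_apply_roi builds one boolean mask from the ROI bounds and applies it to the energy axis and to every per-spectrum channel so they stay aligned. A self-contained sketch of that masking pattern with toy arrays (values hypothetical, not from the commit):

    import numpy

    # Toy energy/mu arrays standing in for a Spectrum.
    energy = numpy.linspace(8000.0, 9000.0, 101)
    mu = numpy.random.random(energy.shape)

    # One mask from the ROI bounds, applied to every array of the spectrum
    # so energy and mu keep the same length.
    roi_from, roi_to = 8200.0, 8800.0
    mask = (energy >= roi_from) & (energy <= roi_to)
    energy = energy[mask]
    mu = mu[mask]
    assert energy.shape == mu.shape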
@@ -109,8 +110,10 @@ class EnergyROIProcess(
         # existing roi is priority. This is the case if called from pushworkflow
         # for example.
+        self.progress = 0.0
         if self._roi is not None:
             xas_obj = self._apply_roi(xas_obj=_xas_obj, roi=self._roi)
+        self.progress = 100.0
         self.register_process(_xas_obj, data_keys=("Mu", "energy"))
+        self.outputs.xas_obj = _xas_obj.to_dict()
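run() now brackets the work with self.progress = 0.0 / 100.0 and publishes the result as a plain dict through self.outputs. A minimal sketch of the progress contract these tasks appear to follow (hypothetical mixin, not the actual Process base class):

    class ProgressMixin:
        """Hypothetical sketch: progress as a float percentage in [0, 100]."""

        _progress = 0.0

        @property
        def progress(self) -> float:
            return self._progress

        @progress.setter
        def progress(self, value: float):
            # Clamp to the 0..100 range a progress bar expects (assumption).
            self._progress = min(max(float(value), 0.0), 100.0)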
est/core/process/io.py

@@ -31,22 +31,24 @@ __date__ = "06/11/2019"
 from .process import Process
 from est.core.types import XASObject
 from est.io.utils.information import InputInformation
+from typing import Optional
 import pkg_resources


 class DumpXasObject(
     Process,
     name="dump xas object",
     input_names=["xas_obj"],
     output_names=["result"],
 ):
-    def __init__(self, **kwargs):
+    def __init__(self, missing_file_callback: Optional[str] = None, **kwargs):
+        self._missing_file_callback = missing_file_callback
         super().__init__(**kwargs)
         self._output_file = kwargs.get("varinfo", dict()).get("output_file", None)

     @property
-    def output_file(self):
+    def output_file(self) -> Optional[str]:
         return self._output_file

     @output_file.setter
-    def output_file(self, output_file):
+    def output_file(self, output_file: Optional[str]):
         self._output_file = output_file

     def program_name(self):
@@ -72,6 +74,8 @@ class DumpXasObject(
                 "xas_object should be a convertable dict or an "
                 "instance of XASObject"
             )
+        if self.output_file is None and self._missing_file_callback is not None:
+            self.output_file = self._missing_file_callback()
         if self.output_file is None:
             raise ValueError("output file not provided")
         else:
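With the new missing_file_callback parameter, DumpXasObject can defer asking for an output path until it is actually needed: the callback is invoked only when output_file is still None, and the ValueError remains the fallback. A hypothetical usage sketch (callback name invented; note the diff annotates the parameter Optional[str], although the value is called like a function):

    def ask_for_output_file() -> str:
        # Hypothetical callback: prompt the user only when no file was configured.
        return input("output file: ")

    writer = DumpXasObject(missing_file_callback=ask_for_output_file)
    # writer.output_file is None here, so processing would call
    # ask_for_output_file() to obtain a path instead of raising ValueError.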
est/core/process/larch/autobk.py

@@ -28,8 +28,6 @@ from est.core.types import Spectrum, XASObject
 from est.core.process.process import Process
 from est.core.process.process import _NexusDatasetDef
 from larch.xafs.autobk import autobk
-import multiprocessing
-import functools
 import logging

 _logger = logging.getLogger(__name__)

@@ -128,10 +126,6 @@ def larch_autobk(xas_obj):
     return mback_obj.run()


-_USE_MULTIPROCESSING_POOL = False
-# note: we cannot use multiprocessing pool with pypushflow for now.
-
-
 class Larch_autobk(
     Process,
     name="autobk",

@@ -152,48 +146,24 @@ class Larch_autobk(
             self.setConfiguration(self.inputs.autobk)
             _xas_obj.configuration["autobk"] = self.inputs.autobk
-        self._advancement.reset(max_=_xas_obj.n_spectrum)
-        self._advancement.startProcess()
+        self.progress = 0
         self._pool_process(xas_obj=_xas_obj)
-        self._advancement.endProcess()
+        self.progress = 100
         self.register_process(_xas_obj, data_keys=(_NexusDatasetDef("bkg"),))
+        self.outputs.xas_obj = _xas_obj.to_dict()
         return _xas_obj

     def _pool_process(self, xas_obj):
         assert isinstance(xas_obj, XASObject)
-        if not _USE_MULTIPROCESSING_POOL:
-            for spectrum in xas_obj.spectra:
-                process_spectr_autobk(
-                    spectrum=spectrum,
-                    configuration=xas_obj.configuration,
-                    callbacks=self.callbacks,
-                    overwrite=True,
-                )
-        else:
-            from multiprocessing import Manager
-
-            manager = Manager()
-            output_dict = {}
-            res_list = manager.list()
-            for i_spect, spect in enumerate(xas_obj.spectra):
-                res_list.append(None)
-                output_dict[spect] = i_spect
-            with multiprocessing.Pool(5) as p:
-                partial_ = functools.partial(
-                    process_spectr_autobk,
-                    configuration=xas_obj.configuration,
-                    callbacks=self.callbacks,
-                    overwrite=False,
-                    output=res_list,
-                    output_dict=output_dict,
-                )
-                p.map(partial_, xas_obj.spectra)
-            # then update local spectrum
-            for spectrum, res in zip(xas_obj.spectra, res_list):
-                spectrum.update(res)
+        n_s = len(xas_obj.spectra.data.flat)
+        for i_s, spectrum in enumerate(xas_obj.spectra):
+            process_spectr_autobk(
+                spectrum=spectrum,
+                configuration=xas_obj.configuration,
+                callbacks=self.callbacks,
+                overwrite=True,
+            )
+            self.progress = i_s / n_s * 100.0

     def definition(self):
         return "autobk calculation"
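The same rewrite recurs in mback, mback_norm, pre_edge, xftf and noise below: the unused multiprocessing branch (the removed comment explains pools do not mix with pypushflow) gives way to a serial loop that reports per-spectrum progress. A generic sketch of the shared pattern (names hypothetical; process_one stands in for process_spectr_autobk and friends):

    def pool_process(task, spectra, process_one, configuration):
        # Serial per-spectrum processing with progress reporting, as in the
        # rewritten _pool_process methods. `task.progress` is a percentage.
        n_s = len(spectra)
        for i_s, spectrum in enumerate(spectra):
            process_one(
                spectrum=spectrum,
                configuration=configuration,
                overwrite=True,
            )
            task.progress = i_s / n_s * 100.0
        # run() sets progress to 100 afterwards; the loop itself only
        # reaches (n_s - 1) / n_s * 100.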
est/core/process/larch/mback.py

@@ -28,8 +28,6 @@ from est.core.types import Spectrum, XASObject
 from est.core.process.process import Process
 from est.core.process.process import _NexusDatasetDef
 from larch.xafs.mback import mback
-import multiprocessing
-import functools
 import logging

 _logger = logging.getLogger(__name__)

@@ -119,10 +117,6 @@ def larch_mback(xas_obj):
     return mback_obj.run()


-_USE_MULTIPROCESSING_POOL = False
-# note: we cannot use multiprocessing pool with pypushflow for now.
-
-
 class Larch_mback(
     Process,
     name="mback",

@@ -146,10 +140,9 @@ class Larch_mback(
         elif "mback" not in _xas_obj.configuration:
             _xas_obj.configuration["mback"] = {}
-        self._advancement.reset(max_=_xas_obj.n_spectrum)
-        self._advancement.startProcess()
+        self.progress = 0.0
         self._pool_process(xas_obj=_xas_obj)
-        self._advancement.endProcess()
+        self.progress = 100.0
         self.register_process(
             _xas_obj,
             data_keys=(

@@ -162,38 +155,15 @@ class Larch_mback(
     def _pool_process(self, xas_obj):
         assert isinstance(xas_obj, XASObject)
-        if not _USE_MULTIPROCESSING_POOL:
-            for spectrum in xas_obj.spectra:
-                process_spectr_mback(
-                    spectrum=spectrum,
-                    configuration=xas_obj.configuration["mback"],
-                    callbacks=self.callbacks,
-                    overwrite=True,
-                )
-        else:
-            from multiprocessing import Manager
-
-            manager = Manager()
-            output_dict = {}
-            res_list = manager.list()
-            for i_spect, spect in enumerate(xas_obj.spectra):
-                res_list.append(None)
-                output_dict[spect] = i_spect
-            with multiprocessing.Pool(5) as p:
-                partial_ = functools.partial(
-                    process_spectr_mback,
-                    configuration=xas_obj.configuration["mback"],
-                    callbacks=self.callbacks,
-                    overwrite=False,
-                    output=res_list,
-                    output_dict=output_dict,
-                )
-                p.map(partial_, xas_obj.spectra)
-            # then update local spectrum
-            for spectrum, res in zip(xas_obj.spectra, res_list):
-                spectrum.update(res)
+        n_s = len(xas_obj.spectra.data.flat)
+        for i_s, spectrum in enumerate(xas_obj.spectra):
+            process_spectr_mback(
+                spectrum=spectrum,
+                configuration=xas_obj.configuration["mback"],
+                callbacks=self.callbacks,
+                overwrite=True,
+            )
+            self.progress = i_s / n_s * 100.0

     def definition(self):
         return "mback calculation"
est/core/process/larch/mback_norm.py

@@ -29,8 +29,6 @@ from est.core.process.process import Process
 from est.core.process.process import _NexusDatasetDef
 from larch.xafs.mback import mback_norm
 from larch.xafs.pre_edge import preedge
-import multiprocessing
-import functools
 import logging

 _logger = logging.getLogger(__name__)

@@ -128,10 +126,6 @@ def larch_mback_norm(xas_obj):
     return mback_obj.run()


-_USE_MULTIPROCESSING_POOL = False
-# note: we cannot use multiprocessing pool with pypushflow for now.
-
-
 class Larch_mback_norm(
     Process,
     name="mback_norm",

@@ -152,10 +146,9 @@ class Larch_mback_norm(
             self.setConfiguration(self.inputs.mback_norm)
             _xas_obj.configuration["mback_norm"] = self.inputs.mback_norm
-        self._advancement.reset(max_=_xas_obj.n_spectrum)
-        self._advancement.startProcess()
+        self.progress = 0.0
         self._pool_process(xas_obj=_xas_obj)
-        self._advancement.endProcess()
+        self.progress = 100.0
         data_keys = [
             _NexusDatasetDef("mback_mu"),
             _NexusDatasetDef("norm_mback"),

@@ -168,38 +161,15 @@ class Larch_mback_norm(
     def _pool_process(self, xas_obj):
         assert isinstance(xas_obj, XASObject)
-        if not _USE_MULTIPROCESSING_POOL:
-            for spectrum in xas_obj.spectra:
-                process_spectr_mback_norm(
-                    spectrum=spectrum,
-                    configuration=xas_obj.configuration,
-                    callbacks=self.callbacks,
-                    overwrite=True,
-                )
-        else:
-            from multiprocessing import Manager
-
-            manager = Manager()
-            output_dict = {}
-            res_list = manager.list()
-            for i_spect, spect in enumerate(xas_obj.spectra):
-                res_list.append(None)
-                output_dict[spect] = i_spect
-            with multiprocessing.Pool(5) as p:
-                partial_ = functools.partial(
-                    process_spectr_mback_norm,
-                    configuration=xas_obj.configuration,
-                    callback=self._advancement.increaseAdvancement,
-                    overwrite=False,
-                    output=res_list,
-                    output_dict=output_dict,
-                )
-                p.map(partial_, xas_obj.spectra)
-            # then update local spectrum
-            for spectrum, res in zip(xas_obj.spectra, res_list):
-                spectrum.update(res)
+        n_s = len(xas_obj.spectra.data.flat)
+        for i_s, spectrum in enumerate(xas_obj.spectra):
+            process_spectr_mback_norm(
+                spectrum=spectrum,
+                configuration=xas_obj.configuration,
+                callbacks=self.callbacks,
+                overwrite=True,
+            )
+            self.progress = i_s / n_s * 100.0

     def definition(self):
         return "mback norm calculation"
est/core/process/larch/pre_edge.py

@@ -29,8 +29,6 @@ from est.core.process.process import Process
 from est.core.process.process import _NexusSpectrumDef, _NexusDatasetDef
 from est.core.utils.symbol import MU_CHAR
 from larch.xafs.pre_edge import pre_edge
-import multiprocessing
-import functools
 import logging

 _logger = logging.getLogger(__name__)

@@ -98,7 +96,9 @@ def process_spectr_pre_edge(
             opts[opt_name] = _conf[opt_name]

-    if _DEBUG is True:
-        assert isinstance(spectrum, Group)
+    assert isinstance(
+        spectrum, Group
+    ), f"spectrum is expected to be an instance of {type(Group)}. Not {type(spectrum)}"
     if overwrite:
         _spectrum = spectrum
     else:
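The isinstance guard above is promoted from a _DEBUG-only assert to an unconditional one with a diagnostic message. One detail worth knowing when writing such messages: for a class Group, f"{type(Group)}" renders as <class 'type'> (the metaclass), while f"{Group}" prints the class itself. A standalone illustration (Group is a stand-in here, not imported from larch):

    class Group:
        """Stand-in for larch's Group class."""

    def check(spectrum):
        # Same idiom as the diff; interpolating the class directly keeps
        # the message informative.
        assert isinstance(
            spectrum, Group
        ), f"spectrum is expected to be an instance of {Group}. Not {type(spectrum)}"

    check(Group())          # passes
    # check("not a group")  # AssertionError: ... Not <class 'str'>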
@@ -124,10 +124,6 @@ def larch_pre_edge(xas_obj):
     return mback_obj.run()


-_USE_MULTIPROCESSING_POOL = False
-# note: we cannot use multiprocessing pool with pypushflow for now.
-
-
 class Larch_pre_edge(
     Process,
     name="pre_edge",

@@ -149,10 +145,9 @@ class Larch_pre_edge(
             self.setConfiguration(self.inputs.pre_edge)
             _xas_obj.configuration["pre_edge"] = self.inputs.pre_edge
-        self._advancement.reset(max_=_xas_obj.n_spectrum)
-        self._advancement.startProcess()
+        self.progress = 0.0
         self._pool_process(xas_obj=_xas_obj)
-        self._advancement.endProcess()
+        self.progress = 100.0
         self.register_process(
             _xas_obj,
             data_keys=(

@@ -218,38 +213,15 @@ class Larch_pre_edge(
     def _pool_process(self, xas_obj):
         assert isinstance(xas_obj, XASObject)
-        if not _USE_MULTIPROCESSING_POOL:
-            for spectrum in xas_obj.spectra:
-                process_spectr_pre_edge(
-                    spectrum=spectrum,
-                    configuration=xas_obj.configuration,
-                    callbacks=self.callbacks,
-                    overwrite=True,
-                )
-        else:
-            from multiprocessing import Manager
-
-            manager = Manager()
-            output_dict = {}
-            res_list = manager.list()
-            for i_spect, spect in enumerate(xas_obj.spectra):
-                res_list.append(None)
-                output_dict[spect] = i_spect
-            with multiprocessing.Pool(5) as p:
-                partial_ = functools.partial(
-                    process_spectr_pre_edge,
-                    configuration=xas_obj.configuration,
-                    callbacks=self.callbacks,
-                    overwrite=False,
-                    output=res_list,
-                    output_dict=output_dict,
-                )
-                p.map(partial_, xas_obj.spectra)
-            # then update local spectrum
-            for spectrum, res in zip(xas_obj.spectra, res_list):
-                spectrum.update(res)
+        n_s = len(xas_obj.spectra.data.flat)
+        for i_s, spectrum in enumerate(xas_obj.spectra):
+            process_spectr_pre_edge(
+                spectrum=spectrum,
+                configuration=xas_obj.configuration,
+                callbacks=self.callbacks,
+                overwrite=True,
+            )
+            self.progress = i_s / n_s * 100.0

     def definition(self):
         return "pre_edge calculation"
est/core/process/larch/xftf.py

@@ -30,8 +30,6 @@ from est.core.process.process import _NexusSpectrumDef
 from est.core.process.process import _NexusDatasetDef
 from larch.xafs.xafsft import xftf
 from est.core.utils.symbol import ANGSTROM_CHAR
-import multiprocessing
-import functools
 import logging
 import numpy

@@ -151,10 +149,6 @@ def larch_xftf(xas_obj):
     return mback_obj.run()


-_USE_MULTIPROCESSING_POOL = False
-# note: we cannot use multiprocessing pool with pypushflow for now.
-
-
 class Larch_xftf(
     Process,
     name="xftf",

@@ -254,38 +248,15 @@ class Larch_xftf(
     def _pool_process(self, xas_obj):
         assert isinstance(xas_obj, XASObject)
-        if not _USE_MULTIPROCESSING_POOL:
-            for spectrum in xas_obj.spectra:
-                process_spectr_xftf(
-                    spectrum=spectrum,
-                    configuration=xas_obj.configuration,
-                    callbacks=self.callbacks,
-                    overwrite=True,
-                )
-        else:
-            from multiprocessing import Manager
-
-            manager = Manager()
-            output_dict = {}
-            res_list = manager.list()
-            for i_spect, spect in enumerate(xas_obj.spectra):
-                res_list.append(None)
-                output_dict[spect] = i_spect
-            with multiprocessing.Pool(5) as p:
-                partial_ = functools.partial(
-                    process_spectr_xftf,
-                    configuration=xas_obj.configuration,
-                    callbacks=self.callbacks,
-                    overwrite=False,
-                    output=res_list,
-                    output_dict=output_dict,
-                )
-                p.map(partial_, xas_obj.spectra)
-            # then update local spectrum
-            for spectrum, res in zip(xas_obj.spectra, res_list):
-                spectrum.update(res)
+        n_s = len(xas_obj.spectra.data.flat)
+        for i_s, spectrum in enumerate(xas_obj.spectra):
+            process_spectr_xftf(
+                spectrum=spectrum,
+                configuration=xas_obj.configuration,
+                callbacks=self.callbacks,
+                overwrite=True,
+            )
+            self.progress = i_s / n_s * 100.0

     def definition(self):
         return "xftf calculation"
est/core/process/noise.py

@@ -36,16 +36,10 @@ import scipy.signal
 import logging
 import numpy
 import pkg_resources
-import multiprocessing
-import functools

 _logger = logging.getLogger(__name__)

-_USE_MULTIPROCESSING_POOL = False
-# note: we cannot use multiprocessing pool with pypushflow for now.
-

 def process_noise_savgol(
     spectrum,
     configuration,
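process_noise_savgol (whose signature starts above) estimates noise with a Savitzky-Golay filter from scipy.signal. A minimal sketch of the underlying idea (window and order values hypothetical, not the function's actual defaults):

    import numpy
    import scipy.signal

    def savgol_noise(mu, window_length=5, polyorder=2):
        # Smooth the signal, then take the residual between raw and
        # smoothed data as a noise estimate.
        smoothed = scipy.signal.savgol_filter(mu, window_length, polyorder)
        return numpy.std(mu - smoothed)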
@@ -178,10 +172,9 @@ class NoiseProcess(
         if self._settings:
             _xas_obj.configuration["noise"] = self._settings
-        self._advancement.reset(max_=_xas_obj.n_spectrum)
-        self._advancement.startProcess()
+        self.progress = 0.0
         self._pool_process(xas_obj=_xas_obj)
-        self._advancement.endProcess()
+        self.progress = 100.0
         self.register_process(
             _xas_obj,
             data_keys=(

@@ -205,38 +198,15 @@ class NoiseProcess(
     def _pool_process(self, xas_obj):
         assert isinstance(xas_obj, XASObject)
-        if not _USE_MULTIPROCESSING_POOL:
-            for spectrum in xas_obj.spectra:
-                process_noise_savgol(
-                    spectrum=spectrum,
-                    configuration=xas_obj.configuration,
-                    callbacks=self.callbacks,
-                    overwrite=True,
-                )
-        else:
-            from multiprocessing import Manager
-
-            manager = Manager()
-            output_dict = {}
-            res_list = manager.list()
-            for i_spect, spect in enumerate(xas_obj.spectra):
-                res_list.append(None)
-                output_dict[spect] = i_spect
-            with multiprocessing.Pool(1) as p:
-                partial_ = functools.partial(
-                    process_noise_savgol,
-                    configuration=xas_obj.configuration,
-                    callbacks=self.callbacks,
-                    overwrite=False,
-                    output=res_list,
-                    output_dict=output_dict,
-                )
-                p.map(partial_, xas_obj.spectra)
-            # then update local spectrum
-            for spectrum, res in zip(xas_obj.spectra, res_list):
-                spectrum.update(res)
+        n_s = len(xas_obj.spectra.data.flat)
+        for i_s, spectrum in enumerate(xas_obj.spectra):
+            process_noise_savgol(
+                spectrum=spectrum,
+                configuration=xas_obj.configuration,
+                callbacks=self.callbacks,
+                overwrite=True,
+            )
+            self.progress = i_s / n_s * 100.0

     def definition(self):