Skip to content
GitLab
Projects
Groups
Snippets
/
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
workflow
ewoksapps
est
Commits
536e90ff
Commit
536e90ff
authored
Aug 04, 2021
by
payno
Browse files
Merge branch 'esrftaskgraph_prototype' into 'master'
Move Process to Ewoks `Task` and update setup See merge request workflow/est!40
parents
7dc04564
8a512d0f
Pipeline
#52500
failed with stages
in 11 minutes and 4 seconds
Changes
105
Pipelines
2
Hide whitespace changes
Inline
Side-by-side
.gitlab-ci.yml
View file @
536e90ff
...
...
@@ -34,8 +34,6 @@ doc:
-
python --version
-
python -m pip install pip --upgrade
-
python -m pip install setuptools --upgrade
-
python -m pip install -r requirements.txt
-
python -m pip install -r requirements-doc.txt
-
python -m pip install jupyterlab
-
python -m pip install pymca --upgrade --pre
-
python -m pip install xraylarch
...
...
@@ -43,7 +41,7 @@ doc:
-
mkdir artifacts
script
:
-
python setup.py build
-
python -m pip install .
-
python -m pip install .
[doc]
-
python setup.py build_sphinx
-
mv build/sphinx/html artifacts/doc
artifacts
:
...
...
@@ -75,10 +73,7 @@ doc:
-
install_anyqt 'master'
-
python -m pip install fabio --upgrade --pre
-
python -m pip install silx --upgrade --pre
-
python -m pip install -r requirements.txt
-
python -m pip install pymca --upgrade --pre
-
python -m pip install orange3==3.21
-
python -m pip install -r requirements.txt
-
python -m pip install .
-
/usr/bin/xvfb-run --server-args="-screen 0 1024x768x24" -a python -m pytest est
...
...
@@ -102,13 +97,9 @@ doc:
-
source ./ci/install_scripts.sh
-
install_anyqt 'master'
-
python -m pip install fabio --upgrade --pre
# - python -m pip install -r requirements.txt
-
python -m pip install silx --upgrade --pre
-
python -m pip install -r requirements.txt
-
python -m pip install packaging
-
python -m pip install xraylarch
-
python -m pip install orange3==3.21
-
python -m pip install -r requirements.txt
-
python -m pip install .
-
/usr/bin/xvfb-run --server-args="-screen 0 1024x768x24" -a python -m pytest est
...
...
@@ -132,14 +123,10 @@ doc:
-
source ./ci/install_scripts.sh
-
install_anyqt 'master'
-
python -m pip install fabio --upgrade --pre
# - python -m pip install -r requirements.txt
-
python -m pip install silx --upgrade --pre
-
python -m pip install pymca --upgrade --pre
-
python -m pip install -r requirements.txt
-
python -m pip install packaging
-
python -m pip install xraylarch
-
python -m pip install orange3==3.21
-
python -m pip install -r requirements.txt
-
python -m pip install .
-
/usr/bin/xvfb-run --server-args="-screen 0 1024x768x24" -a python -m pytest --cov=est est
...
...
@@ -177,7 +164,6 @@ test:test-est-tutorials_pymca:
-
python -m pip install ipykernel
-
python -m pip install pymca --upgrade --pre
-
python -m pip install silx --upgrade --pre
-
python -m pip install -r requirements.txt
-
python -m pip install .
-
ls doc/tutorials/
script
:
...
...
est/__init__.py
View file @
536e90ff
...
...
@@ -39,11 +39,8 @@ _logging.getLogger(__name__).addHandler(_logging.NullHandler())
project
=
_os
.
path
.
basename
(
_os
.
path
.
dirname
(
_os
.
path
.
abspath
(
__file__
)))
try
:
from
._version
import
__date__
as
date
# noqa
from
._version
import
version
,
version_info
,
hexversion
,
strictversion
# noqa
except
ImportError
:
pass
# not raised for now because fail with pip install -e option. Something to
# look at.
# raise RuntimeError("Do NOT use %s from its sources: build it and use the built version" % project)
from
.version
import
__date__
as
date
# noqa
from
.version
import
version
,
version_info
,
hexversion
,
strictversion
# noqa
__version__
=
version
est/app/canvas_launcher/__init__.py
View file @
536e90ff
from
.launcher
import
Launcher
from
.launcher
import
Launcher
# noqa
est/app/ows_to_script.py
deleted
100644 → 0
View file @
7dc04564
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import
logging
import
sys
import
argparse
from
pypushflow.representation.scheme.ows_parser
import
OwsParser
from
pypushflow
import
Workflow
import
est.version
import
pypushflow.version
import
subprocess
import
os
# Module-wide logging configuration: DEBUG is enabled unconditionally so
# conversion runs are fully traced when this module is used as a CLI tool.
logging.basicConfig(level=logging.DEBUG)

_logger = logging.getLogger(__name__)
def _convert(scheme, output_file, overwrite):
    """Translate an Orange (.ows) workflow scheme into a standalone,
    executable python script.

    :param scheme: the parsed workflow scheme to convert
    :param output_file: path of the python script to generate
    :param overwrite: if True, silently replace *output_file* when it
                      already exists
    :raises ValueError: if *output_file* exists and *overwrite* is False
    """
    _logger.warning("translate {} to {}".format(scheme, output_file))
    if os.path.exists(output_file):
        if overwrite:
            os.remove(output_file)
        else:
            raise ValueError("{} already exists.".format(output_file))
    # write the generation banner first; pypushflow's converter then
    # appends the translated workflow body to the same file
    with open(output_file, "w+") as file_:
        file_.write(_dump_info_generation())
    workflow = Workflow.ProcessableWorkflow(scheme)
    converter = Workflow.Converter(
        workflow=workflow, output_file=output_file, with_opts=True
    )
    converter.process()
    # set up workflow: append the argument-parsing entry point section
    with open(output_file, mode="a") as file_:
        file_.write(_dump_executable_script_section())
    # best effort: reformat the generated file with `black` when available;
    # a failure here must not invalidate the (already complete) output file
    try:
        subprocess.call(
            ["python", "-m", "black", output_file], stdout=subprocess.PIPE
        )
    except Exception as e:
        _logger.error(
            "Fail to apply black on {}. Error is " "{}".format(output_file, e)
        )
    _logger.info(
        "translation finished. You can execute python {} [[--input --input-spectra --input-spectra-dims --input-channel --input-energy-unit --input-dimensions]]".format(
            output_file
        )
    )
def _dump_info_generation():
    """Return the banner comment written at the top of generated scripts."""
    banner = (
        "# This file has been generated automatically using\n"
        "# pypushflow {} and est {}\n"
    )
    return banner.format(pypushflow.version.version, est.version.version)
def _dump_executable_script_section():
    # Source appended verbatim to every generated script: it parses the
    # command-line options, builds an InputInformation from them and feeds
    # the resulting XASObject to the generated `main` function.
    # NOTE(review): indentation inside this literal was reconstructed from a
    # flattened capture — confirm against the original file before relying
    # on exact byte content.
    return """
from pypushflow.utils import IgnoreProcess
if __name__ == '__main__':
    import sys
    import argparse
    from est.app.utils import get_xas_obj
    from est.app.utils import get_url
    from est.app.utils import get_unit
    from est.app.utils import convert_spectra_dims
    from silx.io.url import DataUrl
    from est.core.types import Dim
    from est.io.utils.information import InputInformation
    from est.io.utils.information import SpecInputInformation
    parser = argparse.ArgumentParser(description=__doc__)
    # single file input option
    parser.add_argument(
        "-i",
        "--input",
        dest="input_",
        default=None,
        help="Input of the workflow. Should be a path to a file",
    )
    # input url option
    parser.add_argument(
        "--input-spectra",
        "--spectra",
        dest="input_spectra",
        default=None,
        help="Input spectra url",
    )
    parser.add_argument(
        "--input-spectra-dims",
        "--spectra-dims",
        dest="input_spectra_dims",
        default=None,
        help="Input spectra dimension. Should be a tuple of three values: "
        "(X,Y,channel). If None will take the default dimension "
        "according to the input type.",
    )
    parser.add_argument(
        "--input-energy",
        "--input-channel",
        "--channel",
        dest="input_channel",
        default=None,
        help="Input channel url (usually energy)",
    )
    parser.add_argument(
        "--input-configuration",
        "--configuration",
        dest="input_configuration",
        default=None,
        help="Input configuration url",
    )
    parser.add_argument(
        "--input-energy-unit",
        "--energy-unit",
        dest="input_energy_unit",
        default="eV",
        help="energy unit",
    )
    parser.add_argument(
        "--input-dimensions",
        "--dimensions",
        dest="input_dimensions",
        default="None",
        help="dimension of the input as (Z,Y,X) for example."
        "If None will take default unit according to the input type",
    )
    # I0, I1, I2 & mu_ref
    parser.add_argument(
        "--input-I0",
        "--I0",
        dest="input_I0",
        default="None",
        help="url to I0",
    )
    parser.add_argument(
        "--input-I1",
        "--I1",
        dest="input_I1",
        default="None",
        help="url to I1",
    )
    parser.add_argument(
        "--input-I2",
        "--I2",
        dest="input_I2",
        default="None",
        help="url to I2",
    )
    parser.add_argument(
        "--input-mu-ref",
        "--mu-ref",
        dest="input_mu_ref",
        default="None",
        help="url to mu_ref",
    )
    # spec file specific inputs
    parser.add_argument(
        "--input-energy-col-name",
        "--energy-col-name",
        dest="input_energy_col_name",
        default=None,
        help="Provide name of the energy column for spec file",
    )
    parser.add_argument(
        "--input-abs-col-name",
        "--abs-col-name",
        dest="input_abs_col_name",
        default=None,
        help="Provide name of the absorption column for spec file",
    )
    parser.add_argument(
        "--input-monitor-col-name",
        "--monitor-col-name",
        dest="input_monitor_col_name",
        default=None,
        help="Provide name of the monitor column for spec file",
    )
    parser.add_argument(
        "--input-scan-title",
        "--scan-title",
        dest="input_scan_title_name",
        default=None,
        help="Provide scan title name to consider",
    )
    # handle larch settings
    parser.add_argument(
        "--set-autobk-params",
        dest="set_autobk_params",
        default=None,
        help="set autobk settings",
    )
    parser.add_argument(
        "--set-mback-params",
        dest="set_mback_params",
        default=None,
        help="set mback settings",
    )
    parser.add_argument(
        "--set-mback-norm-params",
        dest="set_mback_norm_params",
        default=None,
        help="set mback norm settings",
    )
    parser.add_argument(
        "--set-pre-edge-params",
        dest="set_pre_edge_params",
        default=None,
        help="set pre-edge settings",
    )
    parser.add_argument(
        "--set-xftf-params",
        dest="set_xftf_params",
        default=None,
        help="set xftf settings",
    )
    # handle noise settings
    parser.add_argument(
        "--set-noise-params",
        dest="set_noise_params",
        default=None,
        help="set noise settings",
    )
    # handle output settings
    parser.add_argument(
        "--set-output-params",
        dest="set_output_params",
        default=None,
        help="set output settings",
    )
    # handle energy roi settings
    parser.add_argument(
        "--set-energyroi-params",
        dest="set_energyroi_params",
        default=None,
        help="set energy roi settings",
    )
    options = parser.parse_args(sys.argv[1:])
    input_information = InputInformation(
        spectra_url=get_url(options.input_spectra),
        channel_url=get_url(options.input_channel),
        dimensions=convert_spectra_dims(options.input_spectra_dims),
        config_url=get_url(options.input_configuration),
        energy_unit=get_unit(options.input_energy_unit),
        spec_input=SpecInputInformation(
            options.input_,
            options.input_energy_col_name,
            options.input_abs_col_name,
            options.input_monitor_col_name,
            options.input_scan_title_name,
        ),
    )
    input_information.I0 = get_url(options.input_I0)
    input_information.I1 = get_url(options.input_I1)
    input_information.I2 = get_url(options.input_I2)
    input_information.mu_ref = get_url(options.input_mu_ref)
    xas_obj = get_xas_obj(input_information)
    main(input_data=xas_obj, channel="xas_obj", options=options)
"""
def main(argv):
    """Command-line entry point: convert a .ows workflow file into an
    executable python script.

    :param argv: ``sys.argv``-like argument list (argv[0] is ignored)
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "workflow_file",
        # fixed: trailing space added so the concatenated help text does not
        # read "with theprovided scan"
        help="Path to the .ows file defining the workflow to process with the "
        "provided scan",
    )
    parser.add_argument("output_file", help="Output python file")
    parser.add_argument(
        "--overwrite",
        help="Overwrite output file if exists",
        default=False,
        action="store_true",
    )
    parser.add_argument(
        "--debug",
        dest="debug",
        action="store_true",
        default=False,
        help="Set logging system in debug mode",
    )
    options = parser.parse_args(argv[1:])
    # always emit a .py file, whatever extension the user supplied
    if not options.output_file.lower().endswith(".py"):
        options.output_file = options.output_file + ".py"
    # tune the log level of the packages involved in the conversion
    log_level = logging.INFO
    if options.debug:
        log_level = logging.DEBUG
    for log_ in ("est", "pypushflow"):
        logging.getLogger(log_).setLevel(log_level)
    scheme = OwsParser.scheme_load(options.workflow_file, load_handlers=True)
    _convert(
        scheme=scheme,
        output_file=options.output_file,
        overwrite=options.overwrite,
    )
if __name__ == "__main__":
    # Script entry point: forward the raw argument vector to main().
    main(sys.argv)
# convert an ows file to a script calling est low level processes.
est/app/process.py
View file @
536e90ff
import
argparse
import
sys
from
pypushflow.Workflow
import
ProcessableWorkflow
from
pypushflow.representation.scheme.ows_parser
import
OwsParser
import
logging
import
signal
from
pypushflow.representation.scheme.scheme
import
Scheme
from
silx.io.url
import
DataUrl
from
est.units
import
ur
from
est.io.utils.information
import
InputInformation
from
est.io.utils.information
import
SpecInputInformation
from
ewoksorange.owsconvert
import
ows_to_ewoks
from
.utils
import
get_unit
from
.utils
import
get_url
from
.utils
import
convert_spectra_dims
from
.utils
import
get_xas_obj
try
:
import
h5py
has_h5py
=
True
except
:
import
h5py
# noqa
except
ImportError
:
has_h5py
=
False
from
typing
import
Union
else
:
has_h5py
=
True
# Module-wide logging configuration: DEBUG is enabled unconditionally so
# workflow execution is fully traced when this module is run as a CLI tool.
logging.basicConfig(level=logging.DEBUG)

_logger = logging.getLogger(__name__)
def _insert_input_in_scheme(scheme, input_information):
    """Update 'starting' node properties to include the provided input.

    :param scheme: parsed workflow scheme whose nodes are patched in place
    :param input_information: source of the urls / dimensions to inject
    """
    # monkey patch the input file for start nodes if an input is given.
    # NOTE(review): input nodes are identified by the presence of the
    # "_spec_file_setting" key in their properties, and the nesting of the
    # url/dimension patches under that check was reconstructed from a
    # flattened capture — confirm against the Orange widget settings.
    for node in scheme.nodes:
        if node.properties and "_spec_file_setting" in node.properties:
            if input_information.is_spec_input():
                node.properties["_spec_file_setting"] = input_information.spec_info
            if input_information.spectra_url is not None:
                # DataUrl-like objects are stored as their string path
                node.properties[
                    "_spectra_url_setting"
                ] = input_information.spectra_url.path()
            if input_information.dimensions is not None:
                node.properties["_dimensions_setting"] = input_information.dimensions
            if input_information.channel_url is not None:
                node.properties[
                    "_energy_url_setting"
                ] = input_information.channel_url.path()
            if input_information.configuration_url is not None:
                node.properties[
                    "_configuration_url_setting"
                ] = input_information.configuration_url.path()
def _insert_output_in_scheme(scheme, output_):
    """Update node properties to include the provided *output* target.

    (Docstring fixed: the original said "input", copy-pasted from the
    sibling `_insert_input_in_scheme`.)

    :param scheme: parsed workflow scheme whose nodes are patched in place
    :param output_: value stored under each node's "_output_file_setting"
    """
    found_output = False
    # monkey patch the output file on every node that declares the setting
    for node in scheme.nodes:
        if node.properties and "_output_file_setting" in node.properties:
            node.properties["_output_file_setting"] = output_
            found_output = True
    if not found_output:
        # best effort only: the caller's output request is dropped
        _logger.warning(
            "No node for processing output found. output "
            "information provided will be ignored"
        )
def exec_(
    scheme: Scheme,
    input_information: InputInformation,
    output_: Union[str, None, dict] = None,
    timeout: Union[int, None] = None,
):
    """Execute the workflow described by *scheme* on the given input.

    :param scheme: workflow to execute (patched in place with input/output)
    :param input_information: description of the input data (urls, spec file…)
    :param output_: optional output target injected into the scheme nodes
    :param timeout: seconds to wait for the end actor; None waits forever
    :raises ValueError: when *input_information* mixes an input file with
        input urls (``is_valid()`` returns False)
    :return: the end actor's ``out_data`` once the workflow completed
    """
    if not input_information.is_valid():
        raise ValueError("You cannot provide an input file and input urls")
    _insert_input_in_scheme(input_information=input_information, scheme=scheme)
    if output_ is not None:
        _insert_output_in_scheme(scheme=scheme, output_=output_)
    workflow = ProcessableWorkflow(scheme=scheme)

    # add SIGINT capture: let the user abort a running workflow cleanly
    def signal_handler(sig, frame):
        _logger.warning("stop workflow execution on user request")
        workflow._end_actor.join(0)
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)
    xas_obj = get_xas_obj(input_information=input_information)
    # the start actor expects a ("data", payload) pair; the XASObject is
    # serialized to a plain dict before being sent through the workflow
    workflow._start_actor.trigger(("data", xas_obj.to_dict()))
    workflow._end_actor.join(timeout)
    res = workflow._end_actor.out_data
    # "unknow" typo kept as-is: it is a runtime log string, not a comment
    title = scheme.title or "unknow"
    _logger.info(
        "workflow '{}' completed with {}".format(title, str(input_information))
    )
    return res
def
main
(
argv
):
parser
=
argparse
.
ArgumentParser
(
description
=
__doc__
)
parser
.
add_argument
(
...
...
@@ -157,7 +76,7 @@ def main(argv):
"--input-dimensions"
,
"--dimensions"
,
dest
=
"input_dimensions"
,
default
=
"
None
"
,
default
=
None
,
help
=
"dimension of the input as (Z,Y,X) for example."
"If None will take default unit according to the input type"
,
)
...
...
@@ -166,28 +85,28 @@ def main(argv):
"--input-I0"
,
"--I0"
,
dest
=
"input_I0"
,
default
=
"
None
"
,
default
=
None
,
help
=
"url to I0"
,
)
parser
.
add_argument
(
"--input-I1"
,
"--I1"
,
dest
=
"input_I1"
,
default
=
"
None
"
,
default
=
None
,
help
=
"url to I1"
,
)
parser
.
add_argument
(
"--input-I2"
,
"--I2"
,
dest
=
"input_I2"
,
default
=
"
None
"
,
default
=
None
,
help
=
"url to I2"
,
)
parser
.
add_argument
(
"--input-mu-ref"
,
"--mu-ref"
,
dest
=
"input_mu_ref"
,
default
=
"
None
"
,
default
=
None
,
help
=
"url to mu_ref"
,
)
# spec file specific inputs
...
...
@@ -201,6 +120,8 @@ def main(argv):
parser
.
add_argument
(
"--input-abs-col-name"
,
"--abs-col-name"
,
"--input-mu-col-name"
,
"--mu-col-name"
,
dest
=
"input_abs_col_name"
,
default
=
None
,
help
=
"Provide name of the absorption column for spec file"
,
...
...
@@ -223,14 +144,13 @@ def main(argv):
parser
.
add_argument
(
"-o"
,
"--output"
,
dest
=
"output_"
,
dest
=
"output_
file
"
,
default
=
None
,
help
=
"Output file of the workflow. Require at most one "
"instance of XASOutputOW"
,
)
options
=
parser
.
parse_args
(
argv
[
1
:])
scheme
=
OwsParser
.
scheme_load
(
options
.
workflow_file
,
load_handlers
=
True
)
options
=
parser
.
parse_args
(
argv
[
1
:]
)
input_information
=
InputInformation
(
spectra_url
=
get_url
(
options
.
input_spectra
),
channel_url
=
get_url
(
options
.
input_channel
),
...
...
@@ -249,7 +169,15 @@ def main(argv):
input_information
.
I1
=
get_url
(
options
.
input_I1
)
input_information
.
I2
=
get_url
(
options
.
input_I2
)
input_information
.
mu_ref
=
get_url
(
options
.
input_mu_ref
)
exec_
(
scheme
=
scheme
,
input_information
=
input_information
)
graph
=
ows_to_ewoks
(
filename
=
options
.
workflow_file
)
varinfo
=
{
"input_information"
:
input_information
.
to_dict
(),
}
if
options
.
output_file
:
varinfo
[
"output_file"
]
=
options
.
output_file
graph
.
execute
(
varinfo
=
varinfo
)
if
__name__
==
"__main__"
:
...
...
est/app/reprocessing.py
deleted
100644 → 0
View file @
7dc04564
import
argparse
import
sys
import
logging
from
est.core.types
import
XASObject
from
est.core.reprocessing
import
get_process_instance_frm_h5_desc
from
typing
import
Union
# Optional dependency: h5py is only needed when reprocessing HDF5 files.
try:
    import h5py  # noqa: F401

    has_h5py = True
except ImportError:
    # narrowed from a bare `except:`, which would also have swallowed
    # KeyboardInterrupt / SystemExit raised during import
    has_h5py = False
# Module-wide logging configuration: DEBUG is enabled unconditionally so
# reprocessing runs are fully traced when this module is run as a CLI tool.
logging.basicConfig(level=logging.DEBUG)

_logger = logging.getLogger(__name__)
def
exec_
(
h5_file
:
str
,
entry
:
str
=
"scan1"
,
spectra_path
:
Union
[
None
,
str
]
=
None
,
energy_path
:
Union
[
None
,
str
]
=
None
,
configuration_path
:
Union
[
None
,
str
]
=
None
,
):