@@ -3,6 +3,7 @@
*.pyc
*.pyo
*~
*.py,cover
*.bck
build
*.swp
@@ -26,3 +27,6 @@ htmlcov/
*.so
bliss/release.py
prof
.mypy_cache/
graph.dot
graph.png
DOMAIN="bliss.gitlab.esrf.fr"
ADDRESS="https://bliss.gitlab-pages.esrf.fr/bliss"
echo "<!DOCTYPE html>" > public/index.html
echo "<html lang=en>" >> public/index.html
echo "<head><title>Bliss Versions</title><h1>Bliss</h1></head>" >> public/index.html
echo "<body>" >> public/index.html
for bliss_tag in "$@"
do
echo "LOOKING FOR $bliss_tag"
wget --recursive --page-requisites --html-extension --convert-links --cut-dirs=1 --no-parent --no-host-directories "$ADDRESS/$bliss_tag/" -P ./public
test -e "./public/$bliss_tag" && echo "<li><a href=\"./$bliss_tag/index.html\">$bliss_tag</a></li>" >> public/index.html
done
echo "</body>" >> public/index.html
echo "</html>" >> public/index.html
before_script:
# set pip cache to the Docker volume
- echo ${CI_PROJECT_DIR}
- export PIP_CACHE_DIR="/opt/cache/pip"
- conda config --append channels conda-forge
- conda config --add channels defaults
- conda config --add channels http://bcu-ci.esrf.fr/stable
- conda config --add channels tango-controls
default:
before_script:
# set pip cache to the Docker volume
- echo ${CI_PROJECT_DIR}
- export PIP_CACHE_DIR="/opt/cache/pip"
- /opt/conda/bin/conda init && source /root/.bashrc
- conda config --append channels conda-forge
- conda config --add channels defaults
- conda config --add channels http://bcu-ci.esrf.fr/stable
- conda config --add channels tango-controls
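# note: "conda config --add" prepends while "--append" appends, so the
# effective channel priority here is tango-controls, then the bcu-ci stable
# channel, then defaults, with conda-forge last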
stages:
- style
@@ -19,49 +21,107 @@ check_style:
image: continuumio/miniconda3:latest
script:
- pip install -r requirements-dev.txt
# run black
- LC_ALL=C.UTF-8 black --check --safe .
run_tests:source:
check_lint:
stage: style
image: continuumio/miniconda3:latest
script:
- pip install -r requirements-dev.txt
# run flake8 on diff between current branch and last common ancestor with master
- git diff -U0 origin/master...$CI_COMMIT_SHA | flake8 --diff
# allow failure without impacting the rest of the CI (will show an orange warning in the UI)
allow_failure: true
.template_test_source:
stage: tests
image: continuumio/miniconda3:latest
script:
- echo ${CHANGES}
- >
if [ $CI_COMMIT_REF_NAME != 'master' ]; then
# Check for changes manually to circumvent gitlab-ci limitation (issue #1115)
echo 'Looking for changes...'
if ! (git diff --name-only origin/master...$CI_COMMIT_SHA | grep -E "$CHANGES"); then
echo 'Nothing to do'
exit 0
fi
fi
# install Xvfb and opengl libraries (needed for test_flint)
- apt-get update && apt-get -y install xvfb libxi6
# create test env and install BLISS
- conda create --quiet --name testenv --file requirements-conda.txt --file requirements-test-conda.txt
- source activate testenv
- pip install git+https://github.com/manahl/pytest-plugins#subdirectory=pytest-profiling
- conda install pytest-profiling --yes
- pip install .
# run tests on source
- python setup.py test --addopts "--cov bliss --cov-report html --cov-report term --profile --durations=30"
- echo ${PYTEST_ARGS}
- python setup.py test --addopts "$PYTEST_ARGS"
variables:
CHANGES: '\.(py|cfg)$|requirements|gitlab-ci|^(bin|extensions|scripts|spec|tests)/'
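# The CHANGES pattern above gates the whole template: the job exits early
# unless the diff against master touches a matching path. A quick sanity
# check of the pattern (hypothetical file names; Python's re module accepts
# this grep -E expression unchanged):
#
#   >>> import re
#   >>> p = r'\.(py|cfg)$|requirements|gitlab-ci|^(bin|extensions|scripts|spec|tests)/'
#   >>> bool(re.search(p, "bliss/common/standard.py"))   # *.py file
#   True
#   >>> bool(re.search(p, "doc/index.md"))               # doc-only change
#   False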
test_bliss:
# Run bliss tests without coverage for any branches except the master
extends: .template_test_source
except:
- master
variables:
PYTEST_ARGS: '--durations=30'
test_writer:
# Run hdf5 writer tests without coverage for any branches except the master
extends: .template_test_source
except:
- master
variables:
PYTEST_ARGS: '--durations=30 -m writer --runwritertests'
allow_failure: true
test_bliss_cov:
# Run bliss tests with coverage for master only
extends: .template_test_source
only:
- master
artifacts:
paths:
- htmlcov/
expire_in: 7 days
after_script:
- python scripts/profiling2txt.py
- sh scripts/print_test_profiling.sh
variables:
PYTEST_ARGS: '--cov bliss --cov-report html:htmlcov --cov-report term --profile --durations=30'
test_writer_cov:
# Run hdf5 writer tests with coverage for master only
extends: .template_test_source
only:
- master
artifacts:
paths:
- htmlcov/
- htmlcov_writer/
expire_in: 7 days
only:
changes: # skip tests for doc changes
- "bin/**/*"
- "bliss/**/*"
- "conda_recipe/**/*"
- "extensions/**/*"
- "scripts/**/*"
- "spec/**/*"
- "tests/**/*"
- ".gitlab-ci.yml"
- "requirements*"
- "setup.*"
after_script:
- python scripts/profiling2txt.py
- sh scripts/print_test_profiling.sh
variables:
PYTEST_ARGS: '--cov nexus_writer_service --cov-report html:htmlcov_writer --cov-report term --profile --durations=30 -m writer --runwritertests'
allow_failure: true
package:
stage: build
image: continuumio/miniconda3:latest
script:
# create package env and install all requirements and conda-build
# install opengl libraries (needed to avoid problems with the pyopengl dependency)
- apt-get update && apt-get -y install libgl1-mesa-glx
# create package env and install all requirements plus conda-build (gcc and g++ compilers are required for flint)
- conda create --quiet --name buildenv --file requirements-conda.txt --file requirements-test-conda.txt conda-build
- source activate buildenv
# create symlinks to conda's prefixed compilers
- ln -s /opt/conda/envs/buildenv/bin/x86_64-conda_cos6-linux-gnu-gcc /opt/conda/envs/buildenv/bin/gcc
- ln -s /opt/conda/envs/buildenv/bin/x86_64-conda_cos6-linux-gnu-g++ /opt/conda/envs/buildenv/bin/g++
# trigger the creation of the bliss/release.py file
- python -c "from setup import generate_release_file;generate_release_file()"
# create the meta.yaml file for conda package generation
@@ -81,10 +141,12 @@ package:
only:
- tags
create_doc:reference:
create_reference_doc:
stage: build
image: continuumio/miniconda3:latest
script:
# install opengl libraries (needed to avoid problems with the pyopengl dependency)
- apt-get update && apt-get -y install libgl1-mesa-glx
# create doc env and install all requirements
- conda create -q --yes --name docenv --file requirements-conda.txt --file requirements-doc-conda.txt
- source activate docenv
@@ -96,22 +158,24 @@ create_doc:reference:
- build/
expire_in: 7 days
create_doc:user:
create_user_doc:
stage: build
image: continuumio/miniconda3:latest
script:
# install opengl libraries (needed to avoid problems with the pyopengl dependency)
- apt-get update && apt-get -y install libgl1-mesa-glx
# create doc env and install all requirements
- conda create -q --yes --name mkdocsenv --file requirements-conda.txt --file requirements-doc-conda.txt
- source activate mkdocsenv
- pip install -r requirements-doc.txt
# build of documentation
- cd doc && mkdocs build
- pip install -r requirements-doc.txt
# build the documentation (-s: strict mode, fail on warnings)
- cd doc && mkdocs build -s
artifacts:
paths:
- doc/site
expire_in: 7 days
run_tests:package:
.template_test_package:
stage: package_tests
image: continuumio/miniconda3:latest
script:
@@ -121,22 +185,50 @@ run_tests:package:
- conda create -y --name testenv
- source activate testenv
- conda install bliss==$CI_COMMIT_TAG --file requirements-test-conda.txt --channel file://${CI_PROJECT_DIR}/conda-local-channel
- pytest --cov=bliss --cov-report html --cov-report term
- echo ${PYTEST_ARGS}
- pytest ${PYTEST_ARGS}
test_bliss_package:
# Run bliss tests using the bliss conda package
extends: .template_test_package
only:
- tags
artifacts:
paths:
- htmlcov/
expire_in: 7 days
variables:
PYTEST_ARGS: '--cov bliss --cov-report html:htmlcov --cov-report term'
test_writer_package:
# Run HDF5 writer tests using the bliss conda package
extends: .template_test_package
only:
- tags
artifacts:
paths:
- htmlcov_writer/
expire_in: 7 days
variables:
PYTEST_ARGS: '--cov nexus_writer_service --cov-report html:htmlcov_writer --cov-report term -m writer --runwritertests'
pages:
stage: deploy
tags:
before_script:
- ''
tags:
- conda
- builder
- linux
script:
# Publishing to intranet folders
# Preparing
- git fetch --tags
- mkdir -p public
# Make a copy of the existing documentation from the GitLab pages
# Workaround: GitLab does not manage multiple documentation versions
- conda install -y wget
- sh .gitlab-ci-docs-publish.sh master $(git tag)
# Publish the documentation for the current version:
# with a tag it is published on the intranet GitLab page under /<tag>/, otherwise under /master/
- if [[ ( $CI_COMMIT_REF_NAME == master && -z $CI_COMMIT_TAG ) ]]; then export DOC_DIR='master'; else export DOC_DIR=${CI_COMMIT_TAG}; fi
#- rm -rf public/* # target dir should be cleaned the first time
@@ -153,10 +245,12 @@ pages:
only:
- tags
- master
deploy_bliss:
stage: deploy
tags:
before_script:
- ''
tags:
- conda
- builder
- linux
......
repos:
- repo: https://github.com/ambv/black
- repo: https://github.com/psf/black
rev: 18.6b4
hooks:
- id: black
language_version: python3 # Should be >= 3.6
\ No newline at end of file
language_version: python3 # Should be >= 3.6
- repo: local
hooks:
- id: flake8-diff
name: flake8-diff
description: Run the flake8 linter on the diff lines in the staging area
language: script
entry: ./flake8-diff.sh
verbose: true # force output when the hook passes
[MASTER]
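# allow pylint to load the PyQt5 C extension so that its dynamically
# generated members can be introspected instead of reported as errors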
extension-pkg-whitelist=PyQt5
Bliss
======
[![build status](https://gitlab.esrf.fr/bliss/bliss/badges/master/build.svg)](http://bliss.gitlab-pages.esrf.fr/bliss)
[![coverage report](https://gitlab.esrf.fr/bliss/bliss/badges/master/coverage.svg)](http://bliss.gitlab-pages.esrf.fr/bliss/htmlcov)
[![build status](https://gitlab.esrf.fr/bliss/bliss/badges/master/build.svg)](https://gitlab.esrf.fr/bliss/bliss/pipelines/master/latest)
[![coverage report](https://gitlab.esrf.fr/bliss/bliss/badges/master/coverage.svg)](https://bliss.gitlab-pages.esrf.fr/bliss/master/htmlcov)
The bliss control library.
Latest documentation from master can be found [here](http://bliss.gitlab-pages.esrf.fr/bliss/master)
Latest documentation from master can be found [here](https://bliss.gitlab-pages.esrf.fr/bliss/master)
In short
========
--------
To update BLISS from source:
```
conda install --file ./requirements-conda.txt
pip install --no-deps -e .
```
conda install --file ./requirements-conda.txt
exit and re-enter the conda environment
pip install --no-deps -e .
```
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the bliss project
#
# Copyright (c) 2015-2019 Beamline Control Unit, ESRF
# Distributed under the GNU LGPLv3. See LICENSE for more info.
"""
Usage: sps_data_watch [(-s | --session)] <name>...
sps_data_watch (-h | --help)
Options:
-s, --session Follow data from session(s).
-h, --help Show this screen.
"""
import docopt
import gevent
import json
from bliss.common.utils import OrderedDict
try:
import sps
except ImportError:
try:
from PyMca5.PyMcaIO import sps
except ImportError: # last chance
from PyMca import sps
from bliss.data.node import DataNodeIterator, _get_or_create_node, is_zerod
ARRAY_NAME = "SCAN_D"
class _MaxPointReach(Exception):
pass
class _MaxCounterReach(Exception):
pass
class _NewCounter(Exception):
pass
def copy_data(zerod, zerod_index, session_name, max_nb_counter, max_nb_points):
data_channel = zerod
channel_name = zerod.name
previous_cnt_number = len(zerod_index)
channel_info = zerod_index.setdefault(
channel_name, {"from": 0, "index": len(zerod_index)}
)
if len(zerod_index) > max_nb_counter:
raise _MaxCounterReach()
elif previous_cnt_number != len(zerod_index):
raise _NewCounter()
from_index = channel_info.get("from")
data = data_channel.get(from_index, -1)
nb_points = from_index + len(data)
if nb_points > max_nb_points:
raise _MaxPointReach()
channel_info["from"] = nb_points
if len(data):
sps_index = channel_info["index"]
sps_data = sps.getdata(session_name, ARRAY_NAME)
sps_data.T[sps_index][from_index : from_index + len(data)] = data
sps.putdata(session_name, ARRAY_NAME, sps_data)
min_index = nb_points
for channel_name, channel_info in zerod_index.items():
point_nb = channel_info.get("from")
if point_nb < min_index:
min_index = point_nb
sps.putinfo(session_name, ARRAY_NAME, json.dumps((min_index, "running")))
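# note: the mutable default argument of new_scanno() is deliberate; it acts
# as persistent state between calls, so every call returns the next scan number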
def new_scanno(last={"scanno": 0}):
last["scanno"] += 1
return last["scanno"]
def watch_data(scan_node, session_name):
npoints = max(scan_node.info.get("npoints"), 4096)
title = scan_node.info.get("title") or "unknown scan"
nbcounters = 64
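# retry loop: the SPS array is re-created with larger dimensions whenever
# copy_data() raises _MaxCounterReach or _MaxPointReach below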
while True:
try:
scan_data_iterator = DataNodeIterator(scan_node)
pubsub = scan_data_iterator.children_event_register()
sps.create(session_name, ARRAY_NAME, npoints, nbcounters)
zerod_index = OrderedDict()
for channel in scan_data_iterator.walk(filter="channel", wait=False):
if is_zerod(channel):
channel_name = channel.name
zerod_index.setdefault(
channel_name, {"from": 0, "index": len(zerod_index)}
)
copy_data(channel, zerod_index, session_name, nbcounters, npoints)
allcountersdict = dict([(i, "") for i in range(nbcounters)])
allcountersdict.update(
dict(((i, name) for i, name in enumerate(zerod_index)))
)
sps.putmetadata(
session_name,
ARRAY_NAME,
json.dumps(
(
allcountersdict,
{
"npts": npoints,
"allmotorm": "",
"scanno": new_scanno(),
"datafile": "bliss",
"spec": session_name,
"columnnames": list(zerod_index.keys())
+ [""] * (nbcounters - len(list(zerod_index.keys()))),
"columns": len(list(zerod_index.keys())),
"selectedcounters": ";".join(list(zerod_index.keys())),
},
{},
)
),
)
for event_type, zerod in scan_data_iterator.wait_for_event(
pubsub, filter="channel"
):
if not is_zerod(zerod):
continue
if event_type is scan_data_iterator.EVENTS.NEW_CHILD:
copy_data(zerod, zerod_index, session_name, nbcounters, npoints)
elif event_type is scan_data_iterator.EVENTS.NEW_DATA_IN_CHANNEL:
copy_data(zerod, zerod_index, session_name, nbcounters, npoints)
except _MaxCounterReach:
nbcounters *= 2
except _MaxPointReach:
if npoints < 10000:
npoints *= 32
else:
npoints *= 2
except _NewCounter:
pass
def watch_session(session_name):
session_node = _get_or_create_node(session_name, node_type="session")
if session_node is not None:
data_iterator = DataNodeIterator(session_node)
pubsub = data_iterator.children_event_register()
last_scan_node = None
for last_scan_node in data_iterator.walk(filter="scan", wait=False):
pass
watch_data_task = None
if last_scan_node is not None:
watch_data_task = gevent.spawn(watch_data, last_scan_node, session_name)
for event_type, scan_node in data_iterator.wait_for_event(
pubsub, filter="scan"
):
if event_type != data_iterator.EVENTS.NEW_CHILD:
continue
if watch_data_task:
watch_data_task.kill()
watch_data_task = gevent.spawn(watch_data, scan_node, session_name)
def main():
try:
# Parse arguments, use file docstring as a parameter definition
arguments = docopt.docopt(__doc__)
sessions_name = arguments["<name>"]
except docopt.DocoptExit as e:
print(e)  # DocoptExit has no .message attribute under Python 3
else:
tasks = list()
for session in sessions_name:
tasks.append(gevent.spawn(watch_session, session))
try:
gevent.joinall(tasks)
except KeyboardInterrupt:
pass
if __name__ == "__main__":
main()
@@ -19,8 +19,8 @@
scanning
shell
tango
flint
"""
from . import release
__version__ = release.version
@@ -28,10 +28,78 @@ __author__ = release.author
__license__ = release.license
version_info = release.version_info
from gevent import monkey
from gevent import monkey as _monkey
_monkey.patch_all(thread=False)
from bliss.common.proxy import Proxy as _Proxy
import atexit
def get_current_session():
from bliss.common import session
return session.get_current_session()
current_session = _Proxy(get_current_session)
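# current_session is a lazy proxy: attribute access is forwarded to whatever
# get_current_session() returns at call time. A minimal sketch of the pattern
# (illustration only, not bliss's actual Proxy implementation):
#
#     class LazyProxy:
#         def __init__(self, factory):
#             object.__setattr__(self, "_factory", factory)
#
#         def __getattr__(self, name):
#             # resolve the target lazily, on every access
#             return getattr(self._factory(), name)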
from bliss.common.alias import MapWithAliases as _MapWithAliases
global_map = _MapWithAliases(current_session)
atexit.register(global_map.clear)
from bliss.common.logtools import Log as _Log
global_log = _Log(map=global_map)
def logging_startup(
log_level="WARNING", fmt="%(levelname)s %(asctime)-15s %(name)s: %(message)s"
):
"""
Provide basicConfig-like functionality for Bliss, activating the root loggers at the proper level
"""
import logging  # imported locally to avoid polluting the global namespace
# save log messages format
global_log.set_log_format(fmt)
global_log._LOG_DEFAULT_LEVEL = log_level # to restore level of non-BlissLoggers
# setting startup level for session and bliss logger
logging.getLogger("session").setLevel(log_level)
logging.getLogger("bliss").setLevel(log_level)
logging.getLogger("flint").setLevel(log_level)
# install an additional handler, only for debug messages
# (debugon / debugoff)
global_log.start_stdout_handler()
# Beacon logging handler through SocketServer
from bliss.config.conductor.client import get_log_server_address
try:
host, port = get_log_server_address()
except RuntimeError:
pass
else:
global_log.start_beacon_handler((host, port))
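# In library mode an application would typically call logging_startup() once
# at startup, e.g. (hypothetical usage):
#
#     import bliss
#     bliss.logging_startup(log_level="INFO")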
# Bliss shell mode: False indicates Bliss is running in library mode
_BLISS_SHELL_MODE = False
monkey.patch_all(thread=False)
def set_bliss_shell_mode(mode=True):
"""
Set Bliss shell mode
"""
global _BLISS_SHELL_MODE
_BLISS_SHELL_MODE = mode
from redis import selector
selector._DEFAULT_SELECTOR = selector.SelectSelector
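# (assumption) forcing the plain select()-based selector keeps redis-py
# cooperative under gevent's monkey-patching, which patches select() but not
# the epoll-based selectors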
def is_bliss_shell():
"""
Tells if Bliss is running in shell or library mode
"""
return _BLISS_SHELL_MODE
@@ -7,8 +7,8 @@
from warnings import warn
from .embl import ExporterClient
from bliss.common.logtools import LogMixin
from bliss.common import session
from bliss.common.logtools import *
from bliss import global_map
import gevent
from gevent.queue import Queue
@@ -27,7 +27,7 @@ def start_exporter(address, port, timeout=3, retries=1):
return exporter_clients[(address, port)]
class Exporter(ExporterClient.ExporterClient, LogMixin):
class Exporter(ExporterClient.ExporterClient):
STATE_EVENT = "State"
STATUS_EVENT = "Status"
VALUE_EVENT = "Value"
@@ -59,7 +59,7 @@ class Exporter(ExporterClient.ExporterClient, LogMixin):
self.events_queue = Queue()
self.events_processing_task = None
session.get_current().map.register(
global_map.register(
self, parents_list=["comms"], tag=f"exporter: {address}:{port}"
)
@@ -145,8 +145,9 @@ class Exporter(ExporterClient.ExporterClient, LogMixin):
try:
cb(self._to_python_value(value))
except:
self._logger.exception(
"Exception while executing callback %s for event %s", cb, name
log_exception(
self,
f"Exception while executing callback {cb} for event {name}",
)
continue
......
@@ -21,3 +21,5 @@ This module gathers different communication interfaces
tcp
util
"""
from bliss.comm.util import get_comm
@@ -35,8 +35,8 @@
import socket, sys
from struct import *
from bliss.common import session
from bliss.common.logtools import LogMixin
from bliss.common.logtools import *