Commit df4a7e09 authored by payno

Format files to `black` style

parent 3406b794
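For context, a minimal sketch (not part of the commit) of the kind of rewrite being applied here: it feeds the pre-`black` form of `Link.__init__` through black's `format_str` API, which reproduces the two changes visible throughout the hunks below, single quotes normalized to double quotes and over-long signatures exploded to one parameter per line with a trailing comma. It assumes only that the `black` package is installed (`pip install black`); the snippet is illustrative and is not taken from this repository.

```python
import black

# Pre-commit form of the Link.__init__ signature (as shown in the hunk below),
# reduced to a stub body for illustration.
src = (
    "def __init__(self, source_node, sink_node, source_channel: str = 'default',\n"
    "             sink_channel: str = 'default', id=None):\n"
    "    pass\n"
)

# format_str applies the same rules as running the `black` command on a file.
formatted = black.format_str(src, mode=black.FileMode())
print(formatted)
# Expected output (default 88-character line length):
# def __init__(
#     self,
#     source_node,
#     sink_node,
#     source_channel: str = "default",
#     sink_channel: str = "default",
#     id=None,
# ):
#     pass
```

Whole files or packages can be reformatted the same way from the command line, e.g. `black path/to/package/`.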
from .scheme.scheme import Scheme, SubScheme
from .scheme.node import Node
from .scheme.link import Link
\ No newline at end of file
from .scheme.link import Link
@@ -30,6 +30,7 @@ __date__ = "29/05/2017"
from typing import Union
import logging
_logger = logging.getLogger(__name__)
@@ -58,14 +59,20 @@ class Link(object):
:param str sink_channel: channel name used for connection
"""
_JSON_SOURCE_CHANNEL = 'source_channel'
_JSON_SINK_CHANNEL = 'sink_channel'
_JSON_SOURCE_NODE_ID = 'source_node_id'
_JSON_SINK_NODE_ID = 'sink_node_id'
_JSON_LINK_ID = 'link_id'
def __init__(self, source_node, sink_node, source_channel: str = 'default',
sink_channel: str = 'default', id=None):
_JSON_SOURCE_CHANNEL = "source_channel"
_JSON_SINK_CHANNEL = "sink_channel"
_JSON_SOURCE_NODE_ID = "source_node_id"
_JSON_SINK_NODE_ID = "sink_node_id"
_JSON_LINK_ID = "link_id"
def __init__(
self,
source_node,
sink_node,
source_channel: str = "default",
sink_channel: str = "default",
id=None,
):
self.id = get_next_link_free_id() if id is None else id
if isinstance(source_node, int):
self.source_node_id = source_node
@@ -107,46 +114,56 @@ class Link(object):
# load link id
if Link._JSON_LINK_ID not in json_data:
_id = None
_logger.error('Missing link id information')
_logger.error("Missing link id information")
else:
_id = json_data[Link._JSON_LINK_ID]
# load sink channel
if Link._JSON_SINK_CHANNEL not in json_data:
sink_channel = None
_logger.error('Missing sink channel information')
_logger.error("Missing sink channel information")
else:
sink_channel = json_data[Link._JSON_SINK_CHANNEL]
# load source channel
if Link._JSON_SOURCE_CHANNEL not in json_data:
source_channel = None
_logger.error('Missing source channel information')
_logger.error("Missing source channel information")
else:
source_channel = json_data[Link._JSON_SOURCE_CHANNEL]
# load sink node id
if Link._JSON_SINK_NODE_ID not in json_data:
sink_node_id = None
_logger.error('Missing source node id information')
_logger.error("Missing source node id information")
else:
sink_node_id = json_data[Link._JSON_SINK_NODE_ID]
# load source node id
if Link._JSON_SOURCE_NODE_ID not in json_data:
source_node_id = None
_logger.error('Missing source node id information')
_logger.error("Missing source node id information")
else:
source_node_id = json_data[Link._JSON_SOURCE_NODE_ID]
if (sink_channel is None or source_channel is None or _id is None or
source_node_id is None or sink_node_id is None):
raise ValueError('Missing core information for creating a Link')
if (
sink_channel is None
or source_channel is None
or _id is None
or source_node_id is None
or sink_node_id is None
):
raise ValueError("Missing core information for creating a Link")
else:
return Link(id=_id, sink_channel=sink_channel,
source_channel=source_channel,
source_node=source_node_id,
sink_node=sink_node_id)
return Link(
id=_id,
sink_channel=sink_channel,
source_channel=source_channel,
source_node=source_node_id,
sink_node=sink_node_id,
)
def __str__(self):
return "node %s: source:(%s, %s), sink:(%s, %s)" % (self.id,
self.source_node_id,
self.source_channel,
self.sink_node_id,
self.sink_channel)
\ No newline at end of file
return "node %s: source:(%s, %s), sink:(%s, %s)" % (
self.id,
self.source_node_id,
self.source_channel,
self.sink_node_id,
self.sink_channel,
)
__authors__ = ["Bioinformatics Laboratory, University of Ljubljana", "H.Payno"]
__license__ = "[GNU GPL v3+]: https://www.gnu.org/licenses/gpl-3.0.en.html"
__date__ = "29/05/2017"
from xml.etree.ElementTree import parse
from .parser import _scheme, _node, _link, _nxNodeProperty, _nxRelation, _nxLink, _nxNode
from .parser import (
_scheme,
_node,
_link,
_nxNodeProperty,
_nxRelation,
_nxLink,
_nxNode,
)
import logging
from .scheme import Node, Scheme
from .parser import Parser
@@ -51,12 +58,12 @@ class MomlParser(Parser):
for nx_link in nx_links:
link = getLink(nx_link.relation)
# TODO: for now some port / link type are not managed
if nx_link.port in ('In', 'Out', 'No mesh defined'):
logger.warning(nx_link.port + ' not managed yet')
if nx_link.port in ("In", "Out", "No mesh defined"):
logger.warning(nx_link.port + " not managed yet")
continue
else:
node_id, input_output = nx_link.port.split('.', -1)
if input_output.lower() in ('output', 'other', 'true'):
node_id, input_output = nx_link.port.split(".", -1)
if input_output.lower() in ("output", "other", "true"):
link["source_node_id"] = node_id
else:
link["sink_node_id"] = node_id
@@ -71,14 +78,16 @@ class MomlParser(Parser):
for linkid, link in links.items():
# TODO: this condition is due from the case that some link are
# not managed yet.
if 'source_node_id' not in link or 'sink_node_id' not in link:
if "source_node_id" not in link or "sink_node_id" not in link:
continue
l = _link(id=link['id'],
source_node_id=link["source_node_id"],
sink_node_id=link["sink_node_id"],
source_channel=link["source_channel"],
sink_channel=link["sink_channel"],
enabled=True)
l = _link(
id=link["id"],
source_node_id=link["source_node_id"],
sink_node_id=link["sink_node_id"],
source_channel=link["source_channel"],
sink_channel=link["sink_channel"],
enabled=True,
)
orangeLinks.append(l)
return orangeLinks
@@ -87,38 +96,47 @@ class MomlParser(Parser):
# Collect all nodes
for node in tree.findall(".//entity"):
node_id = node.get('name')
node_class = node.get('class')
node_id = node.get("name")
node_class = node.get("class")
node_properties = []
for property in node.findall("property"):
_property = _nxNodeProperty(name=property.get("name"),
class_=property.get("class"),
value=property.get("value"))
_property = _nxNodeProperty(
name=property.get("name"),
class_=property.get("class"),
value=property.get("value"),
)
node_properties.append(_property)
nodes.append(_nxNode(id=node_id,
class_=node_class,
properties=node_properties,
data=None,
qualified_name=node_class))
nodes.append(
_nxNode(
id=node_id,
class_=node_class,
properties=node_properties,
data=None,
qualified_name=node_class,
)
)
# collect all nx links
for link in tree.findall("link"):
_my_link = _nxLink(port=link.get("port"),
relation=link.get("relation"))
_my_link = _nxLink(port=link.get("port"), relation=link.get("relation"))
nx_links.append(_my_link)
# collect all nx relations
for relation in tree.findall("relation"):
relation_properties = []
for property in relation.findall("property"):
_property = _nxNodeProperty(name=property.get("name"),
class_=property.get("class"),
value=property.get("value"))
_property = _nxNodeProperty(
name=property.get("name"),
class_=property.get("class"),
value=property.get("value"),
)
relation_properties.append(_property)
_relation = _nxRelation(id=relation.get("name"),
class_=relation.get("class"),
properties=relation_properties)
_relation = _nxRelation(
id=relation.get("name"),
class_=relation.get("class"),
properties=relation_properties,
)
nx_relations.append(_relation)
links = convertToOrangeLinks(nx_links=nx_links, nx_relations=nx_relations)
@@ -129,5 +147,5 @@ class MomlParser(Parser):
description=None,
nodes=nodes,
links=links,
annotations=None
)
\ No newline at end of file
annotations=None,
)
__authors__ = ["Bioinformatics Laboratory, University of Ljubljana", "H.Payno"]
__license__ = "[GNU GPL v3+]: https://www.gnu.org/licenses/gpl-3.0.en.html"
__date__ = "29/05/2017"
from xml.etree.ElementTree import parse
from .parser import _scheme, _node, _link, _data, _annotation, _text_params, _arrow_params
from .parser import (
_scheme,
_node,
_link,
_data,
_annotation,
_text_params,
_arrow_params,
)
from ast import literal_eval
import ast
import logging
@@ -19,6 +26,7 @@ class OwsParser(Parser):
"""
Parser managing the .ows (orange) files
"""
@staticmethod
def scheme_load(file_, load_handlers=True):
"""
@@ -56,7 +64,12 @@ class OwsParser(Parser):
node_id = node.get("id")
qualified_name = node.get("qualified_name")
if qualified_name in aliases:
logger.info('replace' + str(qualified_name) + 'by' + str(aliases[qualified_name]))
logger.info(
"replace"
+ str(qualified_name)
+ "by"
+ str(aliases[qualified_name])
)
qualified_name = aliases[qualified_name]
node = _node(
@@ -67,7 +80,7 @@ class OwsParser(Parser):
project_name=node.get("project_name", None),
qualified_name=qualified_name,
version=node.get("version", ""),
data=properties.get(node_id, None)
data=properties.get(node_id, None),
)
nodes.append(node)
@@ -107,7 +120,7 @@ class OwsParser(Parser):
annotation = _annotation(
id=annot.get("id"),
type="arrow",
params=_arrow_params((start, end), color)
params=_arrow_params((start, end), color),
)
annotations.append(annotation)
return _scheme(
@@ -116,7 +129,7 @@ class OwsParser(Parser):
description=scheme.get("description"),
nodes=nodes,
links=links,
annotations=annotations
annotations=annotations,
)
@staticmethod
@@ -133,11 +146,11 @@ class OwsParser(Parser):
version = "2.0"
if version == "1.0":
raise ValueError('old .ows version are not managed')
raise ValueError("old .ows version are not managed")
elif version == "2.0":
return OwsParser.parse_ows_etree_v_2_0(doc)
else:
raise ValueError('unrecognize scheme definition version')
raise ValueError("unrecognize scheme definition version")
# ---- TAKE back from Orange3 ---------
@@ -160,12 +173,16 @@ def tuple_eval(source):
if not isinstance(node.body, ast.Tuple):
raise ValueError("%r is not a tuple literal" % source)
if not all(isinstance(el, (ast.Str, ast.Num)) or
# allow signed number literals in Python3 (i.e. -1|+1|-1.0)
(isinstance(el, ast.UnaryOp) and
isinstance(el.op, (ast.UAdd, ast.USub)) and
isinstance(el.operand, ast.Num))
for el in node.body.elts):
if not all(
isinstance(el, (ast.Str, ast.Num)) or
# allow signed number literals in Python3 (i.e. -1|+1|-1.0)
(
isinstance(el, ast.UnaryOp)
and isinstance(el.op, (ast.UAdd, ast.USub))
and isinstance(el.operand, ast.Num)
)
for el in node.body.elts
):
raise ValueError("Can only contain numbers or strings")
return literal_eval(source)
@@ -195,12 +212,15 @@ def resolve_replaced(scheme_desc, registry):
# replace the nodes
nodes = scheme_desc.nodes
for i, node in list(enumerate(nodes)):
if not registry.has_widget(node.qualified_name) and \
node.qualified_name in replacements:
if (
not registry.has_widget(node.qualified_name)
and node.qualified_name in replacements
):
qname = replacements[node.qualified_name]
desc = registry.widget(qname)
nodes[i] = node._replace(qualified_name=desc.qualified_name,
project_name=desc.project_name)
nodes[i] = node._replace(
qualified_name=desc.qualified_name, project_name=desc.project_name
)
nodes_by_id[node.id] = nodes[i]
# replace links
@@ -209,17 +229,13 @@ def resolve_replaced(scheme_desc, registry):
nsource = nodes_by_id[link.source_node_id]
nsink = nodes_by_id[link.sink_node_id]
_, source_rep = replacements_channels.get(
nsource.qualified_name, ({}, {}))
sink_rep, _ = replacements_channels.get(
nsink.qualified_name, ({}, {}))
_, source_rep = replacements_channels.get(nsource.qualified_name, ({}, {}))
sink_rep, _ = replacements_channels.get(nsink.qualified_name, ({}, {}))
if link.source_channel in source_rep:
link = link._replace(
source_channel=source_rep[link.source_channel])
link = link._replace(source_channel=source_rep[link.source_channel])
if link.sink_channel in sink_rep:
link = link._replace(
sink_channel=sink_rep[link.sink_channel])
link = link._replace(sink_channel=sink_rep[link.sink_channel])
links[i] = link
return scheme_desc._replace(nodes=nodes, links=links)
@@ -61,17 +61,20 @@ class Parser(object):
return aliases
else:
import pkgutil
for importer, modname, ispkg in pkgutil.iter_modules(ppfaddon.__path__):
try:
mod_name = '.'.join((ppfaddon.__name__, modname, 'aliases'))
mod_name = ".".join((ppfaddon.__name__, modname, "aliases"))
module = importlib.import_module(mod_name)
except ImportError:
_logger.warning(modname + ' does not fit the add-on design, skip it')
_logger.warning(
modname + " does not fit the add-on design, skip it"
)
else:
if hasattr(module, 'aliases'):
new_aliases = getattr(module, 'aliases')
if hasattr(module, "aliases"):
new_aliases = getattr(module, "aliases")
if not isinstance(new_aliases, dict):
raise TypeError('aliases should be an instance of dict')
raise TypeError("aliases should be an instance of dict")
else:
aliases.update(new_aliases)
return aliases
@@ -80,47 +83,47 @@ class Parser(object):
_scheme = namedtuple(
"_scheme",
["title", "version", "description", "nodes", "links", "annotations"])
"_scheme", ["title", "version", "description", "nodes", "links", "annotations"]
)
_node = namedtuple(
"_node",
["id", "title", "name", "position", "project_name", "qualified_name",
"version", "data"])
_data = namedtuple(
"_data",
["format", "data"])
[
"id",
"title",
"name",
"position",
"project_name",
"qualified_name",
"version",
"data",
],
)
_data = namedtuple("_data", ["format", "data"])
_link = namedtuple(
"_link",
["id", "source_node_id", "sink_node_id", "source_channel", "sink_channel",
"enabled"])
[
"id",
"source_node_id",
"sink_node_id",
"source_channel",
"sink_channel",
"enabled",
],
)
_annotation = namedtuple(
"_annotation",
["id", "type", "params"])
_annotation = namedtuple("_annotation", ["id", "type", "params"])
_text_params = namedtuple(
"_text_params",
["geometry", "text", "font"])
_text_params = namedtuple("_text_params", ["geometry", "text", "font"])
_arrow_params = namedtuple(
"_arrow_params",
["geometry", "color"])
_arrow_params = namedtuple("_arrow_params", ["geometry", "color"])
_nxNode = namedtuple(
"_node",
["id", "class_", "properties", "data", "qualified_name"])
_nxNode = namedtuple("_node", ["id", "class_", "properties", "data", "qualified_name"])
_nxNodeProperty = namedtuple(
"_property",
["name", "class_", "value"])
_nxNodeProperty = namedtuple("_property", ["name", "class_", "value"])
_nxLink = namedtuple(
"_nxLink",
["port", "relation"])
_nxLink = namedtuple("_nxLink", ["port", "relation"])
_nxRelation = namedtuple(
"_relation",
["id", "class_", 'properties'])
\ No newline at end of file
_nxRelation = namedtuple("_relation", ["id", "class_", "properties"])