Skip to content
Permalink
Browse files

Merge pull request #2546 from volaya/ux_improvements

[processing] UI/UX improvements
  • Loading branch information
alexbruy committed Jan 8, 2016
2 parents 95a56b9 + 931ca4d commit acf74932f5b41ac515d2f19fb54070a044b077cd
Showing with 6,822 additions and 708 deletions.
  1. +1 −1 python/ext-libs/CMakeLists.txt
  2. +315 −0 python/ext-libs/yaml/__init__.py
  3. +139 −0 python/ext-libs/yaml/composer.py
  4. +675 −0 python/ext-libs/yaml/constructor.py
  5. +85 −0 python/ext-libs/yaml/cyaml.py
  6. +62 −0 python/ext-libs/yaml/dumper.py
  7. +1,140 −0 python/ext-libs/yaml/emitter.py
  8. +75 −0 python/ext-libs/yaml/error.py
  9. +86 −0 python/ext-libs/yaml/events.py
  10. +40 −0 python/ext-libs/yaml/loader.py
  11. +49 −0 python/ext-libs/yaml/nodes.py
  12. +589 −0 python/ext-libs/yaml/parser.py
  13. +190 −0 python/ext-libs/yaml/reader.py
  14. +484 −0 python/ext-libs/yaml/representer.py
  15. +224 −0 python/ext-libs/yaml/resolver.py
  16. +1,457 −0 python/ext-libs/yaml/scanner.py
  17. +111 −0 python/ext-libs/yaml/serializer.py
  18. +104 −0 python/ext-libs/yaml/tokens.py
  19. +1 −0 python/plugins/processing/algs/CMakeLists.txt
  20. +5 −5 python/plugins/processing/algs/gdal/GdalAlgorithm.py
  21. +1 −1 python/plugins/processing/algs/gdal/GdalAlgorithmDialog.py
  22. +3 −3 python/plugins/processing/algs/gdal/extractprojection.py
  23. +12 −7 python/plugins/processing/algs/gdal/translate.py
  24. +3 −0 python/plugins/processing/algs/grass/GrassAlgorithmProvider.py
  25. +3 −0 python/plugins/processing/algs/grass7/Grass7AlgorithmProvider.py
  26. +5 −0 python/plugins/processing/algs/help/CMakeLists.txt
  27. +32 −0 python/plugins/processing/algs/help/__init__.py
  28. +439 −0 python/plugins/processing/algs/help/qgis.yaml
  29. +3 −0 python/plugins/processing/algs/otb/OTBAlgorithmProvider.py
  30. +1 −1 python/plugins/processing/algs/qgis/FieldsCalculator.py
  31. +21 −8 python/plugins/processing/algs/saga/SagaAlgorithm212.py
  32. +2 −2 python/plugins/processing/algs/saga/SagaAlgorithmProvider.py
  33. +0 −93 python/plugins/processing/algs/saga/SagaGroupNameDecorator.py
  34. +156 −0 python/plugins/processing/algs/saga/SagaNameDecorator.py
  35. +1 −1 python/plugins/processing/algs/saga/SplitRGBBands.py
  36. +0 −185 python/plugins/processing/algs/saga/saga_version_check.txt
  37. +7 −8 python/plugins/processing/core/AlgorithmProvider.py
  38. +14 −43 python/plugins/processing/core/GeoAlgorithm.py
  39. +0 −2 python/plugins/processing/core/Processing.py
  40. +0 −21 python/plugins/processing/gui/AlgorithmClassification.py
  41. +31 −28 python/plugins/processing/gui/AlgorithmDialogBase.py
  42. +2 −0 python/plugins/processing/gui/BatchAlgorithmDialog.py
  43. +93 −108 python/plugins/processing/gui/ProcessingToolbox.py
  44. +30 −129 python/plugins/processing/modeler/ModelerDialog.py
  45. +1 −2 python/plugins/processing/tools/translation.py
  46. +84 −58 python/plugins/processing/ui/DlgAlgorithmBase.ui
  47. +46 −2 python/plugins/processing/ui/ProcessingToolbox.ui
@@ -33,7 +33,7 @@ MACRO(EXT_PYLIB lib)
ENDIF(WITH_INTERNAL_${ulib})
ENDMACRO(EXT_PYLIB lib)

# Bundle each internal ext-lib package; "yaml" added for the processing
# algorithm-help files. (The diff artifact that left two FOREACH opener
# lines for one ENDFOREACH is removed — only the post-change line stays.)
FOREACH(pkg httplib2 jinja2 markupsafe owslib pygments dateutil pytz yaml)
  EXT_PYLIB(${pkg})
ENDFOREACH(pkg)

@@ -0,0 +1,315 @@

from error import *

from tokens import *
from events import *
from nodes import *

from loader import *
from dumper import *

__version__ = '3.11'

# Prefer the optional LibYAML bindings (cyaml) when the extension module is
# importable; otherwise fall back to the pure-Python implementation.  The
# flag records which variant is in use so callers can introspect it.
try:
    from cyaml import *
    __with_libyaml__ = True
except ImportError:
    __with_libyaml__ = False

def scan(stream, Loader=Loader):
    """Lazily yield the scanner tokens found in a YAML *stream*.

    The loader is disposed of when the stream is exhausted or the
    generator is closed.
    """
    token_source = Loader(stream)
    try:
        while token_source.check_token():
            yield token_source.get_token()
    finally:
        token_source.dispose()

def parse(stream, Loader=Loader):
    """Lazily yield the parsing events found in a YAML *stream*.

    The loader is disposed of when the stream is exhausted or the
    generator is closed.
    """
    event_source = Loader(stream)
    try:
        while event_source.check_event():
            yield event_source.get_event()
    finally:
        event_source.dispose()

def compose(stream, Loader=Loader):
    """Return the representation tree of the first (and only) YAML
    document in *stream*.

    The loader is always disposed of, even on error.
    """
    composer = Loader(stream)
    try:
        return composer.get_single_node()
    finally:
        composer.dispose()

def compose_all(stream, Loader=Loader):
    """Lazily yield one representation tree per YAML document in
    *stream*.

    The loader is disposed of when the stream is exhausted or the
    generator is closed.
    """
    composer = Loader(stream)
    try:
        while composer.check_node():
            yield composer.get_node()
    finally:
        composer.dispose()

def load(stream, Loader=Loader):
    """Construct and return the Python object for the first (and only)
    YAML document in *stream*.

    The loader is always disposed of, even on error.
    """
    constructor = Loader(stream)
    try:
        return constructor.get_single_data()
    finally:
        constructor.dispose()

def load_all(stream, Loader=Loader):
    """Lazily yield one Python object per YAML document in *stream*.

    The loader is disposed of when the stream is exhausted or the
    generator is closed.
    """
    constructor = Loader(stream)
    try:
        while constructor.check_data():
            yield constructor.get_data()
    finally:
        constructor.dispose()

def safe_load(stream):
    """Load the first YAML document in *stream* as a Python object,
    resolving only the basic YAML tags (no arbitrary object
    construction).
    """
    return load(stream, Loader=SafeLoader)

def safe_load_all(stream):
    """Lazily load every YAML document in *stream* as Python objects,
    resolving only the basic YAML tags (no arbitrary object
    construction).
    """
    return load_all(stream, Loader=SafeLoader)

def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """Feed a sequence of YAML parsing *events* to an emitter.

    When *stream* is None the output is gathered into an in-memory
    buffer and returned as a string; otherwise it is written to
    *stream* and None is returned.  The dumper is always disposed of.
    """
    collect = None
    if stream is None:
        # Py2 StringIO: collect the emitted text so it can be returned.
        from StringIO import StringIO
        stream = StringIO()
        collect = stream.getvalue
    emitter = Dumper(stream, canonical=canonical, indent=indent,
            width=width, allow_unicode=allow_unicode,
            line_break=line_break)
    try:
        for event in events:
            emitter.emit(event)
    finally:
        emitter.dispose()
    if collect is not None:
        return collect()

def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """Serialize a sequence of representation trees (*nodes*) into a
    YAML stream.

    When *stream* is None the output is gathered into an in-memory
    buffer and returned (encoded bytes unless *encoding* is None);
    otherwise it is written to *stream* and None is returned.
    """
    collect = None
    if stream is None:
        # cStringIO only handles byte strings, so pick the buffer type
        # from the requested encoding (Py2 semantics).
        if encoding is None:
            from StringIO import StringIO
        else:
            from cStringIO import StringIO
        stream = StringIO()
        collect = stream.getvalue
    serializer = Dumper(stream, canonical=canonical, indent=indent,
            width=width, allow_unicode=allow_unicode,
            line_break=line_break, encoding=encoding, version=version,
            tags=tags, explicit_start=explicit_start,
            explicit_end=explicit_end)
    try:
        serializer.open()
        for node in nodes:
            serializer.serialize(node)
        serializer.close()
    finally:
        serializer.dispose()
    if collect is not None:
        return collect()

def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """Serialize a single representation tree into a YAML stream.

    Thin wrapper over :func:`serialize_all`; when *stream* is None the
    produced string is returned instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)

def dump_all(documents, stream=None, Dumper=Dumper,
        default_style=None, default_flow_style=None,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding='utf-8', explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """Represent and serialize a sequence of Python *documents* into a
    YAML stream.

    When *stream* is None the output is gathered into an in-memory
    buffer and returned (encoded bytes unless *encoding* is None);
    otherwise it is written to *stream* and None is returned.
    """
    collect = None
    if stream is None:
        # cStringIO only handles byte strings, so pick the buffer type
        # from the requested encoding (Py2 semantics).
        if encoding is None:
            from StringIO import StringIO
        else:
            from cStringIO import StringIO
        stream = StringIO()
        collect = stream.getvalue
    representer = Dumper(stream, default_style=default_style,
            default_flow_style=default_flow_style,
            canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        representer.open()
        for data in documents:
            representer.represent(data)
        representer.close()
    finally:
        representer.dispose()
    if collect is not None:
        return collect()

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """Serialize a single Python object into a YAML stream.

    Thin wrapper over :func:`dump_all`; when *stream* is None the
    produced string is returned instead.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)

def safe_dump_all(documents, stream=None, **kwds):
    """Serialize a sequence of Python objects into a YAML stream using
    only basic YAML tags.

    When *stream* is None the produced string is returned instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

def safe_dump(data, stream=None, **kwds):
    """Serialize a single Python object into a YAML stream using only
    basic YAML tags.

    When *stream* is None the produced string is returned instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)

def add_implicit_resolver(tag, regexp, first=None,
        Loader=Loader, Dumper=Dumper):
    """Register an implicit scalar detector on both *Loader* and
    *Dumper*.

    A plain scalar whose value matches *regexp* is assigned *tag*;
    *first* is a sequence of possible initial characters, or None.
    """
    for target in (Loader, Dumper):
        target.add_implicit_resolver(tag, regexp, first)

def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
    """Register a path-based resolver for *tag* on both *Loader* and
    *Dumper*.

    *path* is a list of keys forming a path to a node in the
    representation tree; keys may be strings, integers, or None.
    """
    for target in (Loader, Dumper):
        target.add_path_resolver(tag, path, kind)

def add_constructor(tag, constructor, Loader=Loader):
    """Register *constructor* for nodes tagged exactly *tag*.

    *constructor* takes a Loader instance and a node and returns the
    corresponding Python object.
    """
    Loader.add_constructor(tag, constructor)

def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """Register *multi_constructor* for nodes whose tag starts with
    *tag_prefix*.

    *multi_constructor* takes a Loader instance, the tag suffix, and a
    node, and returns the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)

def add_representer(data_type, representer, Dumper=Dumper):
    """Register *representer* for objects of exactly *data_type*.

    *representer* takes a Dumper instance and an instance of
    *data_type* and returns the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)

def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """Register *multi_representer* for objects of *data_type* or any
    of its subtypes.

    *multi_representer* takes a Dumper instance and an instance of the
    type (or subtype) and returns the corresponding representation
    node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)

class YAMLObjectMetaclass(type):
    """Metaclass for YAMLObject.

    When a class body declares a non-None ``yaml_tag``, its
    ``from_yaml``/``to_yaml`` hooks are registered with the loader and
    dumper classes named on that class.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        tag = kwds.get('yaml_tag')
        if tag is not None:
            # Auto-register the subclass's conversion hooks.
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

class YAMLObject(object):
    """Base class for objects that serialize themselves to, and load
    themselves from, a YAML stream.

    Subclasses set ``yaml_tag`` (and optionally override the loader,
    dumper, and flow style); the metaclass then registers the
    conversion hooks automatically.
    """

    __metaclass__ = YAMLObjectMetaclass
    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    # Registration targets used by the metaclass.
    yaml_loader = Loader
    yaml_dumper = Dumper

    # Subclasses must provide a tag for registration to happen.
    yaml_tag = None
    yaml_flow_style = None

    @classmethod
    def from_yaml(cls, loader, node):
        """Construct an instance of *cls* from a representation node."""
        return loader.construct_yaml_object(node, cls)

    @classmethod
    def to_yaml(cls, dumper, data):
        """Convert *data* into a representation node tagged with
        ``cls.yaml_tag``.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)

0 comments on commit acf7493

Please sign in to comment.
You can’t perform that action at this time.