style: switching from yapf to isort/black
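For reference: the whole reformatting step boils down to the two commands wired into the new "format" target below (they appear verbatim in both the Makefile and the Projectfile). Assuming black and isort are installed, the same result can be reproduced by hand from the project root:

    black -l 120 --skip-string-normalization .
    isort -rc -o mondrian -o whistle -y .

-rc makes isort recurse into the tree, and the -o options mark mondrian and whistle as third-party packages so their imports are grouped accordingly; the isort line length and multi-line mode are presumably picked up from the [*.py] keys added to .editorconfig below.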
.editorconfig (new file, 17 lines)
@@ -0,0 +1,17 @@
root = true

[*]
end_of_line = lf
insert_final_newline = true
charset = utf-8

[*.py]
indent = ' '
indent_size = 4
indent_style = space
line_length = 120
multi_line_output = 5

[Makefile]
indent_style = tab
Makefile (26 lines changed)
@@ -1,4 +1,4 @@
# Generated by Medikit 0.6.1 on 2018-05-21.
# Generated by Medikit 0.6.3 on 2018-08-11.
# All changes will be overriden.
# Edit Projectfile and run “make update” (or “medikit update”) to regenerate.

@@ -26,12 +26,10 @@ SPHINX_BUILD ?= $(PYTHON_DIRNAME)/sphinx-build
SPHINX_OPTIONS ?=
SPHINX_SOURCEDIR ?= docs
SPHINX_BUILDDIR ?= $(SPHINX_SOURCEDIR)/_build
YAPF ?= $(PYTHON) -m yapf
YAPF_OPTIONS ?= -rip
SPHINX_AUTOBUILD ?= $(PYTHON_DIRNAME)/sphinx-autobuild
MEDIKIT ?= $(PYTHON) -m medikit
MEDIKIT_UPDATE_OPTIONS ?=
MEDIKIT_VERSION ?= 0.6.1
MEDIKIT_VERSION ?= 0.6.3

.PHONY: $(SPHINX_SOURCEDIR) clean format help install install-dev install-docker install-jupyter install-sqlalchemy medikit quick test update update-requirements watch-$(SPHINX_SOURCEDIR)

@@ -44,7 +42,7 @@ else ifneq ($(QUICK),)
@printf "Skipping \033[36m%s\033[0m because \033[36m$$QUICK\033[0m is not empty.\n" $(target)
else
@printf "Applying \033[36m%s\033[0m target...\n" $(target)
$(PIP) install $(PIP_INSTALL_OPTIONS) -U "pip ~=10.0" wheel
$(PIP) install $(PIP_INSTALL_OPTIONS) -U "pip ~=18.0" wheel
$(PIP) install $(PIP_INSTALL_OPTIONS) -U $(PYTHON_REQUIREMENTS_INLINE) -r $(PYTHON_REQUIREMENTS_FILE)
@mkdir -p .medikit; touch $@
endif
@@ -62,7 +60,7 @@ else ifneq ($(QUICK),)
@printf "Skipping \033[36m%s\033[0m because \033[36m$$QUICK\033[0m is not empty.\n" $(target)
else
@printf "Applying \033[36m%s\033[0m target...\n" $(target)
$(PIP) install $(PIP_INSTALL_OPTIONS) -U "pip ~=10.0" wheel
$(PIP) install $(PIP_INSTALL_OPTIONS) -U "pip ~=18.0" wheel
$(PIP) install $(PIP_INSTALL_OPTIONS) -U $(PYTHON_REQUIREMENTS_DEV_INLINE) -r $(PYTHON_REQUIREMENTS_DEV_FILE)
@mkdir -p .medikit; touch $@
endif
@@ -79,7 +77,7 @@ else ifneq ($(QUICK),)
@printf "Skipping \033[36m%s\033[0m because \033[36m$$QUICK\033[0m is not empty.\n" $(target)
else
@printf "Applying \033[36m%s\033[0m target...\n" $(target)
$(PIP) install $(PIP_INSTALL_OPTIONS) -U "pip ~=10.0" wheel
$(PIP) install $(PIP_INSTALL_OPTIONS) -U "pip ~=18.0" wheel
$(PIP) install $(PIP_INSTALL_OPTIONS) -U $(PYTHON_REQUIREMENTS_DOCKER_INLINE) -r $(PYTHON_REQUIREMENTS_DOCKER_FILE)
@mkdir -p .medikit; touch $@
endif
@@ -93,7 +91,7 @@ else ifneq ($(QUICK),)
@printf "Skipping \033[36m%s\033[0m because \033[36m$$QUICK\033[0m is not empty.\n" $(target)
else
@printf "Applying \033[36m%s\033[0m target...\n" $(target)
$(PIP) install $(PIP_INSTALL_OPTIONS) -U "pip ~=10.0" wheel
$(PIP) install $(PIP_INSTALL_OPTIONS) -U "pip ~=18.0" wheel
$(PIP) install $(PIP_INSTALL_OPTIONS) -U $(PYTHON_REQUIREMENTS_JUPYTER_INLINE) -r $(PYTHON_REQUIREMENTS_JUPYTER_FILE)
@mkdir -p .medikit; touch $@
endif
@@ -107,7 +105,7 @@ else ifneq ($(QUICK),)
@printf "Skipping \033[36m%s\033[0m because \033[36m$$QUICK\033[0m is not empty.\n" $(target)
else
@printf "Applying \033[36m%s\033[0m target...\n" $(target)
$(PIP) install $(PIP_INSTALL_OPTIONS) -U "pip ~=10.0" wheel
$(PIP) install $(PIP_INSTALL_OPTIONS) -U "pip ~=18.0" wheel
$(PIP) install $(PIP_INSTALL_OPTIONS) -U $(PYTHON_REQUIREMENTS_SQLALCHEMY_INLINE) -r $(PYTHON_REQUIREMENTS_SQLALCHEMY_FILE)
@mkdir -p .medikit; touch $@
endif
@@ -118,15 +116,15 @@ test: install-dev ## Runs the test suite.
$(SPHINX_SOURCEDIR): install-dev ##
$(SPHINX_BUILD) -b html -D latex_paper_size=a4 $(SPHINX_OPTIONS) $(SPHINX_SOURCEDIR) $(SPHINX_BUILDDIR)/html

format: install-dev ## Reformats the whole python codebase using yapf.
$(YAPF) $(YAPF_OPTIONS) .
$(YAPF) $(YAPF_OPTIONS) Projectfile

watch-$(SPHINX_SOURCEDIR): ##
$(SPHINX_AUTOBUILD) $(SPHINX_SOURCEDIR) $(shell mktemp -d)

format: ## Reformats the whole codebase using our standards (requires black and isort).
black -l 120 --skip-string-normalization .
isort -rc -o mondrian -o whistle -y .

medikit: # Checks installed medikit version and updates it if it is outdated.
@$(PYTHON) -c 'import medikit, pip, sys; from packaging.version import Version; sys.exit(0 if (Version(medikit.__version__) >= Version("$(MEDIKIT_VERSION)")) and (Version(pip.__version__) < Version("10")) else 1)' || $(PYTHON) -m pip install -U "pip ~=10.0" "medikit>=$(MEDIKIT_VERSION)"
@$(PYTHON) -c 'import medikit, pip, sys; from packaging.version import Version; sys.exit(0 if (Version(medikit.__version__) >= Version("$(MEDIKIT_VERSION)")) and (Version(pip.__version__) < Version("10")) else 1)' || $(PYTHON) -m pip install -U "pip ~=18.0" "medikit>=$(MEDIKIT_VERSION)"

update: medikit ## Update project artifacts using medikit.
$(MEDIKIT) update $(MEDIKIT_UPDATE_OPTIONS)
Projectfile (26 lines changed)
@@ -6,7 +6,6 @@ make = require('make')
pytest = require('pytest')
python = require('python')
sphinx = require('sphinx')
yapf = require('yapf')

python.setup(
name='bonobo',
@@ -72,14 +71,29 @@ python.add_requirements(

@listen(make.on_generate)
def on_make_generate(event):
event.makefile['SPHINX_AUTOBUILD'] = '$(PYTHON_DIRNAME)/sphinx-autobuild'
event.makefile.add_target(
makefile = event.makefile

# Sphinx
makefile['SPHINX_AUTOBUILD'] = '$(PYTHON_DIRNAME)/sphinx-autobuild'
makefile.add_target(
'watch-$(SPHINX_SOURCEDIR)',
'''
$(SPHINX_AUTOBUILD) $(SPHINX_SOURCEDIR) $(shell mktemp -d)
''',
'$(SPHINX_AUTOBUILD) $(SPHINX_SOURCEDIR) $(shell mktemp -d)',
phony=True
)

# Formating
makefile.add_target(
'format',
'''
black -l 120 --skip-string-normalization .
isort -rc -o mondrian -o whistle -y .
''',
phony=True,
doc='Reformats the whole codebase using our standards (requires black and isort).'
)


# vim: ft=python:
@@ -47,10 +47,9 @@ if __name__ == '__main__':

for i in 1, 2, 3:
print(
'j{}'.format(i),
timeit.timeit("j{}({!r})".format(i, json_data), setup="from __main__ import j{}".format(i))
'j{}'.format(i), timeit.timeit("j{}({!r})".format(i, json_data), setup="from __main__ import j{}".format(i))
)
print(
'k{}'.format(i),
timeit.timeit("k{}(**{!r})".format(i, json_data), setup="from __main__ import k{}".format(i))
timeit.timeit("k{}(**{!r})".format(i, json_data), setup="from __main__ import k{}".format(i)),
)
@@ -1,6 +1,6 @@
import os

from jinja2 import Environment, DictLoader
from jinja2 import DictLoader, Environment

__path__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__), '..'))

@@ -18,11 +18,7 @@ class Module:
return '<{} ({})>'.format(self.title, self.name)

def asdict(self):
return {
'name': self.name,
'title': self.title,
'automodule_options': self.automodule_options,
}
return {'name': self.name, 'title': self.title, 'automodule_options': self.automodule_options}

def get_path(self):
return os.path.join(__path__, apidoc_root, *self.name.split('.')) + '.rst'
@@ -45,9 +41,9 @@ def underlined_filter(txt, chr):


env = Environment(
loader=DictLoader({
'module':
'''
loader=DictLoader(
{
'module': '''
{{ (':mod:`'~title~' <'~name~'>`') | underlined('=') }}

.. currentmodule:: {{ name }}
@@ -56,8 +52,12 @@ env = Environment(

.. automodule:: {{ name }}
{% for opt in automodule_options %} :{{ opt }}:{{ "\n" }}{% endfor %}
''' [1:-1] + '\n'
})
'''[
1:-1
]
+ '\n'
}
)
)
env.filters['underlined'] = underlined_filter
@@ -31,9 +31,7 @@ def _repr_html_():
' <div style="float: left; width: 20px; height: 20px;">{}</div>'
' <pre style="white-space: nowrap; padding-left: 8px">{}</pre>'
'</div>'
).format(
__logo__, '<br/>'.join(get_versions(all=True))
)
).format(__logo__, '<br/>'.join(get_versions(all=True)))


del sys
@@ -8,11 +8,11 @@ to another is maximal.
"""

from bonobo.execution.strategies import create_strategy
from bonobo.nodes import __all__ as _all_nodes
from bonobo.nodes import *
from bonobo.nodes import __all__ as _all_nodes
from bonobo.structs import Graph
from bonobo.util.api import ApiHelper
from bonobo.util.environ import parse_args, get_argument_parser
from bonobo.util.environ import get_argument_parser, parse_args

__all__ = []
@@ -44,14 +44,17 @@ def run(graph, *, plugins=None, services=None, strategy=None):
plugins = plugins or []

from bonobo import settings

settings.check()

if not settings.QUIET.get(): # pragma: no cover
if _is_interactive_console():
import mondrian

mondrian.setup(excepthook=True)

from bonobo.plugins.console import ConsoleOutputPlugin

if ConsoleOutputPlugin not in plugins:
plugins.append(ConsoleOutputPlugin)

@@ -60,6 +63,7 @@ def run(graph, *, plugins=None, services=None, strategy=None):
from bonobo.contrib.jupyter import JupyterOutputPlugin
except ImportError:
import logging

logging.warning(
'Failed to load jupyter widget. Easiest way is to install the optional "jupyter" '
'dependencies with «pip install bonobo[jupyter]», but you can also install a specific '
@@ -70,6 +74,7 @@ def run(graph, *, plugins=None, services=None, strategy=None):
plugins.append(JupyterOutputPlugin)

import logging

logging.getLogger().setLevel(settings.LOGGING_LEVEL.get())
strategy = create_strategy(strategy)
return strategy.execute(graph, plugins=plugins, services=services)
@@ -158,6 +163,7 @@ api.register_group(

def _is_interactive_console():
import sys

return sys.stdout.isatty()


@@ -172,6 +178,7 @@ def _is_jupyter_notebook():
def get_examples_path(*pathsegments):
import os
import pathlib

return str(pathlib.Path(os.path.dirname(__file__), 'examples', *pathsegments))
@@ -42,6 +42,7 @@ def entrypoint(args=None):
logger.exception('Error while loading command {}.'.format(ext.name))

from stevedore import ExtensionManager

mgr = ExtensionManager(namespace='bonobo.commands')
mgr.map(register_extension)
@@ -41,6 +41,7 @@ class BaseGraphCommand(BaseCommand):
Base class for CLI commands that depends on a graph definition, either from a file or from a module.

"""

required = True
handler = None
@ -1,7 +1,7 @@
|
||||
import bonobo
|
||||
from bonobo.commands import BaseCommand
|
||||
from bonobo.registry import READER, WRITER, default_registry
|
||||
from bonobo.util.resolvers import _resolve_transformations, _resolve_options
|
||||
from bonobo.util.resolvers import _resolve_options, _resolve_transformations
|
||||
|
||||
|
||||
class ConvertCommand(BaseCommand):
|
||||
@ -11,21 +11,14 @@ class ConvertCommand(BaseCommand):
|
||||
parser.add_argument(
|
||||
'--' + READER,
|
||||
'-r',
|
||||
help='Choose the reader factory if it cannot be detected from extension, or if detection is wrong.'
|
||||
help='Choose the reader factory if it cannot be detected from extension, or if detection is wrong.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--' + WRITER,
|
||||
'-w',
|
||||
help=
|
||||
'Choose the writer factory if it cannot be detected from extension, or if detection is wrong (use - for console pretty print).'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--limit',
|
||||
'-l',
|
||||
type=int,
|
||||
help='Adds a Limit() after the reader instance.',
|
||||
default=None,
|
||||
help='Choose the writer factory if it cannot be detected from extension, or if detection is wrong (use - for console pretty print).',
|
||||
)
|
||||
parser.add_argument('--limit', '-l', type=int, help='Adds a Limit() after the reader instance.', default=None)
|
||||
parser.add_argument(
|
||||
'--transformation',
|
||||
'-t',
|
||||
@ -56,16 +49,16 @@ class ConvertCommand(BaseCommand):
|
||||
)
|
||||
|
||||
def handle(
|
||||
self,
|
||||
input_filename,
|
||||
output_filename,
|
||||
reader=None,
|
||||
reader_option=None,
|
||||
writer=None,
|
||||
writer_option=None,
|
||||
option=None,
|
||||
limit=None,
|
||||
transformation=None,
|
||||
self,
|
||||
input_filename,
|
||||
output_filename,
|
||||
reader=None,
|
||||
reader_option=None,
|
||||
writer=None,
|
||||
writer_option=None,
|
||||
option=None,
|
||||
limit=None,
|
||||
transformation=None,
|
||||
):
|
||||
reader_factory = default_registry.get_reader_factory_for(input_filename, format=reader)
|
||||
reader_kwargs = _resolve_options((option or []) + (reader_option or []))
|
||||
@ -75,13 +68,13 @@ class ConvertCommand(BaseCommand):
|
||||
writer_args = ()
|
||||
else:
|
||||
writer_factory = default_registry.get_writer_factory_for(output_filename, format=writer)
|
||||
writer_args = (output_filename, )
|
||||
writer_args = (output_filename,)
|
||||
writer_kwargs = _resolve_options((option or []) + (writer_option or []))
|
||||
|
||||
transformations = ()
|
||||
|
||||
if limit:
|
||||
transformations += (bonobo.Limit(limit), )
|
||||
transformations += (bonobo.Limit(limit),)
|
||||
|
||||
transformations += _resolve_transformations(transformation)
|
||||
|
||||
@ -92,8 +85,4 @@ class ConvertCommand(BaseCommand):
|
||||
writer_factory(*writer_args, **writer_kwargs),
|
||||
)
|
||||
|
||||
return bonobo.run(
|
||||
graph, services={
|
||||
'fs': bonobo.open_fs(),
|
||||
}
|
||||
)
|
||||
return bonobo.run(graph, services={'fs': bonobo.open_fs()})
|
||||
|
||||
@ -19,6 +19,7 @@ class RunCommand(BaseGraphCommand):
|
||||
|
||||
def parse_options(self, *, quiet=False, verbose=False, install=False, **options):
|
||||
from bonobo import settings
|
||||
|
||||
settings.QUIET.set_if_true(quiet)
|
||||
settings.DEBUG.set_if_true(verbose)
|
||||
self.install = install
|
||||
@ -65,4 +66,5 @@ def _install_requirements(requirements):
|
||||
# python interpreter.
|
||||
pip.utils.pkg_resources = importlib.reload(pip.utils.pkg_resources)
|
||||
import site
|
||||
|
||||
importlib.reload(site)
|
||||
|
||||
@ -32,10 +32,11 @@ class VersionCommand(BaseCommand):
|
||||
|
||||
def _format_version(mod, *, name=None, quiet=False):
|
||||
from bonobo.util.pkgs import bonobo_packages
|
||||
|
||||
args = {
|
||||
'name': name or mod.__name__,
|
||||
'version': mod.__version__,
|
||||
'location': bonobo_packages[name or mod.__name__].location
|
||||
'location': bonobo_packages[name or mod.__name__].location,
|
||||
}
|
||||
|
||||
if not quiet:
|
||||
|
||||
@ -1,9 +1,7 @@
|
||||
from bonobo.errors import AbstractError
|
||||
from bonobo.util import isoption, iscontextprocessor, sortedlist, get_name
|
||||
from bonobo.util import get_name, iscontextprocessor, isoption, sortedlist
|
||||
|
||||
__all__ = [
|
||||
'Configurable',
|
||||
]
|
||||
__all__ = ['Configurable']
|
||||
|
||||
get_creation_counter = lambda v: v._creation_counter
|
||||
|
||||
@ -64,10 +62,7 @@ class ConfigurableMeta(type):
|
||||
return cls.__processors_cache
|
||||
|
||||
def __repr__(self):
|
||||
return ' '.join((
|
||||
'<Configurable',
|
||||
super(ConfigurableMeta, self).__repr__().split(' ', 1)[1],
|
||||
))
|
||||
return ' '.join(('<Configurable', super(ConfigurableMeta, self).__repr__().split(' ', 1)[1]))
|
||||
|
||||
|
||||
try:
|
||||
@ -156,8 +151,10 @@ class Configurable(metaclass=ConfigurableMeta):
|
||||
if len(extraneous):
|
||||
raise TypeError(
|
||||
'{}() got {} unexpected option{}: {}.'.format(
|
||||
cls.__name__, len(extraneous), 's'
|
||||
if len(extraneous) > 1 else '', ', '.join(map(repr, sorted(extraneous)))
|
||||
cls.__name__,
|
||||
len(extraneous),
|
||||
's' if len(extraneous) > 1 else '',
|
||||
', '.join(map(repr, sorted(extraneous))),
|
||||
)
|
||||
)
|
||||
|
||||
@ -167,8 +164,10 @@ class Configurable(metaclass=ConfigurableMeta):
|
||||
if _final:
|
||||
raise TypeError(
|
||||
'{}() missing {} required option{}: {}.'.format(
|
||||
cls.__name__, len(missing), 's'
|
||||
if len(missing) > 1 else '', ', '.join(map(repr, sorted(missing)))
|
||||
cls.__name__,
|
||||
len(missing),
|
||||
's' if len(missing) > 1 else '',
|
||||
', '.join(map(repr, sorted(missing))),
|
||||
)
|
||||
)
|
||||
return PartiallyConfigured(cls, *args, **kwargs)
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
import functools
|
||||
|
||||
import itertools
|
||||
|
||||
|
||||
|
||||
@ -113,8 +113,9 @@ class RemovedOption(Option):
|
||||
def clean(self, value):
|
||||
if value != self.value:
|
||||
raise ValueError(
|
||||
'Removed options cannot change value, {!r} must now be {!r} (and you should remove setting the value explicitely, as it is deprecated and will be removed quite soon.'.
|
||||
format(self.name, self.value)
|
||||
'Removed options cannot change value, {!r} must now be {!r} (and you should remove setting the value explicitely, as it is deprecated and will be removed quite soon.'.format(
|
||||
self.name, self.value
|
||||
)
|
||||
)
|
||||
return self.value
|
||||
|
||||
@ -195,9 +196,7 @@ class Method(Option):
|
||||
if not callable(value):
|
||||
raise TypeError(
|
||||
'Option {!r} ({}) is expecting a callable value, got {!r} object: {!r}.'.format(
|
||||
self.name,
|
||||
type(self).__name__,
|
||||
type(value).__name__, value
|
||||
self.name, type(self).__name__, type(value).__name__, value
|
||||
)
|
||||
)
|
||||
inst._options_values[self.name] = self.type(value) if self.type else value
|
||||
|
||||
@ -101,15 +101,17 @@ class ContextCurrifier:
|
||||
try:
|
||||
bound = self._bind(_input)
|
||||
except TypeError as exc:
|
||||
raise UnrecoverableTypeError((
|
||||
'Input of {wrapped!r} does not bind to the node signature.\n'
|
||||
'Args: {args}\n'
|
||||
'Input: {input}\n'
|
||||
'Kwargs: {kwargs}\n'
|
||||
'Signature: {sig}'
|
||||
).format(
|
||||
wrapped=self.wrapped, args=self.args, input=_input, kwargs=self.kwargs, sig=signature(self.wrapped)
|
||||
)) from exc
|
||||
raise UnrecoverableTypeError(
|
||||
(
|
||||
'Input of {wrapped!r} does not bind to the node signature.\n'
|
||||
'Args: {args}\n'
|
||||
'Input: {input}\n'
|
||||
'Kwargs: {kwargs}\n'
|
||||
'Signature: {sig}'
|
||||
).format(
|
||||
wrapped=self.wrapped, args=self.args, input=_input, kwargs=self.kwargs, sig=signature(self.wrapped)
|
||||
)
|
||||
) from exc
|
||||
return self.wrapped(*bound.args, **bound.kwargs)
|
||||
|
||||
def setup(self, *context):
|
||||
|
||||
@ -112,10 +112,12 @@ def create_container(services=None, factory=Container):
|
||||
|
||||
if not 'fs' in container:
|
||||
import bonobo
|
||||
|
||||
container.setdefault('fs', bonobo.open_fs())
|
||||
|
||||
if not 'http' in container:
|
||||
import requests
|
||||
|
||||
container.setdefault('http', requests)
|
||||
|
||||
return container
|
||||
@ -139,6 +141,7 @@ class Exclusive(ContextDecorator):
|
||||
ensure that.
|
||||
|
||||
"""
|
||||
|
||||
_locks = {}
|
||||
|
||||
def __init__(self, wrapped):
|
||||
|
||||
@ -9,7 +9,4 @@ This module contains all tools for Bonobo and Django to interract nicely.
|
||||
from .utils import create_or_update
|
||||
from .commands import ETLCommand
|
||||
|
||||
__all__ = [
|
||||
'ETLCommand',
|
||||
'create_or_update',
|
||||
]
|
||||
__all__ = ['ETLCommand', 'create_or_update']
|
||||
|
||||
@ -1,13 +1,14 @@
|
||||
from logging import getLogger
|
||||
from types import GeneratorType
|
||||
|
||||
from colorama import Back, Fore, Style
|
||||
from django.core.management import BaseCommand
|
||||
from django.core.management.base import OutputWrapper
|
||||
from mondrian import term
|
||||
|
||||
import bonobo
|
||||
from bonobo.plugins.console import ConsoleOutputPlugin
|
||||
from bonobo.util.term import CLEAR_EOL
|
||||
from colorama import Fore, Back, Style
|
||||
from django.core.management import BaseCommand
|
||||
from django.core.management.base import OutputWrapper
|
||||
from mondrian import term
|
||||
|
||||
from .utils import create_or_update
|
||||
|
||||
@ -55,7 +56,7 @@ class ETLCommand(BaseCommand):
|
||||
graph_coll = self.get_graph(*args, **options)
|
||||
|
||||
if not isinstance(graph_coll, GeneratorType):
|
||||
graph_coll = (graph_coll, )
|
||||
graph_coll = (graph_coll,)
|
||||
|
||||
for i, graph in enumerate(graph_coll):
|
||||
assert isinstance(graph, bonobo.Graph), 'Invalid graph provided.'
|
||||
|
||||
@ -41,14 +41,14 @@ def get_credentials(*, scopes):
|
||||
return credentials
|
||||
|
||||
|
||||
def get_google_spreadsheets_api_client(scopes=('https://www.googleapis.com/auth/spreadsheets', )):
|
||||
def get_google_spreadsheets_api_client(scopes=('https://www.googleapis.com/auth/spreadsheets',)):
|
||||
credentials = get_credentials(scopes=scopes)
|
||||
http = credentials.authorize(httplib2.Http())
|
||||
discoveryUrl = 'https://sheets.googleapis.com/$discovery/rest?version=v4'
|
||||
return discovery.build('sheets', 'v4', http=http, discoveryServiceUrl=discoveryUrl, cache_discovery=False)
|
||||
|
||||
|
||||
def get_google_people_api_client(scopes=('https://www.googleapis.com/auth/contacts', )):
|
||||
def get_google_people_api_client(scopes=('https://www.googleapis.com/auth/contacts',)):
|
||||
credentials = get_credentials(scopes=scopes)
|
||||
http = credentials.authorize(httplib2.Http())
|
||||
discoveryUrl = 'https://people.googleapis.com/$discovery/rest?version=v1'
|
||||
|
||||
@ -5,6 +5,4 @@ def _jupyter_nbextension_paths():
|
||||
return [{'section': 'notebook', 'src': 'static', 'dest': 'bonobo-jupyter', 'require': 'bonobo-jupyter/extension'}]
|
||||
|
||||
|
||||
__all__ = [
|
||||
'JupyterOutputPlugin',
|
||||
]
|
||||
__all__ = ['JupyterOutputPlugin']
|
||||
|
||||
@ -44,15 +44,9 @@ class OpenDataSoftAPI(Configurable):
|
||||
break
|
||||
|
||||
for row in records:
|
||||
yield {
|
||||
**row.get('fields', {}),
|
||||
'geometry': row.get('geometry', {}),
|
||||
'recordid': row.get('recordid'),
|
||||
}
|
||||
yield {**row.get('fields', {}), 'geometry': row.get('geometry', {}), 'recordid': row.get('recordid')}
|
||||
|
||||
start += self.rows
|
||||
|
||||
|
||||
__all__ = [
|
||||
'OpenDataSoftAPI',
|
||||
]
|
||||
__all__ = ['OpenDataSoftAPI']
|
||||
|
||||
@ -16,10 +16,7 @@ class InactiveWritableError(InactiveIOError):
|
||||
class ValidationError(RuntimeError):
|
||||
def __init__(self, inst, message):
|
||||
super(ValidationError, self).__init__(
|
||||
'Validation error in {class_name}: {message}'.format(
|
||||
class_name=type(inst).__name__,
|
||||
message=message,
|
||||
)
|
||||
'Validation error in {class_name}: {message}'.format(class_name=type(inst).__name__, message=message)
|
||||
)
|
||||
|
||||
|
||||
@ -42,8 +39,7 @@ class AbstractError(UnrecoverableError, NotImplementedError):
|
||||
def __init__(self, method):
|
||||
super().__init__(
|
||||
'Call to abstract method {class_name}.{method_name}(...): missing implementation.'.format(
|
||||
class_name=get_name(method.__self__),
|
||||
method_name=get_name(method),
|
||||
class_name=get_name(method.__self__), method_name=get_name(method)
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@ -4,19 +4,9 @@ import bonobo
|
||||
def get_argument_parser(parser=None):
|
||||
parser = bonobo.get_argument_parser(parser=parser)
|
||||
|
||||
parser.add_argument('--limit', '-l', type=int, default=None, help='If set, limits the number of processed lines.')
|
||||
parser.add_argument(
|
||||
'--limit',
|
||||
'-l',
|
||||
type=int,
|
||||
default=None,
|
||||
help='If set, limits the number of processed lines.'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--print',
|
||||
'-p',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='If set, pretty prints before writing to output file.'
|
||||
'--print', '-p', action='store_true', default=False, help='If set, pretty prints before writing to output file.'
|
||||
)
|
||||
|
||||
return parser
|
||||
@ -26,7 +16,4 @@ def get_graph_options(options):
|
||||
_limit = options.pop('limit', None)
|
||||
_print = options.pop('print', False)
|
||||
|
||||
return {
|
||||
'_limit': (bonobo.Limit(_limit), ) if _limit else (),
|
||||
'_print': (bonobo.PrettyPrinter(), ) if _print else (),
|
||||
}
|
||||
return {'_limit': (bonobo.Limit(_limit),) if _limit else (), '_print': (bonobo.PrettyPrinter(),) if _print else ()}
|
||||
|
||||
@ -1,7 +1,8 @@
|
||||
import bonobo
|
||||
import datetime
|
||||
import time
|
||||
|
||||
import bonobo
|
||||
|
||||
|
||||
def extract():
|
||||
"""Placeholder, change, rename, remove... """
|
||||
@ -13,10 +14,7 @@ def extract():
|
||||
|
||||
def get_graph():
|
||||
graph = bonobo.Graph()
|
||||
graph.add_chain(
|
||||
extract,
|
||||
print,
|
||||
)
|
||||
graph.add_chain(extract, print)
|
||||
|
||||
return graph
|
||||
|
||||
|
||||
@ -4,26 +4,18 @@ import bonobo
|
||||
from bonobo import examples
|
||||
from bonobo.examples.datasets.coffeeshops import get_graph as get_coffeeshops_graph
|
||||
from bonobo.examples.datasets.fablabs import get_graph as get_fablabs_graph
|
||||
from bonobo.examples.datasets.services import get_services, get_datasets_dir, get_minor_version
|
||||
from bonobo.examples.datasets.services import get_datasets_dir, get_minor_version, get_services
|
||||
|
||||
graph_factories = {
|
||||
'coffeeshops': get_coffeeshops_graph,
|
||||
'fablabs': get_fablabs_graph,
|
||||
}
|
||||
graph_factories = {'coffeeshops': get_coffeeshops_graph, 'fablabs': get_fablabs_graph}
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = examples.get_argument_parser()
|
||||
parser.add_argument(
|
||||
'--target', '-t', choices=graph_factories.keys(), nargs='+'
|
||||
)
|
||||
parser.add_argument('--target', '-t', choices=graph_factories.keys(), nargs='+')
|
||||
parser.add_argument('--sync', action='store_true', default=False)
|
||||
|
||||
with bonobo.parse_args(parser) as options:
|
||||
graph_options = examples.get_graph_options(options)
|
||||
graph_names = list(
|
||||
options['target']
|
||||
if options['target'] else sorted(graph_factories.keys())
|
||||
)
|
||||
graph_names = list(options['target'] if options['target'] else sorted(graph_factories.keys()))
|
||||
|
||||
# Create a graph with all requested subgraphs
|
||||
graph = bonobo.Graph()
|
||||
@ -43,18 +35,9 @@ if __name__ == '__main__':
|
||||
for filename in files:
|
||||
local_path = os.path.join(root, filename)
|
||||
relative_path = os.path.relpath(local_path, local_dir)
|
||||
s3_path = os.path.join(
|
||||
get_minor_version(), relative_path
|
||||
)
|
||||
s3_path = os.path.join(get_minor_version(), relative_path)
|
||||
|
||||
try:
|
||||
s3.head_object(
|
||||
Bucket='bonobo-examples', Key=s3_path
|
||||
)
|
||||
s3.head_object(Bucket='bonobo-examples', Key=s3_path)
|
||||
except:
|
||||
s3.upload_file(
|
||||
local_path,
|
||||
'bonobo-examples',
|
||||
s3_path,
|
||||
ExtraArgs={'ACL': 'public-read'}
|
||||
)
|
||||
s3.upload_file(local_path, 'bonobo-examples', s3_path, ExtraArgs={'ACL': 'public-read'})
|
||||
|
||||
@ -13,48 +13,26 @@ def get_graph(graph=None, *, _limit=(), _print=()):
|
||||
graph = graph or bonobo.Graph()
|
||||
|
||||
producer = graph.add_chain(
|
||||
ODSReader(
|
||||
dataset='liste-des-cafes-a-un-euro',
|
||||
netloc='opendata.paris.fr'
|
||||
),
|
||||
ODSReader(dataset='liste-des-cafes-a-un-euro', netloc='opendata.paris.fr'),
|
||||
*_limit,
|
||||
bonobo.UnpackItems(0),
|
||||
bonobo.Rename(
|
||||
name='nom_du_cafe',
|
||||
address='adresse',
|
||||
zipcode='arrondissement'
|
||||
),
|
||||
bonobo.Rename(name='nom_du_cafe', address='adresse', zipcode='arrondissement'),
|
||||
bonobo.Format(city='Paris', country='France'),
|
||||
bonobo.OrderFields(
|
||||
[
|
||||
'name', 'address', 'zipcode', 'city', 'country',
|
||||
'geometry', 'geoloc'
|
||||
]
|
||||
),
|
||||
bonobo.OrderFields(['name', 'address', 'zipcode', 'city', 'country', 'geometry', 'geoloc']),
|
||||
*_print,
|
||||
)
|
||||
|
||||
# Comma separated values.
|
||||
graph.add_chain(
|
||||
bonobo.CsvWriter(
|
||||
'coffeeshops.csv',
|
||||
fields=['name', 'address', 'zipcode', 'city'],
|
||||
delimiter=','
|
||||
),
|
||||
bonobo.CsvWriter('coffeeshops.csv', fields=['name', 'address', 'zipcode', 'city'], delimiter=','),
|
||||
_input=producer.output,
|
||||
)
|
||||
|
||||
# Standard JSON
|
||||
graph.add_chain(
|
||||
bonobo.JsonWriter(path='coffeeshops.json'),
|
||||
_input=producer.output,
|
||||
)
|
||||
graph.add_chain(bonobo.JsonWriter(path='coffeeshops.json'), _input=producer.output)
|
||||
|
||||
# Line-delimited JSON
|
||||
graph.add_chain(
|
||||
bonobo.LdjsonWriter(path='coffeeshops.ldjson'),
|
||||
_input=producer.output,
|
||||
)
|
||||
graph.add_chain(bonobo.LdjsonWriter(path='coffeeshops.ldjson'), _input=producer.output)
|
||||
|
||||
return graph
|
||||
|
||||
@ -63,7 +41,4 @@ if __name__ == '__main__':
|
||||
parser = examples.get_argument_parser()
|
||||
|
||||
with bonobo.parse_args(parser) as options:
|
||||
bonobo.run(
|
||||
get_graph(**examples.get_graph_options(options)),
|
||||
services=get_services()
|
||||
)
|
||||
bonobo.run(get_graph(**examples.get_graph_options(options)), services=get_services())
|
||||
|
||||
@ -24,9 +24,7 @@ from bonobo.examples.datasets.services import get_services
|
||||
try:
|
||||
import pycountry
|
||||
except ImportError as exc:
|
||||
raise ImportError(
|
||||
'You must install package "pycountry" to run this example.'
|
||||
) from exc
|
||||
raise ImportError('You must install package "pycountry" to run this example.') from exc
|
||||
|
||||
API_DATASET = 'fablabs@public-us'
|
||||
ROWS = 100
|
||||
@ -39,12 +37,8 @@ def _getlink(x):
|
||||
def normalize(row):
|
||||
result = {
|
||||
**row,
|
||||
'links':
|
||||
list(filter(None, map(_getlink, json.loads(row.get('links'))))),
|
||||
'country':
|
||||
pycountry.countries.get(
|
||||
alpha_2=row.get('country_code', '').upper()
|
||||
).name,
|
||||
'links': list(filter(None, map(_getlink, json.loads(row.get('links'))))),
|
||||
'country': pycountry.countries.get(alpha_2=row.get('country_code', '').upper()).name,
|
||||
}
|
||||
return result
|
||||
|
||||
@ -66,7 +60,4 @@ if __name__ == '__main__':
|
||||
parser = examples.get_argument_parser()
|
||||
|
||||
with bonobo.parse_args(parser) as options:
|
||||
bonobo.run(
|
||||
get_graph(**examples.get_graph_options(options)),
|
||||
services=get_services()
|
||||
)
|
||||
bonobo.run(get_graph(**examples.get_graph_options(options)), services=get_services())
|
||||
|
||||
@ -9,9 +9,7 @@ def get_minor_version():
|
||||
|
||||
def get_datasets_dir(*dirs):
|
||||
home_dir = os.path.expanduser('~')
|
||||
target_dir = os.path.join(
|
||||
home_dir, '.cache/bonobo', get_minor_version(), *dirs
|
||||
)
|
||||
target_dir = os.path.join(home_dir, '.cache/bonobo', get_minor_version(), *dirs)
|
||||
os.makedirs(target_dir, exist_ok=True)
|
||||
return target_dir
|
||||
|
||||
|
||||
@ -2,7 +2,4 @@ from bonobo import get_examples_path, open_fs
|
||||
|
||||
|
||||
def get_services():
|
||||
return {
|
||||
'fs': open_fs(get_examples_path()),
|
||||
'fs.output': open_fs(),
|
||||
}
|
||||
return {'fs': open_fs(get_examples_path()), 'fs.output': open_fs()}
|
||||
|
||||
@ -5,8 +5,8 @@ from bonobo.examples.files._services import get_services
|
||||
def get_graph(*, _limit=None, _print=False):
|
||||
return bonobo.Graph(
|
||||
bonobo.CsvReader('datasets/coffeeshops.txt'),
|
||||
*((bonobo.Limit(_limit), ) if _limit else ()),
|
||||
*((bonobo.PrettyPrinter(), ) if _print else ()),
|
||||
*((bonobo.Limit(_limit),) if _limit else ()),
|
||||
*((bonobo.PrettyPrinter(),) if _print else ()),
|
||||
bonobo.CsvWriter('coffeeshops.csv', fs='fs.output')
|
||||
)
|
||||
|
||||
@ -14,23 +14,10 @@ def get_graph(*, _limit=None, _print=False):
|
||||
if __name__ == '__main__':
|
||||
parser = bonobo.get_argument_parser()
|
||||
|
||||
parser.add_argument('--limit', '-l', type=int, default=None, help='If set, limits the number of processed lines.')
|
||||
parser.add_argument(
|
||||
'--limit',
|
||||
'-l',
|
||||
type=int,
|
||||
default=None,
|
||||
help='If set, limits the number of processed lines.'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--print',
|
||||
'-p',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='If set, pretty prints before writing to output file.'
|
||||
'--print', '-p', action='store_true', default=False, help='If set, pretty prints before writing to output file.'
|
||||
)
|
||||
|
||||
with bonobo.parse_args(parser) as options:
|
||||
bonobo.run(
|
||||
get_graph(_limit=options['limit'], _print=options['print']),
|
||||
services=get_services()
|
||||
)
|
||||
bonobo.run(get_graph(_limit=options['limit'], _print=options['print']), services=get_services())
|
||||
|
||||
@ -5,22 +5,13 @@ from bonobo.examples.files._services import get_services
|
||||
def get_graph(*, _limit=None, _print=False):
|
||||
graph = bonobo.Graph()
|
||||
|
||||
trunk = graph.add_chain(
|
||||
bonobo.JsonReader('datasets/theaters.json'),
|
||||
*((bonobo.Limit(_limit), ) if _limit else ()),
|
||||
)
|
||||
trunk = graph.add_chain(bonobo.JsonReader('datasets/theaters.json'), *((bonobo.Limit(_limit),) if _limit else ()))
|
||||
|
||||
if _print:
|
||||
graph.add_chain(bonobo.PrettyPrinter(), _input=trunk.output)
|
||||
|
||||
graph.add_chain(
|
||||
bonobo.JsonWriter('theaters.json', fs='fs.output'),
|
||||
_input=trunk.output
|
||||
)
|
||||
graph.add_chain(
|
||||
bonobo.LdjsonWriter('theaters.ldjson', fs='fs.output'),
|
||||
_input=trunk.output
|
||||
)
|
||||
graph.add_chain(bonobo.JsonWriter('theaters.json', fs='fs.output'), _input=trunk.output)
|
||||
graph.add_chain(bonobo.LdjsonWriter('theaters.ldjson', fs='fs.output'), _input=trunk.output)
|
||||
|
||||
return graph
|
||||
|
||||
@ -28,23 +19,10 @@ def get_graph(*, _limit=None, _print=False):
|
||||
if __name__ == '__main__':
|
||||
parser = bonobo.get_argument_parser()
|
||||
|
||||
parser.add_argument('--limit', '-l', type=int, default=None, help='If set, limits the number of processed lines.')
|
||||
parser.add_argument(
|
||||
'--limit',
|
||||
'-l',
|
||||
type=int,
|
||||
default=None,
|
||||
help='If set, limits the number of processed lines.'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--print',
|
||||
'-p',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='If set, pretty prints before writing to output file.'
|
||||
'--print', '-p', action='store_true', default=False, help='If set, pretty prints before writing to output file.'
|
||||
)
|
||||
|
||||
with bonobo.parse_args(parser) as options:
|
||||
bonobo.run(
|
||||
get_graph(_limit=options['limit'], _print=options['print']),
|
||||
services=get_services()
|
||||
)
|
||||
bonobo.run(get_graph(_limit=options['limit'], _print=options['print']), services=get_services())
|
||||
|
||||
@ -35,9 +35,7 @@ from bonobo import examples
|
||||
|
||||
def cleanse_sms(category, sms):
|
||||
if category == 'spam':
|
||||
sms_clean = '**MARKED AS SPAM** ' + sms[0:50] + (
|
||||
'...' if len(sms) > 50 else ''
|
||||
)
|
||||
sms_clean = '**MARKED AS SPAM** ' + sms[0:50] + ('...' if len(sms) > 50 else '')
|
||||
elif category == 'ham':
|
||||
sms_clean = sms
|
||||
else:
|
||||
@ -62,16 +60,11 @@ def get_graph(*, _limit=(), _print=()):
|
||||
|
||||
def get_services():
|
||||
from ._services import get_services
|
||||
return {
|
||||
**get_services(), 'fs':
|
||||
TarFS(bonobo.get_examples_path('datasets/spam.tgz'))
|
||||
}
|
||||
|
||||
return {**get_services(), 'fs': TarFS(bonobo.get_examples_path('datasets/spam.tgz'))}
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = examples.get_argument_parser()
|
||||
with bonobo.parse_args(parser) as options:
|
||||
bonobo.run(
|
||||
get_graph(**examples.get_graph_options(options)),
|
||||
services=get_services()
|
||||
)
|
||||
bonobo.run(get_graph(**examples.get_graph_options(options)), services=get_services())
|
||||
|
||||
@ -23,7 +23,4 @@ def get_graph(*, _limit=(), _print=()):
|
||||
if __name__ == '__main__':
|
||||
parser = examples.get_argument_parser()
|
||||
with bonobo.parse_args(parser) as options:
|
||||
bonobo.run(
|
||||
get_graph(**examples.get_graph_options(options)),
|
||||
services=get_services()
|
||||
)
|
||||
bonobo.run(get_graph(**examples.get_graph_options(options)), services=get_services())
|
||||
|
||||
@ -9,8 +9,4 @@ from bonobo.execution.contexts.graph import GraphExecutionContext
|
||||
from bonobo.execution.contexts.node import NodeExecutionContext
|
||||
from bonobo.execution.contexts.plugin import PluginExecutionContext
|
||||
|
||||
__all__ = [
|
||||
'GraphExecutionContext',
|
||||
'NodeExecutionContext',
|
||||
'PluginExecutionContext',
|
||||
]
|
||||
__all__ = ['GraphExecutionContext', 'NodeExecutionContext', 'PluginExecutionContext']
|
||||
|
||||
@ -1,12 +1,13 @@
|
||||
from functools import partial
|
||||
from time import sleep
|
||||
|
||||
from whistle import EventDispatcher
|
||||
|
||||
from bonobo.config import create_container
|
||||
from bonobo.constants import BEGIN, END
|
||||
from bonobo.execution import events
|
||||
from bonobo.execution.contexts.node import NodeExecutionContext
|
||||
from bonobo.execution.contexts.plugin import PluginExecutionContext
|
||||
from whistle import EventDispatcher
|
||||
|
||||
|
||||
class GraphExecutionContext:
|
||||
|
||||
@ -7,11 +7,11 @@ from types import GeneratorType
|
||||
|
||||
from bonobo.config import create_container
|
||||
from bonobo.config.processors import ContextCurrifier
|
||||
from bonobo.constants import NOT_MODIFIED, BEGIN, END, TICK_PERIOD, Token, Flag, INHERIT
|
||||
from bonobo.constants import BEGIN, END, INHERIT, NOT_MODIFIED, TICK_PERIOD, Flag, Token
|
||||
from bonobo.errors import InactiveReadableError, UnrecoverableError, UnrecoverableTypeError
|
||||
from bonobo.execution.contexts.base import BaseContext
|
||||
from bonobo.structs.inputs import Input
|
||||
from bonobo.util import get_name, isconfigurabletype, ensure_tuple
|
||||
from bonobo.util import ensure_tuple, get_name, isconfigurabletype
|
||||
from bonobo.util.bags import BagType
|
||||
from bonobo.util.statistics import WithStatistics
|
||||
|
||||
@ -105,10 +105,7 @@ class NodeExecutionContext(BaseContext, WithStatistics):
|
||||
except Empty:
|
||||
sleep(TICK_PERIOD) # XXX: How do we determine this constant?
|
||||
continue
|
||||
except (
|
||||
NotImplementedError,
|
||||
UnrecoverableError,
|
||||
):
|
||||
except (NotImplementedError, UnrecoverableError):
|
||||
self.fatal(sys.exc_info()) # exit loop
|
||||
except Exception: # pylint: disable=broad-except
|
||||
self.error(sys.exc_info()) # does not exit loop
|
||||
|
||||
@ -9,9 +9,7 @@ at home if you want to give it a shot.
|
||||
from bonobo.execution.strategies.executor import ProcessPoolExecutorStrategy, ThreadPoolExecutorStrategy
|
||||
from bonobo.execution.strategies.naive import NaiveStrategy
|
||||
|
||||
__all__ = [
|
||||
'create_strategy',
|
||||
]
|
||||
__all__ = ['create_strategy']
|
||||
|
||||
STRATEGIES = {
|
||||
'naive': NaiveStrategy,
|
||||
|
||||
@ -6,6 +6,7 @@ class Strategy:
|
||||
Base class for execution strategies.
|
||||
|
||||
"""
|
||||
|
||||
GraphExecutionContextType = GraphExecutionContext
|
||||
|
||||
def __init__(self, GraphExecutionContextType=None):
|
||||
|
||||
@ -3,14 +3,15 @@ import html
|
||||
import itertools
|
||||
import pprint
|
||||
|
||||
from mondrian import term
|
||||
|
||||
from bonobo import settings
|
||||
from bonobo.config import Configurable, Option, Method, use_raw_input, use_context, use_no_input
|
||||
from bonobo.config import Configurable, Method, Option, use_context, use_no_input, use_raw_input
|
||||
from bonobo.config.functools import transformation_factory
|
||||
from bonobo.config.processors import ContextProcessor, use_context_processor
|
||||
from bonobo.constants import NOT_MODIFIED
|
||||
from bonobo.util.objects import ValueHolder
|
||||
from bonobo.util.term import CLEAR_EOL
|
||||
from mondrian import term
|
||||
|
||||
__all__ = [
|
||||
'FixedWindow',
|
||||
@ -43,6 +44,7 @@ class Limit(Configurable):
|
||||
TODO: simplify into a closure building factory?
|
||||
|
||||
"""
|
||||
|
||||
limit = Option(positional=True, default=10)
|
||||
|
||||
@ContextProcessor
|
||||
@ -69,7 +71,7 @@ def Tee(f):
|
||||
|
||||
def _shorten(s, w):
|
||||
if w and len(s) > w:
|
||||
s = s[0:w - 3] + '...'
|
||||
s = s[0 : w - 3] + '...'
|
||||
return s
|
||||
|
||||
|
||||
@ -80,17 +82,19 @@ class PrettyPrinter(Configurable):
|
||||
required=False,
|
||||
__doc__='''
|
||||
If set, truncates the output values longer than this to this width.
|
||||
'''
|
||||
''',
|
||||
)
|
||||
|
||||
filter = Method(
|
||||
default=
|
||||
(lambda self, index, key, value: (value is not None) and (not isinstance(key, str) or not key.startswith('_'))),
|
||||
default=(
|
||||
lambda self, index, key, value: (value is not None)
|
||||
and (not isinstance(key, str) or not key.startswith('_'))
|
||||
),
|
||||
__doc__='''
|
||||
A filter that determine what to print.
|
||||
|
||||
Default is to ignore any key starting with an underscore and none values.
|
||||
'''
|
||||
''',
|
||||
)
|
||||
|
||||
@ContextProcessor
|
||||
@ -99,6 +103,7 @@ class PrettyPrinter(Configurable):
|
||||
yield context
|
||||
if context._jupyter_html is not None:
|
||||
from IPython.display import display, HTML
|
||||
|
||||
display(HTML('\n'.join(['<table>'] + context._jupyter_html + ['</table>'])))
|
||||
|
||||
def __call__(self, context, *args, **kwargs):
|
||||
@ -153,16 +158,11 @@ class PrettyPrinter(Configurable):
|
||||
if not context._jupyter_html:
|
||||
context._jupyter_html = [
|
||||
'<thead><tr>',
|
||||
*map('<th>{}</th>'.format, map(html.escape, map(str,
|
||||
context.get_input_fields() or range(len(args))))),
|
||||
*map('<th>{}</th>'.format, map(html.escape, map(str, context.get_input_fields() or range(len(args))))),
|
||||
'</tr></thead>',
|
||||
]
|
||||
|
||||
context._jupyter_html += [
|
||||
'<tr>',
|
||||
*map('<td>{}</td>'.format, map(html.escape, map(repr, args))),
|
||||
'</tr>',
|
||||
]
|
||||
context._jupyter_html += ['<tr>', *map('<td>{}</td>'.format, map(html.escape, map(repr, args))), '</tr>']
|
||||
|
||||
|
||||
@use_no_input
|
||||
|
||||
@ -1,6 +1,5 @@
|
||||
from bonobo.constants import NOT_MODIFIED
|
||||
|
||||
from bonobo.config import Configurable, Method
|
||||
from bonobo.constants import NOT_MODIFIED
|
||||
|
||||
|
||||
class Filter(Configurable):
|
||||
|
||||
@ -13,22 +13,39 @@ class FileHandler(Configurable):
|
||||
"""
|
||||
|
||||
path = Option(
|
||||
str, required=True, positional=True, __doc__='''
|
||||
str,
|
||||
required=True,
|
||||
positional=True,
|
||||
__doc__='''
|
||||
Path to use within the provided filesystem.
|
||||
'''
|
||||
''',
|
||||
) # type: str
|
||||
eol = Option(str, default='\n', __doc__='''
|
||||
eol = Option(
|
||||
str,
|
||||
default='\n',
|
||||
__doc__='''
|
||||
Character to use as line separator.
|
||||
''') # type: str
|
||||
mode = Option(str, __doc__='''
|
||||
''',
|
||||
) # type: str
|
||||
mode = Option(
|
||||
str,
|
||||
__doc__='''
|
||||
What mode to use for open() call.
|
||||
''') # type: str
|
||||
encoding = Option(str, default='utf-8', __doc__='''
|
||||
''',
|
||||
) # type: str
|
||||
encoding = Option(
|
||||
str,
|
||||
default='utf-8',
|
||||
__doc__='''
|
||||
Encoding.
|
||||
''') # type: str
|
||||
fs = Service('fs', __doc__='''
|
||||
''',
|
||||
) # type: str
|
||||
fs = Service(
|
||||
'fs',
|
||||
__doc__='''
|
||||
The filesystem instance to use.
|
||||
''') # type: str
|
||||
''',
|
||||
) # type: str
|
||||
|
||||
@ContextProcessor
|
||||
def file(self, context, *, fs):
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import csv
|
||||
|
||||
from bonobo.config import Option, use_raw_input, use_context
|
||||
from bonobo.config import Option, use_context, use_raw_input
|
||||
from bonobo.config.options import Method, RenamedOption
|
||||
from bonobo.constants import NOT_MODIFIED
|
||||
from bonobo.nodes.io.base import FileHandler
|
||||
@ -62,7 +62,7 @@ class CsvReader(FileReader, CsvHandler):
|
||||
default=0,
|
||||
__doc__='''
|
||||
If set and greater than zero, the reader will skip this amount of lines.
|
||||
'''
|
||||
''',
|
||||
)
|
||||
|
||||
@Method(
|
||||
@ -72,7 +72,7 @@ class CsvReader(FileReader, CsvHandler):
|
||||
iterable.
|
||||
|
||||
Defaults to builtin csv.reader(...), but can be overriden to fit your special needs.
|
||||
'''
|
||||
''',
|
||||
)
|
||||
def reader_factory(self, file):
|
||||
return csv.reader(file, **self.get_dialect_kwargs())
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
from bonobo.config import Option, ContextProcessor, use_context
|
||||
from bonobo.config import ContextProcessor, Option, use_context
|
||||
from bonobo.constants import NOT_MODIFIED
|
||||
from bonobo.errors import UnrecoverableError
|
||||
from bonobo.nodes.io.base import FileHandler, Reader, Writer
|
||||
@ -12,9 +12,13 @@ class FileReader(Reader, FileHandler):
|
||||
present. Extending it is usually the right way to create more specific file readers (like json, csv, etc.)
|
||||
"""
|
||||
|
||||
mode = Option(str, default='r', __doc__='''
|
||||
mode = Option(
|
||||
str,
|
||||
default='r',
|
||||
__doc__='''
|
||||
What mode to use for open() call.
|
||||
''') # type: str
|
||||
''',
|
||||
) # type: str
|
||||
|
||||
output_fields = Option(
|
||||
ensure_tuple,
|
||||
@ -22,14 +26,14 @@ class FileReader(Reader, FileHandler):
|
||||
__doc__='''
|
||||
Specify the field names of output lines.
|
||||
Mutually exclusive with "output_type".
|
||||
'''
|
||||
''',
|
||||
)
|
||||
output_type = Option(
|
||||
required=False,
|
||||
__doc__='''
|
||||
Specify the type of output lines.
|
||||
Mutually exclusive with "output_fields".
|
||||
'''
|
||||
''',
|
||||
)
|
||||
|
||||
@ContextProcessor
|
||||
@ -72,9 +76,13 @@ class FileWriter(Writer, FileHandler):
|
||||
usually the right way to create more specific file writers (like json, csv, etc.)
|
||||
"""
|
||||
|
||||
mode = Option(str, default='w+', __doc__='''
|
||||
mode = Option(
|
||||
str,
|
||||
default='w+',
|
||||
__doc__='''
|
||||
What mode to use for open() call.
|
||||
''') # type: str
|
||||
''',
|
||||
) # type: str
|
||||
|
||||
def write(self, file, context, line, *, fs):
|
||||
"""
|
||||
|
||||
@ -1,8 +1,9 @@
|
||||
import io
|
||||
import sys
|
||||
from contextlib import redirect_stdout, redirect_stderr
|
||||
from contextlib import redirect_stderr, redirect_stdout
|
||||
|
||||
from colorama import Style, Fore, init as initialize_colorama_output_wrappers
|
||||
from colorama import Fore, Style
|
||||
from colorama import init as initialize_colorama_output_wrappers
|
||||
|
||||
from bonobo import settings
|
||||
from bonobo.execution import events
|
||||
@ -34,7 +35,7 @@ class ConsoleOutputPlugin(Plugin):
|
||||
isatty = False
|
||||
|
||||
# Whether we're on windows, or a real operating system.
|
||||
iswindows = (sys.platform == 'win32')
|
||||
iswindows = sys.platform == 'win32'
|
||||
|
||||
def __init__(self):
|
||||
self.isatty = self._stdout.isatty()
|
||||
@ -95,27 +96,32 @@ class ConsoleOutputPlugin(Plugin):
|
||||
|
||||
liveliness_color = alive_color if node.alive else dead_color
|
||||
liveliness_prefix = ' {}{}{} '.format(liveliness_color, node.status, Style.RESET_ALL)
|
||||
_line = ''.join((
|
||||
liveliness_prefix,
|
||||
node.name,
|
||||
name_suffix,
|
||||
' ',
|
||||
node.get_statistics_as_string(),
|
||||
' ',
|
||||
node.get_flags_as_string(),
|
||||
Style.RESET_ALL,
|
||||
' ',
|
||||
))
|
||||
_line = ''.join(
|
||||
(
|
||||
liveliness_prefix,
|
||||
node.name,
|
||||
name_suffix,
|
||||
' ',
|
||||
node.get_statistics_as_string(),
|
||||
' ',
|
||||
node.get_flags_as_string(),
|
||||
Style.RESET_ALL,
|
||||
' ',
|
||||
)
|
||||
)
|
||||
print(prefix + _line + CLEAR_EOL, file=self._stderr)
|
||||
|
||||
if append:
|
||||
# todo handle multiline
|
||||
print(
|
||||
''.join((
|
||||
' `-> ', ' '.join('{}{}{}: {}'.format(Style.BRIGHT, k, Style.RESET_ALL, v) for k, v in append),
|
||||
CLEAR_EOL
|
||||
)),
|
||||
file=self._stderr
|
||||
''.join(
|
||||
(
|
||||
' `-> ',
|
||||
' '.join('{}{}{}: {}'.format(Style.BRIGHT, k, Style.RESET_ALL, v) for k, v in append),
|
||||
CLEAR_EOL,
|
||||
)
|
||||
),
|
||||
file=self._stderr,
|
||||
)
|
||||
t_cnt += 1
|
||||
|
||||
@ -128,16 +134,17 @@ class ConsoleOutputPlugin(Plugin):
|
||||
if self.counter % 10 and self._append_cache:
|
||||
append = self._append_cache
|
||||
else:
|
||||
self._append_cache = append = (('Memory', '{0:.2f} Mb'.format(memory_usage())),
|
||||
# ('Total time', '{0} s'.format(execution_time(harness))),
|
||||
)
|
||||
self._append_cache = append = (
|
||||
('Memory', '{0:.2f} Mb'.format(memory_usage())),
|
||||
# ('Total time', '{0} s'.format(execution_time(harness))),
|
||||
)
|
||||
else:
|
||||
append = ()
|
||||
self.write(context, prefix=self.prefix, append=append, rewind=rewind)
|
||||
self.counter += 1
|
||||
|
||||
|
||||
class IOBuffer():
|
||||
class IOBuffer:
|
||||
"""
|
||||
The role of IOBuffer is to overcome the problem of multiple threads wanting to write to stdout at the same time. It
|
||||
works a bit like a videogame: there are two buffers, one that is used to write, and one which is used to read from.
|
||||
@ -164,5 +171,6 @@ class IOBuffer():
|
||||
|
||||
def memory_usage():
|
||||
import os, psutil
|
||||
|
||||
process = psutil.Process(os.getpid())
|
||||
return process.memory_info()[0] / float(2**20)
|
||||
return process.memory_info()[0] / float(2 ** 20)
|
||||
|
||||
@ -1,8 +1,7 @@
|
||||
import mimetypes
|
||||
|
||||
import os
|
||||
|
||||
from bonobo import JsonReader, CsvReader, PickleReader, FileReader, FileWriter, PickleWriter, CsvWriter, JsonWriter
|
||||
from bonobo import CsvReader, CsvWriter, FileReader, FileWriter, JsonReader, JsonWriter, PickleReader, PickleWriter
|
||||
|
||||
FILETYPE_CSV = 'text/csv'
|
||||
FILETYPE_JSON = 'application/json'
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
import logging
|
||||
|
||||
import os
|
||||
|
||||
from bonobo.errors import ValidationError
|
||||
@ -92,17 +91,14 @@ LOGGING_LEVEL = Setting(
|
||||
'LOGGING_LEVEL',
|
||||
formatter=logging._checkLevel,
|
||||
validator=logging._checkLevel,
|
||||
default=lambda: logging.DEBUG if DEBUG.get() else logging.INFO
|
||||
default=lambda: logging.DEBUG if DEBUG.get() else logging.INFO,
|
||||
)
|
||||
|
||||
# Input/Output format for transformations
|
||||
IOFORMAT_ARG0 = 'arg0'
|
||||
IOFORMAT_KWARGS = 'kwargs'
|
||||
|
||||
IOFORMATS = {
|
||||
IOFORMAT_ARG0,
|
||||
IOFORMAT_KWARGS,
|
||||
}
|
||||
IOFORMATS = {IOFORMAT_ARG0, IOFORMAT_KWARGS}
|
||||
|
||||
IOFORMAT = Setting('IOFORMAT', default=IOFORMAT_KWARGS, validator=IOFORMATS.__contains__)
|
||||
|
||||
|
||||
@ -1,5 +1,3 @@
|
||||
from bonobo.structs.graphs import Graph
|
||||
|
||||
__all__ = [
|
||||
'Graph',
|
||||
]
|
||||
__all__ = ['Graph']
|
||||
|
||||
@ -3,11 +3,12 @@ import json
|
||||
from collections import namedtuple
|
||||
from copy import copy
|
||||
|
||||
from bonobo.constants import BEGIN
|
||||
from bonobo.util import get_name
|
||||
from graphviz import ExecutableNotFound
|
||||
from graphviz.dot import Digraph
|
||||
|
||||
from bonobo.constants import BEGIN
|
||||
from bonobo.util import get_name
|
||||
|
||||
GraphRange = namedtuple('GraphRange', ['graph', 'input', 'output'])
|
||||
|
||||
|
||||
@ -15,6 +16,7 @@ class Graph:
|
||||
"""
|
||||
Represents a directed graph of nodes.
|
||||
"""
|
||||
|
||||
name = ''
|
||||
|
||||
def __init__(self, *chain):
|
||||
|
||||
@ -16,7 +16,7 @@ from bonobo.util.inspect import (
|
||||
istuple,
|
||||
istype,
|
||||
)
|
||||
from bonobo.util.objects import (get_name, get_attribute_or_create, ValueHolder)
|
||||
from bonobo.util.objects import get_name, get_attribute_or_create, ValueHolder
|
||||
|
||||
# Bonobo's util API
|
||||
__all__ = [
|
||||
|
||||
@ -12,14 +12,14 @@ class ApiHelper:
|
||||
if graph:
|
||||
# This function must comply to the "graph" API interface, meaning it can bahave like bonobo.run.
|
||||
from inspect import signature
|
||||
|
||||
parameters = list(signature(x).parameters)
|
||||
required_parameters = {'plugins', 'services', 'strategy'}
|
||||
assert len(parameters
|
||||
) > 0 and parameters[0] == 'graph', 'First parameter of a graph api function must be "graph".'
|
||||
assert required_parameters.intersection(
|
||||
parameters
|
||||
) == required_parameters, 'Graph api functions must define the following parameters: ' + ', '.join(
|
||||
sorted(required_parameters)
|
||||
assert (
|
||||
len(parameters) > 0 and parameters[0] == 'graph'
|
||||
), 'First parameter of a graph api function must be "graph".'
|
||||
assert required_parameters.intersection(parameters) == required_parameters, (
|
||||
'Graph api functions must define the following parameters: ' + ', '.join(sorted(required_parameters))
|
||||
)
|
||||
|
||||
self.__all__.append(get_name(x))
|
||||
|
||||
@ -73,7 +73,9 @@ class {typename}(tuple):
|
||||
|
||||
_field_template = '''\
|
||||
{name} = _property(_itemgetter({index:d}), doc={doc!r})
|
||||
'''.strip('\n')
|
||||
'''.strip(
|
||||
'\n'
|
||||
)
|
||||
|
||||
_reserved = frozenset(
|
||||
['_', '_cls', '_attrs', '_fields', 'get', '_asdict', '_replace', '_make', 'self', '_self', 'tuple'] + dir(tuple)
|
||||
@ -150,16 +152,19 @@ def BagType(typename, fields, *, verbose=False, module=None):
|
||||
attrs=attrs,
|
||||
num_fields=len(fields),
|
||||
arg_list=repr(attrs).replace("'", "")[1:-1],
|
||||
repr_fmt=', '.join(('%r' if isinstance(fields[index], int) else '{name}=%r').format(name=name)
|
||||
for index, name in enumerate(attrs)),
|
||||
repr_fmt=', '.join(
|
||||
('%r' if isinstance(fields[index], int) else '{name}=%r').format(name=name)
|
||||
for index, name in enumerate(attrs)
|
||||
),
|
||||
field_defs='\n'.join(
|
||||
_field_template.format(
|
||||
index=index,
|
||||
name=name,
|
||||
doc='Alias for ' +
|
||||
('field #{}'.format(index) if isinstance(fields[index], int) else repr(fields[index]))
|
||||
) for index, name in enumerate(attrs)
|
||||
)
|
||||
doc='Alias for '
|
||||
+ ('field #{}'.format(index) if isinstance(fields[index], int) else repr(fields[index])),
|
||||
)
|
||||
for index, name in enumerate(attrs)
|
||||
),
|
||||
)
|
||||
|
||||
# Execute the template string in a temporary namespace and support
|
||||
|
||||
@ -26,7 +26,7 @@ def ensure_tuple(tuple_or_mixed, *, cls=tuple):
|
||||
if isinstance(tuple_or_mixed, tuple):
|
||||
return tuple.__new__(cls, tuple_or_mixed)
|
||||
|
||||
return tuple.__new__(cls, (tuple_or_mixed, ))
|
||||
return tuple.__new__(cls, (tuple_or_mixed,))
|
||||
|
||||
|
||||
def cast(type_):
|
||||
|
||||
@ -9,7 +9,7 @@ def deprecated_alias(alias, func):
|
||||
warnings.warn(
|
||||
"Call to deprecated function alias {}, use {} instead.".format(alias, func.__name__),
|
||||
category=DeprecationWarning,
|
||||
stacklevel=2
|
||||
stacklevel=2,
|
||||
)
|
||||
warnings.simplefilter('default', DeprecationWarning) # reset filter
|
||||
return func(*args, **kwargs)
|
||||
|
||||
@ -58,6 +58,7 @@ def get_argument_parser(parser=None):
|
||||
"""
|
||||
if parser is None:
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
|
||||
# Store globally to be able to warn the user about the fact he's probably wrong not to pass a parser to
|
||||
@ -94,6 +95,7 @@ def parse_args(mixed=None):
|
||||
)
|
||||
# use the api from bonobo namespace, in case a command patched it.
|
||||
import bonobo
|
||||
|
||||
mixed = bonobo.get_argument_parser()
|
||||
|
||||
if isinstance(mixed, argparse.ArgumentParser):
|
||||
|
||||
@ -9,6 +9,7 @@ def isconfigurable(mixed):
|
||||
:return: bool
|
||||
"""
|
||||
from bonobo.config.configurables import Configurable
|
||||
|
||||
return isinstance(mixed, Configurable)
|
||||
|
||||
|
||||
@ -47,6 +48,7 @@ def isoption(mixed):
|
||||
"""
|
||||
|
||||
from bonobo.config.options import Option
|
||||
|
||||
return isinstance(mixed, Option)
|
||||
|
||||
|
||||
@ -58,6 +60,7 @@ def ismethod(mixed):
|
||||
:return: bool
|
||||
"""
|
||||
from bonobo.config.options import Method
|
||||
|
||||
return isinstance(mixed, Method)
|
||||
|
||||
|
||||
@ -69,6 +72,7 @@ def iscontextprocessor(x):
|
||||
:return: bool
|
||||
"""
|
||||
from bonobo.config.processors import ContextProcessor
|
||||
|
||||
return isinstance(x, ContextProcessor)
|
||||
|
||||
|
||||
@ -102,15 +106,7 @@ def istuple(mixed):
|
||||
return isinstance(mixed, tuple)
|
||||
|
||||
|
||||
ConfigurableInspection = namedtuple(
|
||||
'ConfigurableInspection', [
|
||||
'type',
|
||||
'instance',
|
||||
'options',
|
||||
'processors',
|
||||
'partial',
|
||||
]
|
||||
)
|
||||
ConfigurableInspection = namedtuple('ConfigurableInspection', ['type', 'instance', 'options', 'processors', 'partial'])
|
||||
|
||||
ConfigurableInspection.__enter__ = lambda self: self
|
||||
ConfigurableInspection.__exit__ = lambda *exc_details: None
|
||||
@ -141,10 +137,4 @@ def inspect_node(mixed, *, _partial=None):
|
||||
'Not a Configurable, nor a Configurable instance and not even a partially configured Configurable. Check your inputs.'
|
||||
)
|
||||
|
||||
return ConfigurableInspection(
|
||||
typ,
|
||||
inst,
|
||||
list(typ.__options__),
|
||||
list(typ.__processors__),
|
||||
_partial,
|
||||
)
|
||||
return ConfigurableInspection(typ, inst, list(typ.__options__), list(typ.__processors__), _partial)
|
||||
|
||||
@ -142,10 +142,10 @@ class ValueHolder:
|
||||
return divmod(other, self._value)
|
||||
|
||||
def __pow__(self, other):
|
||||
return self._value**other
|
||||
return self._value ** other
|
||||
|
||||
def __rpow__(self, other):
|
||||
return other**self._value
|
||||
return other ** self._value
|
||||
|
||||
def __ipow__(self, other):
|
||||
self._value **= other
|
||||
|
||||
@ -1,2 +1,2 @@
|
||||
CLEAR_EOL = '\033[0K'
|
||||
MOVE_CURSOR_UP = lambda n: '\033[{}A'.format(n)
|
||||
MOVE_CURSOR_UP = lambda n: '\033[{}A'.format(n)
|
||||
|
||||
@ -4,12 +4,12 @@ import io
|
||||
import os
|
||||
import runpy
|
||||
import sys
|
||||
from contextlib import contextmanager, redirect_stdout, redirect_stderr
|
||||
from contextlib import contextmanager, redirect_stderr, redirect_stdout
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from bonobo import open_fs, __main__, get_examples_path
|
||||
from bonobo import __main__, get_examples_path, open_fs
|
||||
from bonobo.commands import entrypoint
|
||||
from bonobo.constants import Token
|
||||
from bonobo.execution.contexts.graph import GraphExecutionContext
|
||||
@ -112,19 +112,13 @@ def runner_module(args):
|
||||
|
||||
all_runners = pytest.mark.parametrize('runner', [runner_entrypoint, runner_module])
|
||||
all_environ_targets = pytest.mark.parametrize(
|
||||
'target', [
|
||||
(get_examples_path('environ.py'), ),
|
||||
(
|
||||
'-m',
|
||||
'bonobo.examples.environ',
|
||||
),
|
||||
]
|
||||
'target', [(get_examples_path('environ.py'),), ('-m', 'bonobo.examples.environ')]
|
||||
)
|
||||
|
||||
|
||||
@all_runners
|
||||
@all_environ_targets
|
||||
class EnvironmentTestCase():
|
||||
class EnvironmentTestCase:
|
||||
def run_quiet(self, runner, *args):
|
||||
return runner('run', '--quiet', *args)
|
||||
|
||||
@ -216,12 +210,12 @@ class ReaderTest(ConfigurableNodeTest):
|
||||
self.tmpdir = tmpdir
|
||||
|
||||
def get_create_args(self, *args):
|
||||
return (self.filename, ) + args
|
||||
return (self.filename,) + args
|
||||
|
||||
def test_customizable_output_type_transform_not_a_type(self):
|
||||
context = self.NodeExecutionContextType(
|
||||
self.create(*self.get_create_args(), output_type=str.upper, **self.get_create_kwargs()),
|
||||
services=self.services
|
||||
services=self.services,
|
||||
)
|
||||
with pytest.raises(TypeError):
|
||||
context.start()
|
||||
@ -229,9 +223,9 @@ class ReaderTest(ConfigurableNodeTest):
|
||||
def test_customizable_output_type_transform_not_a_tuple(self):
|
||||
context = self.NodeExecutionContextType(
|
||||
self.create(
|
||||
*self.get_create_args(), output_type=type('UpperString', (str, ), {}), **self.get_create_kwargs()
|
||||
*self.get_create_args(), output_type=type('UpperString', (str,), {}), **self.get_create_kwargs()
|
||||
),
|
||||
services=self.services
|
||||
services=self.services,
|
||||
)
|
||||
with pytest.raises(TypeError):
|
||||
context.start()
|
||||
@ -256,7 +250,7 @@ class WriterTest(ConfigurableNodeTest):
|
||||
self.tmpdir = tmpdir
|
||||
|
||||
def get_create_args(self, *args):
|
||||
return (self.filename, ) + args
|
||||
return (self.filename,) + args
|
||||
|
||||
def readlines(self):
|
||||
with self.fs.open(self.filename) as fp:
|
||||
|
||||
8 docs/_templates/alabaster/support.py (vendored)
@ -1,8 +1,9 @@
|
||||
# flake8: noqa
|
||||
|
||||
from pygments.style import Style
|
||||
from pygments.token import Keyword, Name, Comment, String, Error, \
|
||||
Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
|
||||
from pygments.token import (
|
||||
Comment, Error, Generic, Keyword, Literal, Name, Number, Operator, Other, Punctuation, String, Whitespace
|
||||
)
|
||||
|
||||
|
||||
# Originally based on FlaskyStyle which was based on 'tango'.
|
||||
@ -12,7 +13,7 @@ class Alabaster(Style):
|
||||
|
||||
styles = {
|
||||
# No corresponding class for the following:
|
||||
#Text: "", # class: ''
|
||||
# Text: "", # class: ''
|
||||
Whitespace: "underline #f8f8f8", # class: 'w'
|
||||
Error: "#a40000 border:#ef2929", # class: 'err'
|
||||
Other: "#000000", # class 'x'
|
||||
@ -28,7 +29,6 @@ class Alabaster(Style):
|
||||
Operator: "#582800", # class: 'o'
|
||||
Operator.Word: "bold #004461", # class: 'ow' - like keywords
|
||||
Punctuation: "bold #000000", # class: 'p'
|
||||
|
||||
# because special names such as Name.Class, Name.Function, etc.
|
||||
# are not recognized as such later in the parsing, we choose them
|
||||
# to look the same as ordinary variables.
|
||||
|
||||
33 docs/conf.py
@ -5,10 +5,11 @@ import datetime
|
||||
import os
|
||||
import sys
|
||||
|
||||
import bonobo
|
||||
|
||||
sys.path.insert(0, os.path.abspath('..'))
|
||||
sys.path.insert(0, os.path.abspath('_themes'))
|
||||
|
||||
import bonobo
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
@ -63,11 +64,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
|
||||
|
||||
autoclass_content = 'both'
|
||||
autodoc_member_order = 'groupwise'
|
||||
autodoc_default_flags = [
|
||||
'members',
|
||||
'undoc-members',
|
||||
'show-inheritance',
|
||||
]
|
||||
autodoc_default_flags = ['members', 'undoc-members', 'show-inheritance']
|
||||
|
||||
add_module_names = False
|
||||
pygments_style = 'sphinx'
|
||||
@ -112,7 +109,7 @@ html_sidebars = {
|
||||
'sourcelink.html',
|
||||
'searchbox.html',
|
||||
'sidebarinfos.html',
|
||||
]
|
||||
],
|
||||
}
|
||||
|
||||
html_theme_path = ['_themes']
|
||||
@ -137,15 +134,12 @@ latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#
|
||||
# 'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#
|
||||
# 'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#
|
||||
# 'preamble': '',
|
||||
|
||||
# Latex figure (float) alignment
|
||||
#
|
||||
# 'figure_align': 'htbp',
|
||||
@ -154,9 +148,7 @@ latex_elements = {
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(master_doc, 'Bonobo.tex', 'Bonobo Documentation', 'Romain Dorgueil', 'manual'),
|
||||
]
|
||||
latex_documents = [(master_doc, 'Bonobo.tex', 'Bonobo Documentation', 'Romain Dorgueil', 'manual')]
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
@ -171,9 +163,14 @@ man_pages = [(master_doc, 'bonobo', 'Bonobo Documentation', [author], 1)]
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(
|
||||
master_doc, 'Bonobo', 'Bonobo Documentation', author, 'Bonobo', 'One line description of project.',
|
||||
'Miscellaneous'
|
||||
),
|
||||
master_doc,
|
||||
'Bonobo',
|
||||
'Bonobo Documentation',
|
||||
author,
|
||||
'Bonobo',
|
||||
'One line description of project.',
|
||||
'Miscellaneous',
|
||||
)
|
||||
]
|
||||
|
||||
# -- Options for Epub output ----------------------------------------------
|
||||
@ -209,4 +206,6 @@ rst_epilog = """
|
||||
|
||||
.. |longversion| replace:: v.{version}
|
||||
|
||||
""".format(version=version, )
|
||||
""".format(
|
||||
version=version
|
||||
)
|
||||
|
||||
@ -1,9 +1,10 @@
|
||||
-e .[dev]
|
||||
-r requirements.txt
|
||||
alabaster==0.7.10
|
||||
alabaster==0.7.11
|
||||
arrow==0.12.1
|
||||
atomicwrites==1.1.5
|
||||
attrs==18.1.0
|
||||
babel==2.5.3
|
||||
babel==2.6.0
|
||||
binaryornot==0.4.4
|
||||
certifi==2018.4.16
|
||||
chardet==3.0.4
|
||||
@ -12,29 +13,29 @@ cookiecutter==1.5.1
|
||||
coverage==4.5.1
|
||||
docutils==0.14
|
||||
future==0.16.0
|
||||
idna==2.6
|
||||
idna==2.7
|
||||
imagesize==1.0.0
|
||||
jinja2-time==0.2.0
|
||||
jinja2==2.10
|
||||
markupsafe==1.0
|
||||
more-itertools==4.1.0
|
||||
more-itertools==4.3.0
|
||||
packaging==17.1
|
||||
pluggy==0.6.0
|
||||
pathlib2==2.3.2
|
||||
pluggy==0.7.1
|
||||
poyo==0.4.1
|
||||
py==1.5.3
|
||||
py==1.5.4
|
||||
pygments==2.2.0
|
||||
pyparsing==2.2.0
|
||||
pytest-cov==2.5.1
|
||||
pytest-timeout==1.2.1
|
||||
pytest==3.5.1
|
||||
pytest-timeout==1.3.1
|
||||
pytest==3.7.1
|
||||
python-dateutil==2.7.3
|
||||
pytz==2018.4
|
||||
requests==2.18.4
|
||||
pytz==2018.5
|
||||
requests==2.19.1
|
||||
six==1.11.0
|
||||
snowballstemmer==1.2.1
|
||||
sphinx-sitemap==0.2
|
||||
sphinx==1.7.4
|
||||
sphinxcontrib-websupport==1.0.1
|
||||
urllib3==1.22
|
||||
sphinx==1.7.6
|
||||
sphinxcontrib-websupport==1.1.0
|
||||
urllib3==1.23
|
||||
whichcraft==0.4.1
|
||||
yapf==0.22.0
|
||||
|
||||
@ -5,26 +5,26 @@ bonobo-docker==0.6.0
|
||||
certifi==2018.4.16
|
||||
chardet==3.0.4
|
||||
colorama==0.3.9
|
||||
docker-pycreds==0.2.3
|
||||
docker-pycreds==0.3.0
|
||||
docker==2.7.0
|
||||
fs==2.0.23
|
||||
graphviz==0.8.3
|
||||
idna==2.6
|
||||
fs==2.0.27
|
||||
graphviz==0.8.4
|
||||
idna==2.7
|
||||
jinja2==2.10
|
||||
markupsafe==1.0
|
||||
mondrian==0.7.0
|
||||
packaging==17.1
|
||||
pbr==4.0.3
|
||||
psutil==5.4.5
|
||||
pbr==4.2.0
|
||||
psutil==5.4.6
|
||||
pyparsing==2.2.0
|
||||
python-slugify==1.2.5
|
||||
pytz==2018.4
|
||||
requests==2.18.4
|
||||
pytz==2018.5
|
||||
requests==2.19.1
|
||||
semantic-version==2.6.0
|
||||
six==1.11.0
|
||||
stevedore==1.28.0
|
||||
stevedore==1.29.0
|
||||
typing==3.6.4
|
||||
unidecode==1.0.22
|
||||
urllib3==1.22
|
||||
websocket-client==0.47.0
|
||||
urllib3==1.23
|
||||
websocket-client==0.48.0
|
||||
whistle==1.0.1
|
||||
|
||||
@ -8,9 +8,9 @@ entrypoints==0.2.3
|
||||
html5lib==1.0.1
|
||||
ipykernel==4.8.2
|
||||
ipython-genutils==0.2.0
|
||||
ipython==6.4.0
|
||||
ipython==6.5.0
|
||||
ipywidgets==6.0.1
|
||||
jedi==0.12.0
|
||||
jedi==0.12.1
|
||||
jinja2==2.10
|
||||
jsonschema==2.6.0
|
||||
jupyter-client==5.2.3
|
||||
@ -21,23 +21,24 @@ markupsafe==1.0
|
||||
mistune==0.8.3
|
||||
nbconvert==5.3.1
|
||||
nbformat==4.4.0
|
||||
notebook==5.5.0
|
||||
notebook==5.6.0
|
||||
pandocfilters==1.4.2
|
||||
parso==0.2.0
|
||||
pexpect==4.5.0
|
||||
parso==0.3.1
|
||||
pexpect==4.6.0
|
||||
pickleshare==0.7.4
|
||||
prometheus-client==0.3.1
|
||||
prompt-toolkit==1.0.15
|
||||
ptyprocess==0.5.2
|
||||
ptyprocess==0.6.0
|
||||
pygments==2.2.0
|
||||
python-dateutil==2.7.3
|
||||
pyzmq==17.0.0
|
||||
pyzmq==17.1.2
|
||||
qtconsole==4.3.1
|
||||
send2trash==1.5.0
|
||||
simplegeneric==0.8.1
|
||||
six==1.11.0
|
||||
terminado==0.8.1
|
||||
testpath==0.3.1
|
||||
tornado==5.0.2
|
||||
tornado==5.1
|
||||
traitlets==4.3.2
|
||||
wcwidth==0.1.7
|
||||
webencodings==0.5.1
|
||||
|
||||
@ -5,23 +5,23 @@ bonobo-sqlalchemy==0.6.0
|
||||
certifi==2018.4.16
|
||||
chardet==3.0.4
|
||||
colorama==0.3.9
|
||||
fs==2.0.23
|
||||
graphviz==0.8.3
|
||||
idna==2.6
|
||||
fs==2.0.27
|
||||
graphviz==0.8.4
|
||||
idna==2.7
|
||||
jinja2==2.10
|
||||
markupsafe==1.0
|
||||
mondrian==0.7.0
|
||||
packaging==17.1
|
||||
pbr==4.0.3
|
||||
psutil==5.4.5
|
||||
pbr==4.2.0
|
||||
psutil==5.4.6
|
||||
pyparsing==2.2.0
|
||||
python-slugify==1.2.5
|
||||
pytz==2018.4
|
||||
requests==2.18.4
|
||||
pytz==2018.5
|
||||
requests==2.19.1
|
||||
six==1.11.0
|
||||
sqlalchemy==1.2.7
|
||||
stevedore==1.28.0
|
||||
sqlalchemy==1.2.10
|
||||
stevedore==1.29.0
|
||||
typing==3.6.4
|
||||
unidecode==1.0.22
|
||||
urllib3==1.22
|
||||
urllib3==1.23
|
||||
whistle==1.0.1
|
||||
|
||||
@ -3,22 +3,22 @@ appdirs==1.4.3
|
||||
certifi==2018.4.16
|
||||
chardet==3.0.4
|
||||
colorama==0.3.9
|
||||
fs==2.0.23
|
||||
graphviz==0.8.3
|
||||
idna==2.6
|
||||
fs==2.0.27
|
||||
graphviz==0.8.4
|
||||
idna==2.7
|
||||
jinja2==2.10
|
||||
markupsafe==1.0
|
||||
mondrian==0.7.0
|
||||
packaging==17.1
|
||||
pbr==4.0.3
|
||||
psutil==5.4.5
|
||||
pbr==4.2.0
|
||||
psutil==5.4.6
|
||||
pyparsing==2.2.0
|
||||
python-slugify==1.2.5
|
||||
pytz==2018.4
|
||||
requests==2.18.4
|
||||
pytz==2018.5
|
||||
requests==2.19.1
|
||||
six==1.11.0
|
||||
stevedore==1.28.0
|
||||
stevedore==1.29.0
|
||||
typing==3.6.4
|
||||
unidecode==1.0.22
|
||||
urllib3==1.22
|
||||
urllib3==1.23
|
||||
whistle==1.0.1
|
||||
|
||||
60 setup.py
@ -1,11 +1,12 @@
|
||||
# Generated by Medikit 0.6.1 on 2018-05-21.
|
||||
# Generated by Medikit 0.6.3 on 2018-08-11.
|
||||
# All changes will be overriden.
|
||||
# Edit Projectfile and run “make update” (or “medikit update”) to regenerate.
|
||||
|
||||
from setuptools import setup, find_packages
|
||||
from codecs import open
|
||||
from os import path
|
||||
|
||||
from setuptools import find_packages, setup
|
||||
|
||||
here = path.abspath(path.dirname(__file__))
|
||||
|
||||
# Py3 compatibility hacks, borrowed from IPython.
|
||||
@ -44,14 +45,17 @@ else:
|
||||
setup(
|
||||
author='Romain Dorgueil',
|
||||
author_email='romain@dorgueil.net',
|
||||
data_files=[(
|
||||
'share/jupyter/nbextensions/bonobo-jupyter', [
|
||||
'bonobo/contrib/jupyter/static/extension.js', 'bonobo/contrib/jupyter/static/index.js',
|
||||
'bonobo/contrib/jupyter/static/index.js.map'
|
||||
]
|
||||
)],
|
||||
description=('Bonobo, a simple, modern and atomic extract-transform-load toolkit for '
|
||||
'python 3.5+.'),
|
||||
data_files=[
|
||||
(
|
||||
'share/jupyter/nbextensions/bonobo-jupyter',
|
||||
[
|
||||
'bonobo/contrib/jupyter/static/extension.js',
|
||||
'bonobo/contrib/jupyter/static/index.js',
|
||||
'bonobo/contrib/jupyter/static/index.js.map',
|
||||
],
|
||||
)
|
||||
],
|
||||
description=('Bonobo, a simple, modern and atomic extract-transform-load toolkit for ' 'python 3.5+.'),
|
||||
license='Apache License, Version 2.0',
|
||||
name='bonobo',
|
||||
version=version,
|
||||
@ -60,26 +64,42 @@ setup(
|
||||
packages=find_packages(exclude=['ez_setup', 'example', 'test']),
|
||||
include_package_data=True,
|
||||
install_requires=[
|
||||
'fs (~= 2.0)', 'graphviz (>= 0.8, < 0.9)', 'jinja2 (~= 2.9)', 'mondrian (~= 0.7)', 'packaging (~= 17.0)',
|
||||
'psutil (~= 5.4)', 'python-slugify (~= 1.2.0)', 'requests (~= 2.0)', 'stevedore (~= 1.27)', 'whistle (~= 1.0)'
|
||||
'fs (~= 2.0)',
|
||||
'graphviz (>= 0.8, < 0.9)',
|
||||
'jinja2 (~= 2.9)',
|
||||
'mondrian (~= 0.7)',
|
||||
'packaging (~= 17.0)',
|
||||
'psutil (~= 5.4)',
|
||||
'python-slugify (~= 1.2.0)',
|
||||
'requests (~= 2.0)',
|
||||
'stevedore (~= 1.27)',
|
||||
'whistle (~= 1.0)',
|
||||
],
|
||||
extras_require={
|
||||
'dev': [
|
||||
'cookiecutter (>= 1.5, < 1.6)', 'coverage (~= 4.4)', 'pytest (~= 3.4)', 'pytest-cov (~= 2.5)',
|
||||
'pytest-timeout (>= 1, < 2)', 'sphinx (~= 1.7)', 'sphinx-sitemap (>= 0.2, < 0.3)', 'yapf'
|
||||
'cookiecutter (>= 1.5, < 1.6)',
|
||||
'coverage (~= 4.4)',
|
||||
'pytest (~= 3.4)',
|
||||
'pytest-cov (~= 2.5)',
|
||||
'pytest-timeout (>= 1, < 2)',
|
||||
'sphinx (~= 1.7)',
|
||||
'sphinx-sitemap (>= 0.2, < 0.3)',
|
||||
],
|
||||
'docker': ['bonobo-docker (~= 0.6.0a1)'],
|
||||
'jupyter': ['ipywidgets (~= 6.0)', 'jupyter (~= 1.0)'],
|
||||
'sqlalchemy': ['bonobo-sqlalchemy (~= 0.6.0a1)']
|
||||
'sqlalchemy': ['bonobo-sqlalchemy (~= 0.6.0a1)'],
|
||||
},
|
||||
entry_points={
|
||||
'bonobo.commands': [
|
||||
'convert = bonobo.commands.convert:ConvertCommand', 'download = bonobo.commands.download:DownloadCommand',
|
||||
'examples = bonobo.commands.examples:ExamplesCommand', 'init = bonobo.commands.init:InitCommand',
|
||||
'inspect = bonobo.commands.inspect:InspectCommand', 'run = bonobo.commands.run:RunCommand',
|
||||
'version = bonobo.commands.version:VersionCommand'
|
||||
'convert = bonobo.commands.convert:ConvertCommand',
|
||||
'download = bonobo.commands.download:DownloadCommand',
|
||||
'examples = bonobo.commands.examples:ExamplesCommand',
|
||||
'init = bonobo.commands.init:InitCommand',
|
||||
'inspect = bonobo.commands.inspect:InspectCommand',
|
||||
'run = bonobo.commands.run:RunCommand',
|
||||
'version = bonobo.commands.version:VersionCommand',
|
||||
],
|
||||
'console_scripts': ['bonobo = bonobo.commands:entrypoint']
|
||||
'console_scripts': ['bonobo = bonobo.commands:entrypoint'],
|
||||
},
|
||||
url='https://www.bonobo-project.org/',
|
||||
download_url='https://github.com/python-bonobo/bonobo/tarball/{version}'.format(version=version),
|
||||
|
||||
@ -9,17 +9,11 @@ def test_entrypoint():
|
||||
for command in pkg_resources.iter_entry_points('bonobo.commands'):
|
||||
commands[command.name] = command
|
||||
|
||||
assert not {
|
||||
'convert',
|
||||
'init',
|
||||
'inspect',
|
||||
'run',
|
||||
'version',
|
||||
}.difference(set(commands))
|
||||
assert not {'convert', 'init', 'inspect', 'run', 'version'}.difference(set(commands))
|
||||
|
||||
|
||||
@all_runners
|
||||
def test_no_command(runner):
|
||||
_, err, exc = runner(catch_errors=True)
|
||||
assert type(exc) == SystemExit
|
||||
assert 'error: the following arguments are required: command' in err
|
||||
assert 'error: the following arguments are required: command' in err
|
||||
|
||||
@ -27,8 +27,9 @@ def test_download_works_for_examples(runner):
|
||||
fout = io.BytesIO()
|
||||
fout.close = lambda: None
|
||||
|
||||
with patch('bonobo.commands.download._open_url') as mock_open_url, \
|
||||
patch('bonobo.commands.download.open') as mock_open:
|
||||
with patch('bonobo.commands.download._open_url') as mock_open_url, patch(
|
||||
'bonobo.commands.download.open'
|
||||
) as mock_open:
|
||||
mock_open_url.return_value = MockResponse()
|
||||
mock_open.return_value = fout
|
||||
runner('download', 'examples/datasets/coffeeshops.txt')
|
||||
@ -41,4 +42,4 @@ def test_download_works_for_examples(runner):
|
||||
@all_runners
|
||||
def test_download_fails_non_example(runner):
|
||||
with pytest.raises(ValueError):
|
||||
runner('download', 'something/entirely/different.txt')
|
||||
runner('download', 'something/entirely/different.txt')
|
||||
|
||||
@ -6,21 +6,14 @@ from bonobo.util.testing import EnvironmentTestCase
|
||||
@pytest.fixture
|
||||
def env1(tmpdir):
|
||||
env_file = tmpdir.join('.env_one')
|
||||
env_file.write('\n'.join((
|
||||
'SECRET=unknown',
|
||||
'PASSWORD=sweet',
|
||||
'PATH=first',
|
||||
)))
|
||||
env_file.write('\n'.join(('SECRET=unknown', 'PASSWORD=sweet', 'PATH=first')))
|
||||
return str(env_file)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def env2(tmpdir):
|
||||
env_file = tmpdir.join('.env_two')
|
||||
env_file.write('\n'.join((
|
||||
'PASSWORD=bitter',
|
||||
"PATH='second'",
|
||||
)))
|
||||
env_file.write('\n'.join(('PASSWORD=bitter', "PATH='second'")))
|
||||
return str(env_file)
|
||||
|
||||
|
||||
@ -71,7 +64,15 @@ class TestEnvFileCombinations(EnvironmentTestCase):
|
||||
|
||||
def test_run_with_both_env_files_then_overrides(self, runner, target, env1, env2):
|
||||
env = self.run_environ(
|
||||
runner, *target, '--default-env-file', env1, '--env-file', env2, '--env', 'PASSWORD=mine', '--env',
|
||||
runner,
|
||||
*target,
|
||||
'--default-env-file',
|
||||
env1,
|
||||
'--env-file',
|
||||
env2,
|
||||
'--env',
|
||||
'PASSWORD=mine',
|
||||
'--env',
|
||||
'SECRET=s3cr3t'
|
||||
)
|
||||
assert env.get('SECRET') == 's3cr3t'
|
||||
|
||||
@ -17,4 +17,4 @@ def test_version(runner):
|
||||
out, err = runner('version', '-qq')
|
||||
out = out.strip()
|
||||
assert not out.startswith('bonobo ')
|
||||
assert __version__ in out
|
||||
assert __version__ in out
|
||||
|
||||
@ -50,10 +50,7 @@ def test_define_with_decorator():
|
||||
calls = []
|
||||
|
||||
def my_handler(*args, **kwargs):
|
||||
calls.append((
|
||||
args,
|
||||
kwargs,
|
||||
))
|
||||
calls.append((args, kwargs))
|
||||
|
||||
Concrete = MethodBasedConfigurable(my_handler)
|
||||
|
||||
@ -77,10 +74,7 @@ def test_late_binding_method_decoration():
|
||||
|
||||
@MethodBasedConfigurable(foo='foo')
|
||||
def Concrete(*args, **kwargs):
|
||||
calls.append((
|
||||
args,
|
||||
kwargs,
|
||||
))
|
||||
calls.append((args, kwargs))
|
||||
|
||||
assert callable(Concrete.handler)
|
||||
t = Concrete(bar='baz')
|
||||
@ -95,10 +89,7 @@ def test_define_with_argument():
|
||||
calls = []
|
||||
|
||||
def concrete_handler(*args, **kwargs):
|
||||
calls.append((
|
||||
args,
|
||||
kwargs,
|
||||
))
|
||||
calls.append((args, kwargs))
|
||||
|
||||
t = MethodBasedConfigurable(concrete_handler, 'foo', bar='baz')
|
||||
assert callable(t.handler)
|
||||
@ -112,10 +103,7 @@ def test_define_with_inheritance():
|
||||
|
||||
class Inheriting(MethodBasedConfigurable):
|
||||
def handler(self, *args, **kwargs):
|
||||
calls.append((
|
||||
args,
|
||||
kwargs,
|
||||
))
|
||||
calls.append((args, kwargs))
|
||||
|
||||
t = Inheriting('foo', bar='baz')
|
||||
assert callable(t.handler)
|
||||
@ -132,10 +120,7 @@ def test_inheritance_then_decorate():
|
||||
|
||||
@Inheriting
|
||||
def Concrete(*args, **kwargs):
|
||||
calls.append((
|
||||
args,
|
||||
kwargs,
|
||||
))
|
||||
calls.append((args, kwargs))
|
||||
|
||||
assert callable(Concrete.handler)
|
||||
t = Concrete('foo', bar='baz')
|
||||
|
||||
@ -40,7 +40,7 @@ def test_partial():
|
||||
assert len(ci.options) == 4
|
||||
assert len(ci.processors) == 1
|
||||
assert ci.partial
|
||||
assert ci.partial[0] == (f1, )
|
||||
assert ci.partial[0] == (f1,)
|
||||
assert not len(ci.partial[1])
|
||||
|
||||
# instantiate a more complete partial instance ...
|
||||
@ -53,10 +53,7 @@ def test_partial():
|
||||
assert len(ci.options) == 4
|
||||
assert len(ci.processors) == 1
|
||||
assert ci.partial
|
||||
assert ci.partial[0] == (
|
||||
f1,
|
||||
f2,
|
||||
)
|
||||
assert ci.partial[0] == (f1, f2)
|
||||
assert not len(ci.partial[1])
|
||||
|
||||
c = C('foo')
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
from operator import attrgetter
|
||||
|
||||
from bonobo.config import Configurable
|
||||
from bonobo.config.processors import ContextProcessor, resolve_processors, ContextCurrifier, use_context_processor
|
||||
from bonobo.config.processors import ContextCurrifier, ContextProcessor, resolve_processors, use_context_processor
|
||||
|
||||
|
||||
class CP1(Configurable):
|
||||
|
||||
@ -4,11 +4,11 @@ import time
|
||||
import pytest
|
||||
|
||||
from bonobo.config import Configurable, Container, Exclusive, Service, use
|
||||
from bonobo.config.services import validate_service_name, create_container
|
||||
from bonobo.config.services import create_container, validate_service_name
|
||||
from bonobo.util import get_name
|
||||
|
||||
|
||||
class PrinterInterface():
|
||||
class PrinterInterface:
|
||||
def print(self, *args):
|
||||
raise NotImplementedError()
|
||||
|
||||
@ -21,14 +21,11 @@ class ConcretePrinter(PrinterInterface):
|
||||
return ';'.join((self.prefix, *args))
|
||||
|
||||
|
||||
SERVICES = Container(
|
||||
printer0=ConcretePrinter(prefix='0'),
|
||||
printer1=ConcretePrinter(prefix='1'),
|
||||
)
|
||||
SERVICES = Container(printer0=ConcretePrinter(prefix='0'), printer1=ConcretePrinter(prefix='1'))
|
||||
|
||||
|
||||
class MyServiceDependantConfigurable(Configurable):
|
||||
printer = Service(PrinterInterface, )
|
||||
printer = Service(PrinterInterface)
|
||||
|
||||
def __call__(self, *args, printer: PrinterInterface):
|
||||
return printer.print(*args)
|
||||
@ -80,7 +77,7 @@ def test_exclusive():
|
||||
vcr.append(' '.join((prefix, str(i))))
|
||||
time.sleep(0.05)
|
||||
|
||||
threads = [threading.Thread(target=record, args=(str(i), )) for i in range(5)]
|
||||
threads = [threading.Thread(target=record, args=(str(i),)) for i in range(5)]
|
||||
|
||||
for thread in threads:
|
||||
thread.start()
|
||||
@ -90,8 +87,32 @@ def test_exclusive():
|
||||
thread.join()
|
||||
|
||||
assert vcr.tape == [
|
||||
'hello', '0 0', '0 1', '0 2', '0 3', '0 4', '1 0', '1 1', '1 2', '1 3', '1 4', '2 0', '2 1', '2 2', '2 3',
|
||||
'2 4', '3 0', '3 1', '3 2', '3 3', '3 4', '4 0', '4 1', '4 2', '4 3', '4 4'
|
||||
'hello',
|
||||
'0 0',
|
||||
'0 1',
|
||||
'0 2',
|
||||
'0 3',
|
||||
'0 4',
|
||||
'1 0',
|
||||
'1 1',
|
||||
'1 2',
|
||||
'1 3',
|
||||
'1 4',
|
||||
'2 0',
|
||||
'2 1',
|
||||
'2 2',
|
||||
'2 3',
|
||||
'2 4',
|
||||
'3 0',
|
||||
'3 1',
|
||||
'3 2',
|
||||
'3 3',
|
||||
'3 4',
|
||||
'4 0',
|
||||
'4 1',
|
||||
'4 2',
|
||||
'4 3',
|
||||
'4 4',
|
||||
]
|
||||
|
||||
|
||||
@ -118,10 +139,7 @@ def test_create_container_empty_values(services):
|
||||
|
||||
|
||||
def test_create_container_override():
|
||||
c = create_container({
|
||||
'http': 'http',
|
||||
'fs': 'fs',
|
||||
})
|
||||
c = create_container({'http': 'http', 'fs': 'fs'})
|
||||
assert len(c) == 2
|
||||
assert 'fs' in c and c['fs'] == 'fs'
|
||||
assert 'http' in c and c['http'] == 'http'
|
||||
|
||||
@ -3,10 +3,10 @@ from unittest.mock import MagicMock
|
||||
import pytest
|
||||
|
||||
from bonobo import Graph
|
||||
from bonobo.constants import EMPTY, NOT_MODIFIED, INHERIT
|
||||
from bonobo.constants import EMPTY, INHERIT, NOT_MODIFIED
|
||||
from bonobo.execution.contexts.node import NodeExecutionContext, split_token
|
||||
from bonobo.execution.strategies import NaiveStrategy
|
||||
from bonobo.util.testing import BufferingNodeExecutionContext, BufferingGraphExecutionContext
|
||||
from bonobo.util.testing import BufferingGraphExecutionContext, BufferingNodeExecutionContext
|
||||
|
||||
|
||||
def test_node_string():
|
||||
@ -18,7 +18,7 @@ def test_node_string():
|
||||
output = context.get_buffer()
|
||||
|
||||
assert len(output) == 1
|
||||
assert output[0] == ('foo', )
|
||||
assert output[0] == ('foo',)
|
||||
|
||||
def g():
|
||||
yield 'foo'
|
||||
@ -29,8 +29,8 @@ def test_node_string():
|
||||
output = context.get_buffer()
|
||||
|
||||
assert len(output) == 2
|
||||
assert output[0] == ('foo', )
|
||||
assert output[1] == ('bar', )
|
||||
assert output[0] == ('foo',)
|
||||
assert output[1] == ('bar',)
|
||||
|
||||
|
||||
def test_node_bytes():
|
||||
@ -42,7 +42,7 @@ def test_node_bytes():
|
||||
|
||||
output = context.get_buffer()
|
||||
assert len(output) == 1
|
||||
assert output[0] == (b'foo', )
|
||||
assert output[0] == (b'foo',)
|
||||
|
||||
def g():
|
||||
yield b'foo'
|
||||
@ -53,8 +53,8 @@ def test_node_bytes():
|
||||
output = context.get_buffer()
|
||||
|
||||
assert len(output) == 2
|
||||
assert output[0] == (b'foo', )
|
||||
assert output[1] == (b'bar', )
|
||||
assert output[0] == (b'foo',)
|
||||
assert output[1] == (b'bar',)
|
||||
|
||||
|
||||
def test_node_dict():
|
||||
@ -65,7 +65,7 @@ def test_node_dict():
|
||||
context.write_sync(EMPTY)
|
||||
output = context.get_buffer()
|
||||
assert len(output) == 1
|
||||
assert output[0] == ({'id': 1, 'name': 'foo'}, )
|
||||
assert output[0] == ({'id': 1, 'name': 'foo'},)
|
||||
|
||||
def g():
|
||||
yield {'id': 1, 'name': 'foo'}
|
||||
@ -75,8 +75,8 @@ def test_node_dict():
|
||||
context.write_sync(EMPTY)
|
||||
output = context.get_buffer()
|
||||
assert len(output) == 2
|
||||
assert output[0] == ({'id': 1, 'name': 'foo'}, )
|
||||
assert output[1] == ({'id': 2, 'name': 'bar'}, )
|
||||
assert output[0] == ({'id': 1, 'name': 'foo'},)
|
||||
assert output[1] == ({'id': 2, 'name': 'bar'},)
|
||||
|
||||
|
||||
def test_node_dict_chained():
|
||||
@ -93,7 +93,7 @@ def test_node_dict_chained():
|
||||
output = context.get_buffer()
|
||||
|
||||
assert len(output) == 1
|
||||
assert output[0] == ({'id': 1, 'name': 'FOO'}, )
|
||||
assert output[0] == ({'id': 1, 'name': 'FOO'},)
|
||||
|
||||
def g():
|
||||
yield {'id': 1, 'name': 'foo'}
|
||||
@ -104,8 +104,8 @@ def test_node_dict_chained():
|
||||
output = context.get_buffer()
|
||||
|
||||
assert len(output) == 2
|
||||
assert output[0] == ({'id': 1, 'name': 'FOO'}, )
|
||||
assert output[1] == ({'id': 2, 'name': 'BAR'}, )
|
||||
assert output[0] == ({'id': 1, 'name': 'FOO'},)
|
||||
assert output[1] == ({'id': 2, 'name': 'BAR'},)
|
||||
|
||||
|
||||
def test_node_tuple():
|
||||
@ -229,7 +229,7 @@ def test_node_lifecycle_with_kill():
|
||||
def test_split_token():
|
||||
assert split_token(('foo', 'bar')) == (set(), ('foo', 'bar'))
|
||||
assert split_token(()) == (set(), ())
|
||||
assert split_token('') == (set(), ('', ))
|
||||
assert split_token('') == (set(), ('',))
|
||||
|
||||
|
||||
def test_split_token_duplicate():
|
||||
@ -249,10 +249,10 @@ def test_split_token_not_modified():
|
||||
with pytest.raises(ValueError):
|
||||
split_token((INHERIT, NOT_MODIFIED))
|
||||
assert split_token(NOT_MODIFIED) == ({NOT_MODIFIED}, ())
|
||||
assert split_token((NOT_MODIFIED, )) == ({NOT_MODIFIED}, ())
|
||||
assert split_token((NOT_MODIFIED,)) == ({NOT_MODIFIED}, ())
|
||||
|
||||
|
||||
def test_split_token_inherit():
|
||||
assert split_token(INHERIT) == ({INHERIT}, ())
|
||||
assert split_token((INHERIT, )) == ({INHERIT}, ())
|
||||
assert split_token((INHERIT,)) == ({INHERIT}, ())
|
||||
assert split_token((INHERIT, 'foo', 'bar')) == ({INHERIT}, ('foo', 'bar'))
|
||||
|
||||
@ -14,16 +14,11 @@ class ResponseMock:
|
||||
return {}
|
||||
else:
|
||||
self.count += 1
|
||||
return {
|
||||
'records': self.json_value,
|
||||
}
|
||||
return {'records': self.json_value}
|
||||
|
||||
|
||||
def test_read_from_opendatasoft_api():
|
||||
extract = OpenDataSoftAPI(dataset='test-a-set')
|
||||
with patch('requests.get', return_value=ResponseMock([
|
||||
{'fields': {'foo': 'bar'}},
|
||||
{'fields': {'foo': 'zab'}},
|
||||
])):
|
||||
with patch('requests.get', return_value=ResponseMock([{'fields': {'foo': 'bar'}}, {'fields': {'foo': 'zab'}}])):
|
||||
for line in extract('http://example.com/', ValueHolder(0)):
|
||||
assert 'foo' in line
|
||||
|
||||
@ -1,10 +1,7 @@
|
||||
from bonobo.constants import INHERIT
|
||||
from bonobo.util.testing import BufferingNodeExecutionContext
|
||||
|
||||
messages = [
|
||||
('Hello', ),
|
||||
('Goodbye', ),
|
||||
]
|
||||
messages = [('Hello',), ('Goodbye',)]
|
||||
|
||||
|
||||
def append(*args):
|
||||
@ -15,7 +12,7 @@ def test_inherit():
|
||||
with BufferingNodeExecutionContext(append) as context:
|
||||
context.write_sync(*messages)
|
||||
|
||||
assert context.get_buffer() == list(map(lambda x: x + ('!', ), messages))
|
||||
assert context.get_buffer() == list(map(lambda x: x + ('!',), messages))
|
||||
|
||||
|
||||
def test_inherit_bag_tuple():
|
||||
@ -24,4 +21,4 @@ def test_inherit_bag_tuple():
|
||||
context.write_sync(*messages)
|
||||
|
||||
assert context.get_output_fields() == ('message', '0')
|
||||
assert context.get_buffer() == list(map(lambda x: x + ('!', ), messages))
|
||||
assert context.get_buffer() == list(map(lambda x: x + ('!',), messages))
|
||||
|
||||
@ -7,10 +7,7 @@ def useless(*args, **kwargs):
|
||||
|
||||
|
||||
def test_not_modified():
|
||||
input_messages = [
|
||||
('foo', 'bar'),
|
||||
('foo', 'baz'),
|
||||
]
|
||||
input_messages = [('foo', 'bar'), ('foo', 'baz')]
|
||||
|
||||
with BufferingNodeExecutionContext(useless) as context:
|
||||
context.write_sync(*input_messages)
|
||||
|
||||
@ -6,8 +6,9 @@ import pytest
|
||||
|
||||
from bonobo import CsvReader, CsvWriter
|
||||
from bonobo.constants import EMPTY
|
||||
from bonobo.util.testing import FilesystemTester, BufferingNodeExecutionContext, WriterTest, ConfigurableNodeTest, \
|
||||
ReaderTest
|
||||
from bonobo.util.testing import (
|
||||
BufferingNodeExecutionContext, ConfigurableNodeTest, FilesystemTester, ReaderTest, WriterTest
|
||||
)
|
||||
|
||||
csv_tester = FilesystemTester('csv')
|
||||
csv_tester.input_data = 'a,b,c\na foo,b foo,c foo\na bar,b bar,c bar'
|
||||
@ -23,15 +24,10 @@ def test_read_csv_from_file_kwargs(tmpdir):
|
||||
with BufferingNodeExecutionContext(CsvReader(filename, **defaults), services=services) as context:
|
||||
context.write_sync(EMPTY)
|
||||
|
||||
assert context.get_buffer_args_as_dicts() == [{
|
||||
'a': 'a foo',
|
||||
'b': 'b foo',
|
||||
'c': 'c foo',
|
||||
}, {
|
||||
'a': 'a bar',
|
||||
'b': 'b bar',
|
||||
'c': 'c bar',
|
||||
}]
|
||||
assert context.get_buffer_args_as_dicts() == [
|
||||
{'a': 'a foo', 'b': 'b foo', 'c': 'c foo'},
|
||||
{'a': 'a bar', 'b': 'b bar', 'c': 'c bar'},
|
||||
]
|
||||
|
||||
|
||||
###
|
||||
@ -50,22 +46,11 @@ LL = ('i', 'have', 'more', 'values')
|
||||
|
||||
|
||||
class CsvReaderTest(Csv, ReaderTest, TestCase):
|
||||
input_data = '\n'.join((
|
||||
'id,name',
|
||||
'1,John Doe',
|
||||
'2,Jane Doe',
|
||||
',DPR',
|
||||
'42,Elon Musk',
|
||||
))
|
||||
input_data = '\n'.join(('id,name', '1,John Doe', '2,Jane Doe', ',DPR', '42,Elon Musk'))
|
||||
|
||||
def check_output(self, context, *, prepend=None):
|
||||
out = context.get_buffer()
|
||||
assert out == (prepend or list()) + [
|
||||
('1', 'John Doe'),
|
||||
('2', 'Jane Doe'),
|
||||
('', 'DPR'),
|
||||
('42', 'Elon Musk'),
|
||||
]
|
||||
assert out == (prepend or list()) + [('1', 'John Doe'), ('2', 'Jane Doe'), ('', 'DPR'), ('42', 'Elon Musk')]
|
||||
|
||||
@incontext()
|
||||
def test_nofields(self, context):
|
||||
@ -80,12 +65,7 @@ class CsvReaderTest(Csv, ReaderTest, TestCase):
|
||||
context.stop()
|
||||
self.check_output(context, prepend=[('id', 'name')])
|
||||
|
||||
@incontext(
|
||||
output_fields=(
|
||||
'x',
|
||||
'y',
|
||||
), skip=1
|
||||
)
|
||||
@incontext(output_fields=('x', 'y'), skip=1)
|
||||
def test_output_fields(self, context):
|
||||
context.write_sync(EMPTY)
|
||||
context.stop()
|
||||
@ -107,11 +87,7 @@ class CsvWriterTest(Csv, WriterTest, TestCase):
|
||||
context.write_sync(('a', 'b'), ('c', 'd'))
|
||||
context.stop()
|
||||
|
||||
assert self.readlines() == (
|
||||
'foo,bar',
|
||||
'a,b',
|
||||
'c,d',
|
||||
)
|
||||
assert self.readlines() == ('foo,bar', 'a,b', 'c,d')
|
||||
|
||||
@incontext()
|
||||
def test_fields_from_type(self, context):
|
||||
@ -127,30 +103,21 @@ class CsvWriterTest(Csv, WriterTest, TestCase):
|
||||
context.write_sync((L1, L2), (L3, L4))
|
||||
context.stop()
|
||||
|
||||
assert self.readlines() == (
|
||||
'a,hey',
|
||||
'b,bee',
|
||||
'c,see',
|
||||
'd,dee',
|
||||
)
|
||||
assert self.readlines() == ('a,hey', 'b,bee', 'c,see', 'd,dee')
|
||||
|
||||
@incontext()
|
||||
def test_nofields_multiple_args_length_mismatch(self, context):
|
||||
# if length of input vary, then we get a TypeError (unrecoverable)
|
||||
with pytest.raises(TypeError):
|
||||
context.write_sync((L1, L2), (L3, ))
|
||||
context.write_sync((L1, L2), (L3,))
|
||||
|
||||
@incontext()
|
||||
def test_nofields_single_arg(self, context):
|
||||
# single args are just dumped, shapes can vary.
|
||||
context.write_sync((L1, ), (LL, ), (L3, ))
|
||||
context.write_sync((L1,), (LL,), (L3,))
|
||||
context.stop()
|
||||
|
||||
assert self.readlines() == (
|
||||
'a,hey',
|
||||
'i,have,more,values',
|
||||
'c,see',
|
||||
)
|
||||
assert self.readlines() == ('a,hey', 'i,have,more,values', 'c,see')
|
||||
|
||||
@incontext()
|
||||
def test_nofields_empty_args(self, context):
|
||||
|
||||
@ -21,10 +21,7 @@ def test_file_writer_contextless(tmpdir):
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'lines,output',
|
||||
[
|
||||
(('ACME', ), 'ACME'), # one line...
|
||||
(('Foo', 'Bar', 'Baz'), 'Foo\nBar\nBaz'), # more than one line...
|
||||
]
|
||||
[(('ACME',), 'ACME'), (('Foo', 'Bar', 'Baz'), 'Foo\nBar\nBaz')], # one line... # more than one line...
|
||||
)
|
||||
def test_file_writer_in_context(tmpdir, lines, output):
|
||||
fs, filename, services = txt_tester.get_services_for_writer(tmpdir)
|
||||
@ -44,5 +41,5 @@ def test_file_reader(tmpdir):
|
||||
|
||||
output = context.get_buffer()
|
||||
assert len(output) == 2
|
||||
assert output[0] == ('Hello', )
|
||||
assert output[1] == ('World', )
|
||||
assert output[0] == ('Hello',)
|
||||
assert output[1] == ('World',)
|
||||
|
||||
@ -4,10 +4,9 @@ from unittest import TestCase
|
||||
|
||||
import pytest
|
||||
|
||||
from bonobo import JsonReader, JsonWriter
|
||||
from bonobo import LdjsonReader, LdjsonWriter
|
||||
from bonobo import JsonReader, JsonWriter, LdjsonReader, LdjsonWriter
|
||||
from bonobo.constants import EMPTY
|
||||
from bonobo.util.testing import WriterTest, ReaderTest, ConfigurableNodeTest
|
||||
from bonobo.util.testing import ConfigurableNodeTest, ReaderTest, WriterTest
|
||||
|
||||
FOOBAR = {'foo': 'bar'}
|
||||
OD_ABC = OrderedDict((('a', 'A'), ('b', 'B'), ('c', 'C')))
|
||||
@ -34,14 +33,7 @@ class JsonReaderDictsTest(Json, ReaderTest, TestCase):
|
||||
context.write_sync(EMPTY)
|
||||
context.stop()
|
||||
|
||||
assert context.get_buffer() == [
|
||||
({
|
||||
"foo": "bar"
|
||||
}, ),
|
||||
({
|
||||
"baz": "boz"
|
||||
}, ),
|
||||
]
|
||||
assert context.get_buffer() == [({"foo": "bar"},), ({"baz": "boz"},)]
|
||||
|
||||
|
||||
class JsonReaderListsTest(Json, ReaderTest, TestCase):
|
||||
@ -52,20 +44,14 @@ class JsonReaderListsTest(Json, ReaderTest, TestCase):
|
||||
context.write_sync(EMPTY)
|
||||
context.stop()
|
||||
|
||||
assert context.get_buffer() == [
|
||||
([1, 2, 3], ),
|
||||
([4, 5, 6], ),
|
||||
]
|
||||
assert context.get_buffer() == [([1, 2, 3],), ([4, 5, 6],)]
|
||||
|
||||
@incontext(output_type=tuple)
|
||||
def test_output_type(self, context):
|
||||
context.write_sync(EMPTY)
|
||||
context.stop()
|
||||
|
||||
assert context.get_buffer() == [
|
||||
([1, 2, 3], ),
|
||||
([4, 5, 6], ),
|
||||
]
|
||||
assert context.get_buffer() == [([1, 2, 3],), ([4, 5, 6],)]
|
||||
|
||||
|
||||
class JsonReaderStringsTest(Json, ReaderTest, TestCase):
|
||||
@ -76,22 +62,14 @@ class JsonReaderStringsTest(Json, ReaderTest, TestCase):
|
||||
context.write_sync(EMPTY)
|
||||
context.stop()
|
||||
|
||||
assert context.get_buffer() == [
|
||||
('foo', ),
|
||||
('bar', ),
|
||||
('baz', ),
|
||||
]
|
||||
assert context.get_buffer() == [('foo',), ('bar',), ('baz',)]
|
||||
|
||||
@incontext(output_type=tuple)
|
||||
def test_output_type(self, context):
|
||||
context.write_sync(EMPTY)
|
||||
context.stop()
|
||||
|
||||
assert context.get_buffer() == [
|
||||
('foo', ),
|
||||
('bar', ),
|
||||
('baz', ),
|
||||
]
|
||||
assert context.get_buffer() == [('foo',), ('bar',), ('baz',)]
|
||||
|
||||
|
||||
class JsonWriterTest(Json, WriterTest, TestCase):
|
||||
@ -101,10 +79,7 @@ class JsonWriterTest(Json, WriterTest, TestCase):
|
||||
context.write_sync(('a', 'b'), ('c', 'd'))
|
||||
context.stop()
|
||||
|
||||
assert self.readlines() == (
|
||||
'[{"foo": "a", "bar": "b"},',
|
||||
'{"foo": "c", "bar": "d"}]',
|
||||
)
|
||||
assert self.readlines() == ('[{"foo": "a", "bar": "b"},', '{"foo": "c", "bar": "d"}]')
|
||||
|
||||
@incontext()
|
||||
def test_fields_from_type(self, context):
|
||||
@ -112,10 +87,7 @@ class JsonWriterTest(Json, WriterTest, TestCase):
|
||||
context.write_sync((1, 2), (3, 4))
|
||||
context.stop()
|
||||
|
||||
assert self.readlines() == (
|
||||
'[{"x": 1, "y": 2},',
|
||||
'{"x": 3, "y": 4}]',
|
||||
)
|
||||
assert self.readlines() == ('[{"x": 1, "y": 2},', '{"x": 3, "y": 4}]')
|
||||
|
||||
@incontext()
|
||||
def test_nofields_multiple_args(self, context):
|
||||
@ -144,11 +116,7 @@ class JsonWriterTest(Json, WriterTest, TestCase):
|
||||
context.write_sync(FOOBAR, OD_ABC, FOOBAZ)
|
||||
context.stop()
|
||||
|
||||
assert self.readlines() == (
|
||||
'[{"foo": "bar"},',
|
||||
'{"a": "A", "b": "B", "c": "C"},',
|
||||
'{"foo": "baz"}]',
|
||||
)
|
||||
assert self.readlines() == ('[{"foo": "bar"},', '{"a": "A", "b": "B", "c": "C"},', '{"foo": "baz"}]')
|
||||
|
||||
@incontext()
|
||||
def test_nofields_empty_args(self, context):
|
||||
@ -156,7 +124,7 @@ class JsonWriterTest(Json, WriterTest, TestCase):
|
||||
context.write_sync(EMPTY, EMPTY, EMPTY)
|
||||
context.stop()
|
||||
|
||||
assert self.readlines() == ('[]', )
|
||||
assert self.readlines() == ('[]',)
|
||||
|
||||
|
||||
###
|
||||
@ -178,14 +146,7 @@ class LdjsonReaderDictsTest(Ldjson, ReaderTest, TestCase):
|
||||
context.write_sync(EMPTY)
|
||||
context.stop()
|
||||
|
||||
assert context.get_buffer() == [
|
||||
({
|
||||
"foo": "bar"
|
||||
}, ),
|
||||
({
|
||||
"baz": "boz"
|
||||
}, ),
|
||||
]
|
||||
assert context.get_buffer() == [({"foo": "bar"},), ({"baz": "boz"},)]
|
||||
|
||||
|
||||
class LdjsonReaderListsTest(Ldjson, ReaderTest, TestCase):
|
||||
@ -196,20 +157,14 @@ class LdjsonReaderListsTest(Ldjson, ReaderTest, TestCase):
|
||||
context.write_sync(EMPTY)
|
||||
context.stop()
|
||||
|
||||
assert context.get_buffer() == [
|
||||
([1, 2, 3], ),
|
||||
([4, 5, 6], ),
|
||||
]
|
||||
assert context.get_buffer() == [([1, 2, 3],), ([4, 5, 6],)]
|
||||
|
||||
@incontext(output_type=tuple)
|
||||
def test_output_type(self, context):
|
||||
context.write_sync(EMPTY)
|
||||
context.stop()
|
||||
|
||||
assert context.get_buffer() == [
|
||||
([1, 2, 3], ),
|
||||
([4, 5, 6], ),
|
||||
]
|
||||
assert context.get_buffer() == [([1, 2, 3],), ([4, 5, 6],)]
|
||||
|
||||
|
||||
class LdjsonReaderStringsTest(Ldjson, ReaderTest, TestCase):
|
||||
@ -220,22 +175,14 @@ class LdjsonReaderStringsTest(Ldjson, ReaderTest, TestCase):
|
||||
context.write_sync(EMPTY)
|
||||
context.stop()
|
||||
|
||||
assert context.get_buffer() == [
|
||||
('foo', ),
|
||||
('bar', ),
|
||||
('baz', ),
|
||||
]
|
||||
assert context.get_buffer() == [('foo',), ('bar',), ('baz',)]
|
||||
|
||||
@incontext(output_type=tuple)
|
||||
def test_output_type(self, context):
|
||||
context.write_sync(EMPTY)
|
||||
context.stop()
|
||||
|
||||
assert context.get_buffer() == [
|
||||
('foo', ),
|
||||
('bar', ),
|
||||
('baz', ),
|
||||
]
|
||||
assert context.get_buffer() == [('foo',), ('bar',), ('baz',)]
|
||||
|
||||
|
||||
class LdjsonWriterTest(Ldjson, WriterTest, TestCase):
|
||||
@ -253,10 +200,7 @@ class LdjsonWriterTest(Ldjson, WriterTest, TestCase):
|
||||
context.write_sync((1, 2), (3, 4))
|
||||
context.stop()
|
||||
|
||||
assert self.readlines() == (
|
||||
'{"x": 1, "y": 2}',
|
||||
'{"x": 3, "y": 4}',
|
||||
)
|
||||
assert self.readlines() == ('{"x": 1, "y": 2}', '{"x": 3, "y": 4}')
|
||||
|
||||
@incontext()
|
||||
def test_nofields_multiple_args(self, context):
|
||||
@ -285,11 +229,7 @@ class LdjsonWriterTest(Ldjson, WriterTest, TestCase):
|
||||
context.write_sync(FOOBAR, OD_ABC, FOOBAZ)
|
||||
context.stop()
|
||||
|
||||
assert self.readlines() == (
|
||||
'{"foo": "bar"}',
|
||||
'{"a": "A", "b": "B", "c": "C"}',
|
||||
'{"foo": "baz"}',
|
||||
)
|
||||
assert self.readlines() == ('{"foo": "bar"}', '{"a": "A", "b": "B", "c": "C"}', '{"foo": "baz"}')
|
||||
|
||||
@incontext()
|
||||
def test_nofields_empty_args(self, context):
|
||||
|
||||
@ -32,7 +32,4 @@ def test_read_pickled_list_from_file(tmpdir):
|
||||
|
||||
output = context.get_buffer()
|
||||
assert context.get_output_fields() == ('a', 'b', 'c')
|
||||
assert output == [
|
||||
('a foo', 'b foo', 'c foo'),
|
||||
('a bar', 'b bar', 'c bar'),
|
||||
]
|
||||
assert output == [('a foo', 'b foo', 'c foo'), ('a bar', 'b bar', 'c bar')]
|
||||
|
||||
@ -5,9 +5,9 @@ from unittest.mock import MagicMock
|
||||
import pytest
|
||||
|
||||
import bonobo
|
||||
from bonobo.constants import NOT_MODIFIED, EMPTY
|
||||
from bonobo.util import ensure_tuple, ValueHolder
|
||||
from bonobo.util.testing import BufferingNodeExecutionContext, StaticNodeTest, ConfigurableNodeTest
|
||||
from bonobo.constants import EMPTY, NOT_MODIFIED
|
||||
from bonobo.util import ValueHolder, ensure_tuple
|
||||
from bonobo.util.testing import BufferingNodeExecutionContext, ConfigurableNodeTest, StaticNodeTest
|
||||
|
||||
|
||||
class CountTest(StaticNodeTest, TestCase):
|
||||
@ -26,7 +26,7 @@ class CountTest(StaticNodeTest, TestCase):
|
||||
def test_execution(self):
|
||||
with self.execute() as context:
|
||||
context.write_sync(*([EMPTY] * 42))
|
||||
assert context.get_buffer() == [(42, )]
|
||||
assert context.get_buffer() == [(42,)]
|
||||
|
||||
|
||||
class IdentityTest(StaticNodeTest, TestCase):
|
||||
@ -98,14 +98,11 @@ def test_fixedwindow():
|
||||
|
||||
with BufferingNodeExecutionContext(bonobo.FixedWindow(2)) as context:
|
||||
context.write_sync(*range(9))
|
||||
assert context.get_buffer() == [(0, 1), (2, 3), (4, 5), (6, 7), (
|
||||
8,
|
||||
None,
|
||||
)]
|
||||
assert context.get_buffer() == [(0, 1), (2, 3), (4, 5), (6, 7), (8, None)]
|
||||
|
||||
with BufferingNodeExecutionContext(bonobo.FixedWindow(1)) as context:
|
||||
context.write_sync(*range(3))
|
||||
assert context.get_buffer() == [(0, ), (1, ), (2, )]
|
||||
assert context.get_buffer() == [(0,), (1,), (2,)]
|
||||
|
||||
|
||||
def test_methodcaller():
|
||||
|
||||
@ -1,10 +1,11 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from whistle import EventDispatcher
|
||||
|
||||
import bonobo
|
||||
from bonobo.execution import events
|
||||
from bonobo.execution.contexts.graph import GraphExecutionContext
|
||||
from bonobo.plugins.console import ConsoleOutputPlugin
|
||||
from whistle import EventDispatcher
|
||||
|
||||
|
||||
def test_register_unregister():
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import pytest
|
||||
|
||||
from unittest.mock import sentinel
|
||||
|
||||
import pytest
|
||||
|
||||
from bonobo.constants import BEGIN
|
||||
from bonobo.structs import Graph
|
||||
|
||||
@ -48,24 +48,14 @@ def test_graph_add_chain():
|
||||
def test_graph_topological_sort():
|
||||
g = Graph()
|
||||
|
||||
g.add_chain(
|
||||
sentinel.a1,
|
||||
sentinel.a2,
|
||||
sentinel.a3,
|
||||
_input=None,
|
||||
_output=None,
|
||||
)
|
||||
g.add_chain(sentinel.a1, sentinel.a2, sentinel.a3, _input=None, _output=None)
|
||||
|
||||
assert g.topologically_sorted_indexes == (0, 1, 2)
|
||||
assert g[0] == sentinel.a1
|
||||
assert g[1] == sentinel.a2
|
||||
assert g[2] == sentinel.a3
|
||||
|
||||
g.add_chain(
|
||||
sentinel.b1,
|
||||
sentinel.b2,
|
||||
_output=sentinel.a2,
|
||||
)
|
||||
g.add_chain(sentinel.b1, sentinel.b2, _output=sentinel.a2)
|
||||
|
||||
assert g.topologically_sorted_indexes[-2:] == (1, 2)
|
||||
assert g.topologically_sorted_indexes.index(3) < g.topologically_sorted_indexes.index(4)
|
||||
|
||||
@ -19,7 +19,7 @@ from queue import Empty
|
||||
import pytest
|
||||
|
||||
from bonobo.constants import BEGIN, END
|
||||
from bonobo.errors import InactiveWritableError, InactiveReadableError
|
||||
from bonobo.errors import InactiveReadableError, InactiveWritableError
|
||||
from bonobo.structs.inputs import Input
|
||||
|
||||
|
||||
|
||||
@ -10,7 +10,7 @@ def generate_integers():
|
||||
|
||||
|
||||
def square(i):
|
||||
return i**2
|
||||
return i ** 2
|
||||
|
||||
|
||||
def results(f, context):
|
||||
|
||||
@ -147,14 +147,14 @@ class TestBagType(unittest.TestCase):
|
||||
self.assertEqual(Zero()._asdict(), {})
|
||||
self.assertEqual(Zero()._fields, ())
|
||||
|
||||
Dot = BagType('Dot', ('d', ))
|
||||
self.assertEqual(Dot(1), (1, ))
|
||||
self.assertEqual(Dot._make([1]), (1, ))
|
||||
Dot = BagType('Dot', ('d',))
|
||||
self.assertEqual(Dot(1), (1,))
|
||||
self.assertEqual(Dot._make([1]), (1,))
|
||||
self.assertEqual(Dot(1).d, 1)
|
||||
self.assertEqual(repr(Dot(1)), 'Dot(d=1)')
|
||||
self.assertEqual(Dot(1)._asdict(), {'d': 1})
|
||||
self.assertEqual(Dot(1)._replace(d=999), (999, ))
|
||||
self.assertEqual(Dot(1)._fields, ('d', ))
|
||||
self.assertEqual(Dot(1)._replace(d=999), (999,))
|
||||
self.assertEqual(Dot(1)._fields, ('d',))
|
||||
|
||||
n = 5000 if sys.version_info >= (3, 7) else 254
|
||||
names = list(set(''.join([choice(string.ascii_letters) for j in range(10)]) for i in range(n)))
|
||||
@ -178,7 +178,7 @@ class TestBagType(unittest.TestCase):
|
||||
|
||||
def test_pickle(self):
|
||||
p = TBag(x=10, y=20, z=30)
|
||||
for module in (pickle, ):
|
||||
for module in (pickle,):
|
||||
loads = getattr(module, 'loads')
|
||||
dumps = getattr(module, 'dumps')
|
||||
for protocol in range(-1, module.HIGHEST_PROTOCOL + 1):
|
||||
@ -206,25 +206,191 @@ class TestBagType(unittest.TestCase):
|
||||
# Broader test of all interesting names taken from the code, old
|
||||
# template, and an example
|
||||
words = {
|
||||
'Alias', 'At', 'AttributeError', 'Build', 'Bypass', 'Create', 'Encountered', 'Expected', 'Field', 'For',
|
||||
'Got', 'Helper', 'IronPython', 'Jython', 'KeyError', 'Make', 'Modify', 'Note', 'OrderedDict', 'Point',
|
||||
'Return', 'Returns', 'Type', 'TypeError', 'Used', 'Validate', 'ValueError', 'Variables', 'a', 'accessible',
|
||||
'add', 'added', 'all', 'also', 'an', 'arg_list', 'args', 'arguments', 'automatically', 'be', 'build',
|
||||
'builtins', 'but', 'by', 'cannot', 'class_namespace', 'classmethod', 'cls', 'collections', 'convert',
|
||||
'copy', 'created', 'creation', 'd', 'debugging', 'defined', 'dict', 'dictionary', 'doc', 'docstring',
|
||||
'docstrings', 'duplicate', 'effect', 'either', 'enumerate', 'environments', 'error', 'example', 'exec', 'f',
|
||||
'f_globals', 'field', 'field_names', 'fields', 'formatted', 'frame', 'function', 'functions', 'generate',
|
||||
'getter', 'got', 'greater', 'has', 'help', 'identifiers', 'indexable', 'instance', 'instantiate',
|
||||
'interning', 'introspection', 'isidentifier', 'isinstance', 'itemgetter', 'iterable', 'join', 'keyword',
|
||||
'keywords', 'kwds', 'len', 'like', 'list', 'map', 'maps', 'message', 'metadata', 'method', 'methods',
|
||||
'module', 'module_name', 'must', 'name', 'named', 'namedtuple', 'namedtuple_', 'names', 'namespace',
|
||||
'needs', 'new', 'nicely', 'num_fields', 'number', 'object', 'of', 'operator', 'option', 'p', 'particular',
|
||||
'pickle', 'pickling', 'plain', 'pop', 'positional', 'property', 'r', 'regular', 'rename', 'replace',
|
||||
'replacing', 'repr', 'repr_fmt', 'representation', 'result', 'reuse_itemgetter', 's', 'seen', 'sequence',
|
||||
'set', 'side', 'specified', 'split', 'start', 'startswith', 'step', 'str', 'string', 'strings', 'subclass',
|
||||
'sys', 'targets', 'than', 'the', 'their', 'this', 'to', 'tuple_new', 'type', 'typename', 'underscore',
|
||||
'unexpected', 'unpack', 'up', 'use', 'used', 'user', 'valid', 'values', 'variable', 'verbose', 'where',
|
||||
'which', 'work', 'x', 'y', 'z', 'zip'
|
||||
'Alias',
|
||||
'At',
|
||||
'AttributeError',
|
||||
'Build',
|
||||
'Bypass',
|
||||
'Create',
|
||||
'Encountered',
|
||||
'Expected',
|
||||
'Field',
|
||||
'For',
|
||||
'Got',
|
||||
'Helper',
|
||||
'IronPython',
|
||||
'Jython',
|
||||
'KeyError',
|
||||
'Make',
|
||||
'Modify',
|
||||
'Note',
|
||||
'OrderedDict',
|
||||
'Point',
|
||||
'Return',
|
||||
'Returns',
|
||||
'Type',
|
||||
'TypeError',
|
||||
'Used',
|
||||
'Validate',
|
||||
'ValueError',
|
||||
'Variables',
|
||||
'a',
|
||||
'accessible',
|
||||
'add',
|
||||
'added',
|
||||
'all',
|
||||
'also',
|
||||
'an',
|
||||
'arg_list',
|
||||
'args',
|
||||
'arguments',
|
||||
'automatically',
|
||||
'be',
|
||||
'build',
|
||||
'builtins',
|
||||
'but',
|
||||
'by',
|
||||
'cannot',
|
||||
'class_namespace',
|
||||
'classmethod',
|
||||
'cls',
|
||||
'collections',
|
||||
'convert',
|
||||
'copy',
|
||||
'created',
|
||||
'creation',
|
||||
'd',
|
||||
'debugging',
|
||||
'defined',
|
||||
'dict',
|
||||
'dictionary',
|
||||
'doc',
|
||||
'docstring',
|
||||
'docstrings',
|
||||
'duplicate',
|
||||
'effect',
|
||||
'either',
|
||||
'enumerate',
|
||||
'environments',
|
||||
'error',
|
||||
'example',
|
||||
'exec',
|
||||
'f',
|
||||
'f_globals',
|
||||
'field',
|
||||
'field_names',
|
||||
'fields',
|
||||
'formatted',
|
||||
'frame',
|
||||
'function',
|
||||
'functions',
|
||||
'generate',
|
||||
'getter',
|
||||
'got',
|
||||
'greater',
|
||||
'has',
|
||||
'help',
|
||||
'identifiers',
|
||||
'indexable',
|
||||
'instance',
|
||||
'instantiate',
|
||||
'interning',
|
||||
'introspection',
|
||||
'isidentifier',
|
||||
'isinstance',
|
||||
'itemgetter',
|
||||
'iterable',
|
||||
'join',
|
||||
'keyword',
|
||||
'keywords',
|
||||
'kwds',
|
||||
'len',
|
||||
'like',
|
||||
'list',
|
||||
'map',
|
||||
'maps',
|
||||
'message',
|
||||
'metadata',
|
||||
'method',
|
||||
'methods',
|
||||
'module',
|
||||
'module_name',
|
||||
'must',
|
||||
'name',
|
||||
'named',
|
||||
'namedtuple',
|
||||
'namedtuple_',
|
||||
'names',
|
||||
'namespace',
|
||||
'needs',
|
||||
'new',
|
||||
'nicely',
|
||||
'num_fields',
|
||||
'number',
|
||||
'object',
|
||||
'of',
|
||||
'operator',
|
||||
'option',
|
||||
'p',
|
||||
'particular',
|
||||
'pickle',
|
||||
'pickling',
|
||||
'plain',
|
||||
'pop',
|
||||
'positional',
|
||||
'property',
|
||||
'r',
|
||||
'regular',
|
||||
'rename',
|
||||
'replace',
|
||||
'replacing',
|
||||
'repr',
|
||||
'repr_fmt',
|
||||
'representation',
|
||||
'result',
|
||||
'reuse_itemgetter',
|
||||
's',
|
||||
'seen',
|
||||
'sequence',
|
||||
'set',
|
||||
'side',
|
||||
'specified',
|
||||
'split',
|
||||
'start',
|
||||
'startswith',
|
||||
'step',
|
||||
'str',
|
||||
'string',
|
||||
'strings',
|
||||
'subclass',
|
||||
'sys',
|
||||
'targets',
|
||||
'than',
|
||||
'the',
|
||||
'their',
|
||||
'this',
|
||||
'to',
|
||||
'tuple_new',
|
||||
'type',
|
||||
'typename',
|
||||
'underscore',
|
||||
'unexpected',
|
||||
'unpack',
|
||||
'up',
|
||||
'use',
|
||||
'used',
|
||||
'user',
|
||||
'valid',
|
||||
'values',
|
||||
'variable',
|
||||
'verbose',
|
||||
'where',
|
||||
'which',
|
||||
'work',
|
||||
'x',
|
||||
'y',
|
||||
'z',
|
||||
'zip',
|
||||
}
|
||||
sorted_words = tuple(sorted(words))
|
||||
T = BagType('T', sorted_words)
|
||||
@ -252,7 +418,7 @@ class TestBagType(unittest.TestCase):
|
||||
self.assertEqual(t.__getnewargs__(), values)
|
||||
|
||||
def test_repr(self):
|
||||
A = BagType('A', ('x', ))
|
||||
A = BagType('A', ('x',))
|
||||
self.assertEqual(repr(A(1)), 'A(x=1)')
|
||||
|
||||
# repr should show the name of the subclass
|
||||
@ -273,6 +439,18 @@ class TestBagType(unittest.TestCase):
|
||||
|
||||
def test_annoying_attribute_names(self):
|
||||
self._create(
|
||||
'__slots__', '__getattr__', '_attrs', '_fields', '__new__', '__getnewargs__', '__repr__', '_make', 'get',
|
||||
'_replace', '_asdict', '_cls', 'self', 'tuple'
|
||||
'__slots__',
|
||||
'__getattr__',
|
||||
'_attrs',
|
||||
'_fields',
|
||||
'__new__',
|
||||
'__getnewargs__',
|
||||
'__repr__',
|
||||
'_make',
|
||||
'get',
|
||||
'_replace',
|
||||
'_asdict',
|
||||
'_cls',
|
||||
'self',
|
||||
'tuple',
|
||||
)
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import pytest
|
||||
|
||||
from bonobo.util import sortedlist, ensure_tuple
|
||||
from bonobo.util.collections import tuplize, cast
|
||||
from bonobo.util import ensure_tuple, sortedlist
|
||||
from bonobo.util.collections import cast, tuplize
|
||||
|
||||
|
||||
def test_sortedlist():
|
||||
@ -14,8 +14,8 @@ def test_sortedlist():
|
||||
|
||||
|
||||
def test_ensure_tuple():
|
||||
assert ensure_tuple('a') == ('a', )
|
||||
assert ensure_tuple(('a', )) == ('a', )
|
||||
assert ensure_tuple('a') == ('a',)
|
||||
assert ensure_tuple(('a',)) == ('a',)
|
||||
assert ensure_tuple(()) is ()
|
||||
|
||||
|
||||
|
||||
@ -2,7 +2,7 @@ import operator
|
||||
|
||||
import pytest
|
||||
|
||||
from bonobo.util.objects import Wrapper, get_name, ValueHolder, get_attribute_or_create
|
||||
from bonobo.util.objects import ValueHolder, Wrapper, get_attribute_or_create, get_name
|
||||
from bonobo.util.testing import optional_contextmanager
|
||||
|
||||
|
||||
@ -65,10 +65,7 @@ def test_valueholder_notequal():
|
||||
assert not (x != 42)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('rlo,rhi', [
|
||||
(1, 2),
|
||||
('a', 'b'),
|
||||
])
|
||||
@pytest.mark.parametrize('rlo,rhi', [(1, 2), ('a', 'b')])
|
||||
def test_valueholder_ordering(rlo, rhi):
|
||||
vlo, vhi = ValueHolder(rlo), ValueHolder(rhi)
|
||||
|
||||
@ -129,15 +126,27 @@ def test_get_attribute_or_create():
|
||||
unsupported_operations = {
|
||||
int: {operator.matmul},
|
||||
str: {
|
||||
operator.sub, operator.mul, operator.matmul, operator.floordiv, operator.truediv, operator.mod, divmod,
|
||||
operator.pow, operator.lshift, operator.rshift, operator.and_, operator.xor, operator.or_
|
||||
operator.sub,
|
||||
operator.mul,
|
||||
operator.matmul,
|
||||
operator.floordiv,
|
||||
operator.truediv,
|
||||
operator.mod,
|
||||
divmod,
|
||||
operator.pow,
|
||||
operator.lshift,
|
||||
operator.rshift,
|
||||
operator.and_,
|
||||
operator.xor,
|
||||
operator.or_,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize('x,y', [(5, 3), (0, 10), (0, 0), (1, 1), ('foo', 'bar'), ('', 'baz!')])
|
||||
@pytest.mark.parametrize(
|
||||
'operation,inplace_operation', [
|
||||
'operation,inplace_operation',
|
||||
[
|
||||
(operator.add, operator.iadd),
|
||||
(operator.sub, operator.isub),
|
||||
(operator.mul, operator.imul),
|
||||
@ -152,7 +161,7 @@ unsupported_operations = {
|
||||
(operator.and_, operator.iand),
|
||||
(operator.xor, operator.ixor),
|
||||
(operator.or_, operator.ior),
|
||||
]
|
||||
],
|
||||
)
|
||||
def test_valueholder_integer_operations(x, y, operation, inplace_operation):
|
||||
v = ValueHolder(x)
|
||||
|
||||
@ -15,4 +15,4 @@ def test_resolve_options():
|
||||
|
||||
|
||||
def test_resolve_transformations():
|
||||
assert _resolve_transformations(('PrettyPrinter', )) == (bonobo.PrettyPrinter, )
|
||||
assert _resolve_transformations(('PrettyPrinter',)) == (bonobo.PrettyPrinter,)
|
||||
|
||||
@ -3,10 +3,7 @@ from bonobo.util.statistics import WithStatistics
|
||||
|
||||
class MyThingWithStats(WithStatistics):
|
||||
def get_statistics(self, *args, **kwargs):
|
||||
return (
|
||||
('foo', 42),
|
||||
('bar', 69),
|
||||
)
|
||||
return (('foo', 42), ('bar', 69))
|
||||
|
||||
|
||||
def test_with_statistics():
|
||||
|
||||