Merge branch 'develop' into develop
This commit is contained in:
@ -1,6 +1,6 @@
|
||||
from bonobo.structs import Bag, Graph, Token
|
||||
from bonobo.nodes import CsvReader, CsvWriter, FileReader, FileWriter, Filter, JsonReader, JsonWriter, Limit, \
|
||||
PrettyPrinter, Tee, count, identity, noop, pprint
|
||||
PrettyPrinter, PickleWriter, PickleReader, Tee, count, identity, noop, pprint
|
||||
from bonobo.strategies import create_strategy
|
||||
from bonobo.util.objects import get_name
|
||||
|
||||
@ -42,7 +42,6 @@ def run(graph, strategy=None, plugins=None, services=None):
|
||||
plugins = plugins or []
|
||||
|
||||
from bonobo import settings
|
||||
|
||||
settings.check()
|
||||
|
||||
if not settings.QUIET: # pragma: no cover
|
||||
@ -98,6 +97,8 @@ register_api_group(
|
||||
JsonWriter,
|
||||
Limit,
|
||||
PrettyPrinter,
|
||||
PickleReader,
|
||||
PickleWriter,
|
||||
Tee,
|
||||
count,
|
||||
identity,
|
||||
|
||||
BIN
bonobo/examples/datasets/spam.tgz
Normal file
BIN
bonobo/examples/datasets/spam.tgz
Normal file
Binary file not shown.
58
bonobo/examples/files/pickle_handlers.py
Normal file
58
bonobo/examples/files/pickle_handlers.py
Normal file
@ -0,0 +1,58 @@
|
||||
import bonobo
|
||||
from fs.tarfs import TarFS
|
||||
import os
|
||||
|
||||
|
||||
def cleanse_sms(row):
    """Annotate *row* with a cleaned-up SMS text and return it.

    Spam messages are prefixed with a marker and truncated to 50
    characters (an ellipsis is appended when content was cut off);
    ham messages pass through untouched. The result is also stored
    back into ``row['sms_clean']`` as a side effect.
    """
    message = row['sms']
    if row['category'] == 'spam':
        suffix = '...' if len(message) > 50 else ''
        cleaned = '**MARKED AS SPAM** ' + message[:50] + suffix
    else:
        cleaned = message
    row['sms_clean'] = cleaned
    return cleaned
|
||||
|
||||
|
||||
# Extraction pipeline: unpickle the rows, clean the sms column, dump to stdout.
graph = bonobo.Graph(
    bonobo.PickleReader('spam.pkl'),  # spam.pkl is within the gzipped tarball
    cleanse_sms,
    print,
)
|
||||
|
||||
|
||||
if __name__ == '__main__':

    '''
    This example shows how a different file system service can be injected
    into a transformation (as compressing pickled objects often makes sense
    anyways). The pickle itself contains a list of lists as follows:

    ```
    [
        ['category', 'sms'],
        ['ham', 'Go until jurong point, crazy..'],
        ['ham', 'Ok lar... Joking wif u oni...'],
        ['spam', 'Free entry in 2 a wkly comp to win...'],
        ['ham', 'U dun say so early hor... U c already then say...'],
        ['ham', 'Nah I don't think he goes to usf, he lives around here though'],
        ['spam', 'FreeMsg Hey there darling it's been 3 week's now...'],
        ...
    ]
    ```

    where the first column categorizes and sms as "ham" or "spam". The second
    column contains the sms itself.

    Data set taken from:
    https://www.kaggle.com/uciml/sms-spam-collection-dataset/downloads/sms-spam-collection-dataset.zip

    The transformation (1) reads the pickled data, (2) marks and shortens
    messages categorized as spam, and (3) prints the output.
    '''

    # Mount the example tarball as the 'fs' service so PickleReader opens
    # spam.pkl from inside the archive instead of the local directory.
    archive = os.path.join(bonobo.get_examples_path(), 'datasets', 'spam.tgz')
    bonobo.run(graph, services={'fs': TarFS(archive)})
|
||||
@ -3,6 +3,7 @@
|
||||
from .file import FileReader, FileWriter
|
||||
from .json import JsonReader, JsonWriter
|
||||
from .csv import CsvReader, CsvWriter
|
||||
from .pickle import PickleReader, PickleWriter
|
||||
|
||||
__all__ = [
|
||||
'CsvReader',
|
||||
@ -11,4 +12,6 @@ __all__ = [
|
||||
'FileWriter',
|
||||
'JsonReader',
|
||||
'JsonWriter',
|
||||
'PickleReader',
|
||||
'PickleWriter',
|
||||
]
|
||||
|
||||
69
bonobo/nodes/io/pickle.py
Normal file
69
bonobo/nodes/io/pickle.py
Normal file
@ -0,0 +1,69 @@
|
||||
import pickle
|
||||
|
||||
from bonobo.config.processors import ContextProcessor
|
||||
from bonobo.config import Option
|
||||
from bonobo.constants import NOT_MODIFIED
|
||||
from bonobo.util.objects import ValueHolder
|
||||
from .file import FileReader, FileWriter, FileHandler
|
||||
|
||||
|
||||
class PickleHandler(FileHandler):
    """Shared configuration base for the pickle reader and writer nodes.

    .. attribute:: item_names

        Tuple of field names for the pickled items; when left unset, the
        names are taken from the first item found in the pickle instead.

    """

    item_names = Option(tuple)
|
||||
|
||||
|
||||
class PickleReader(PickleHandler, FileReader):
    """
    Reads a Python pickle object and yields the items in dicts.

    NOTE(review): ``pickle.load`` on untrusted input can execute arbitrary
    code — only point this reader at trusted data sources.
    """

    # Pickle streams are binary; override FileReader's default text mode.
    mode = Option(str, default='rb')

    @ContextProcessor
    def pickle_headers(self, context, fs, file):
        # Per-execution state: the field names used to build output dicts.
        # Seeded from the item_names option; may instead be filled from the
        # first item of the pickle (see read()).
        yield ValueHolder(self.item_names)

    def read(self, fs, file, pickle_headers):
        """Load the pickle from *file* and yield one dict per item.

        :raises ValueError: when an item's length does not match the header
            length.
        """
        data = pickle.load(file)

        # Normalize to an iterator: dicts iterate over their (key, value)
        # pairs; a non-iterable object is wrapped in a one-element list so
        # it may be iterated.
        if isinstance(data, dict):
            is_dict = True
            iterator = iter(data.items())
        else:
            is_dict = False
            try:
                iterator = iter(data)
            except TypeError:
                iterator = iter([data])

        # When no explicit item_names were configured, the first item acts
        # as the header row and is consumed (not yielded).
        # NOTE(review): for a dict input without item_names this consumes
        # the first (key, value) pair as "headers" — confirm that is the
        # intended behavior.
        if not pickle_headers.get():
            pickle_headers.set(next(iterator))

        item_count = len(pickle_headers.value)

        for i in iterator:
            if len(i) != item_count:
                raise ValueError('Received an object with %d items, expecting %d.' % (len(i), item_count, ))

            # BUG FIX: the original yielded dict(zip(i)), which always raises
            # for a (key, value) pair — zip() with a single argument produces
            # 1-tuples, which dict() rejects ("sequence element #0 has length
            # 1; 2 is required"). Wrapping the pair in a list builds the
            # intended single-entry mapping.
            yield dict([i]) if is_dict else dict(zip(pickle_headers.value, i))
|
||||
|
||||
|
||||
class PickleWriter(PickleHandler, FileWriter):
    """Serializes incoming items with :mod:`pickle` and appends them to a file."""

    # Pickle output is binary; override FileWriter's default text mode.
    mode = Option(str, default='wb')

    def write(self, fs, file, lineno, item):
        """
        Write a pickled item to the opened file.
        """
        payload = pickle.dumps(item)
        file.write(payload)
        # presumably lineno is a mutable counter (ValueHolder-like) whose
        # in-place += tracks how many items were written — TODO confirm.
        lineno += 1
        return NOT_MODIFIED
|
||||
Reference in New Issue
Block a user