updates references to Bolt to align with restructuring #368

Open · wants to merge 1 commit into master
10 changes: 5 additions & 5 deletions test/test_images_io.py
@@ -29,11 +29,11 @@ def test_from_array(eng):
 
 
 def test_from_array_bolt(eng):
+    # only run this test for Spark mode
+    if eng is None:
+        return
     a = arange(8).reshape((1, 2, 4))
-    if eng is not None:
-        b = barray(a, context=eng)
-    else:
-        b = barray(a)
+    b = barray(a, context=eng)
     data = fromarray(b)
     assert allclose(data.shape, a.shape)
     assert allclose(data.value_shape, a.shape[1:])
@@ -318,4 +318,4 @@ def test_from_example(eng):
     data = fromexample('fish', engine=eng)
     assert allclose(data.shape, (20, 2, 76, 87))
     data = fromexample('mouse', engine=eng)
-    assert allclose(data.shape, (20, 64, 64))
\ No newline at end of file
+    assert allclose(data.shape, (20, 64, 64))
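The `if eng is None: return` guard relies on the test suite's `eng` fixture, which is not part of this diff. A minimal sketch of the kind of fixture the tests assume (hypothetical; thunder's actual conftest.py may differ):

import pytest

@pytest.fixture(scope='module', params=['local', 'spark'])
def eng(request):
    # 'local' runs the suite without Spark, so the bolt-specific tests return early
    if request.param == 'local':
        return None
    # 'spark' provides a SparkContext so barray(a, context=eng) builds a
    # Spark-backed BoltArray; teardown of the context is omitted in this sketch
    pyspark = pytest.importorskip('pyspark')
    return pyspark.SparkContext(master='local[2]', appName='thunder-bolt-tests')

With the restructured bolt there is only the Spark-backed array here, which is why the old `else: b = barray(a)` branch is dropped and the test simply skips outside Spark mode.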
8 changes: 4 additions & 4 deletions test/test_series_io.py
@@ -20,11 +20,11 @@ def test_from_array(eng):
 
 
 def test_from_array_bolt(eng):
+    # only run this test for Spark mode
+    if eng is None:
+        return
     a = arange(8, dtype='int16').reshape((4, 2))
-    if eng is not None:
-        b = barray(a, context=eng)
-    else:
-        b = barray(a)
+    b = barray(a, context=eng)
     data = fromarray(b, engine=eng)
     assert data.shape == (4, 2)
     assert data.dtype == 'int16'
9 changes: 4 additions & 5 deletions thunder/base.py
@@ -1,9 +1,8 @@
 from numpy import array, asarray, ndarray, prod, ufunc, add, subtract, \
     multiply, divide, isscalar, newaxis, unravel_index, dtype
 from bolt.utils import inshape, tupleize, slicify
-from bolt.base import BoltArray
-from bolt.spark.array import BoltArraySpark
-from bolt.spark.chunk import ChunkedArray
+from bolt.array.array import BoltArray
+from bolt.array.chunk import ChunkedArray
 from functools import reduce


@@ -254,7 +253,7 @@ def labels(self, value):
     def astype(self, dtype, casting='unsafe'):
         """
         Cast values to the specified type.
-        
+
         Parameters
         ----------
         dtype : str or dtype
@@ -545,7 +544,7 @@ def func(record):
                 return k1, op(x, y)
 
             rdd = self.tordd().zip(other.tordd()).map(func)
-            barray = BoltArraySpark(rdd, shape=self.shape, dtype=self.dtype, split=self.values.split)
+            barray = BoltArray(rdd, shape=self.shape, dtype=self.dtype, split=self.values.split)
             return self._constructor(barray).__finalize__(self)
 
     def plus(self, other):
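The import hunk above is the core of the restructuring: the Spark array and chunk modules move from bolt.spark to bolt.array, and the class is now just BoltArray. If code ever needed to run against both bolt layouts during a transition (this PR does not do that; it moves straight to the new paths), a guarded import along these lines would work:

try:
    # new bolt layout, as used by this PR
    from bolt.array.array import BoltArray
    from bolt.array.chunk import ChunkedArray
except ImportError:
    # pre-restructuring layout, with the Spark class under bolt.spark
    from bolt.spark.array import BoltArraySpark as BoltArray
    from bolt.spark.chunk import ChunkedArray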
8 changes: 4 additions & 4 deletions thunder/images/readers.py
@@ -36,7 +36,7 @@ def fromrdd(rdd, dims=None, nrecords=None, dtype=None, labels=None, ordered=Fals
         Whether or not the rdd is ordered by key
     """
     from .images import Images
-    from bolt.spark.array import BoltArraySpark
+    from bolt.array.array import BoltArray
 
     if dims is None or dtype is None:
         item = rdd.values().first()
@@ -52,7 +52,7 @@ def process_keys(record):
             k = (k,)
         return k, v
 
-    values = BoltArraySpark(rdd.map(process_keys), shape=(nrecords,) + tuple(dims), dtype=dtype, split=1, ordered=ordered)
+    values = BoltArray(rdd.map(process_keys), shape=(nrecords,) + tuple(dims), dtype=dtype, split=1, ordered=ordered)
     return Images(values, labels=labels)
 
 def fromarray(values, labels=None, npartitions=None, engine=None):
@@ -80,9 +80,9 @@ def fromarray(values, labels=None, npartitions=None, engine=None):
         Computational engine (e.g. a SparkContext for spark)
     """
     from .images import Images
-    import bolt
+    from bolt.array.array import BoltArray
 
-    if isinstance(values, bolt.spark.array.BoltArraySpark):
+    if isinstance(values, BoltArray):
         return Images(values)
 
     values = asarray(values)
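Because fromarray now checks isinstance(values, BoltArray) against the class imported from bolt.array.array, an existing Spark-backed bolt array can be handed to thunder directly, as the updated test does. A usage sketch under the same assumptions as the tests (a live SparkContext named sc, and barray as bolt's array constructor):

from numpy import arange
from bolt import array as barray   # assumed import, mirroring the test files
import thunder as td

a = arange(8).reshape((1, 2, 4))
b = barray(a, context=sc)          # Spark-backed BoltArray; sc is assumed to exist
data = td.images.fromarray(b)      # taken by the isinstance(values, BoltArray) branch
# data.shape == (1, 2, 4)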
8 changes: 4 additions & 4 deletions thunder/series/readers.py
@@ -42,7 +42,7 @@ def fromrdd(rdd, nrecords=None, shape=None, index=None, labels=None, dtype=None,
         Whether or not the rdd is ordered by key
     """
     from .series import Series
-    from bolt.spark.array import BoltArraySpark
+    from bolt.array.array import BoltArray
 
     if index is None or dtype is None:
         item = rdd.values().first()
@@ -68,7 +68,7 @@ def process_keys(record):
             k = (k,)
         return k, v
 
-    values = BoltArraySpark(rdd.map(process_keys), shape=shape, dtype=dtype, split=len(shape)-1, ordered=ordered)
+    values = BoltArray(rdd.map(process_keys), shape=shape, dtype=dtype, split=len(shape)-1, ordered=ordered)
     return Series(values, index=index, labels=labels)
 
 def fromarray(values, index=None, labels=None, npartitions=None, engine=None):
@@ -99,9 +99,9 @@ def fromarray(values, index=None, labels=None, npartitions=None, engine=None):
         Computational engine (e.g. a SparkContext for Spark)
     """
     from .series import Series
-    import bolt
+    from bolt.array.array import BoltArray
 
-    if isinstance(values, bolt.spark.array.BoltArraySpark):
+    if isinstance(values, BoltArray):
         return Series(values)
 
     values = asarray(values)
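The series reader mirrors the images reader, so the same pattern applies there, with engine passed through as in test_series_io.py. Another sketch under the same assumptions (SparkContext sc, barray as bolt's constructor):

from numpy import arange
from bolt import array as barray   # assumed import, as in the tests
import thunder as td

a = arange(8, dtype='int16').reshape((4, 2))
b = barray(a, context=sc)                 # Spark-backed BoltArray
data = td.series.fromarray(b, engine=sc)  # returns Series(values) via the BoltArray branch
# data.shape == (4, 2) and data.dtype == 'int16'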