Merge pull request #2699 from activeloopai/fy_fix_data
Fix `.data()` for empty tensors
FayazRahman committed Nov 21, 2023
2 parents e4ab101 + 30ec665 commit 8eb512c
Showing 3 changed files with 24 additions and 2 deletions.
7 changes: 7 additions & 0 deletions deeplake/api/tests/test_json.py
@@ -245,3 +245,10 @@ def upload(stuff, ds):
     assert ds.list.data()["value"] == items
     assert ds.list[0].list() == items[0]
     assert ds.list.list() == items
+
+
+def test_empty_json(memory_ds):
+    with memory_ds as ds:
+        ds.create_tensor("json", htype="json")
+
+    assert ds.json.data()["value"] == []
7 changes: 7 additions & 0 deletions deeplake/api/tests/test_text.py
@@ -95,3 +95,10 @@ def test_text_tensor_append(memory_ds):
     for i in range(3):
         assert ds.x[i].data() == ds2.x[i].data()
         assert ds.y[i].data() == ds2.y[i].data()
+
+
+def test_empty_text(memory_ds):
+    with memory_ds as ds:
+        ds.create_tensor("text", htype="text")
+
+    assert ds.text.data()["value"] == []
12 changes: 10 additions & 2 deletions deeplake/core/tensor.py
@@ -1437,9 +1437,17 @@ def _extract_value(self, htype: str, fetch_chunks: bool = False):
             raise Exception(f"Only supported for {htype} tensors.")
 
         if self.ndim == 1:
-            return self.numpy(fetch_chunks=fetch_chunks)[0]
+            data = self.numpy(fetch_chunks=fetch_chunks)
+            if len(data) == 0:
+                return []
+            else:
+                return data[0]
         else:
-            return [sample[0] for sample in self.numpy(aslist=True)]
+            data = self.numpy(aslist=True, fetch_chunks=fetch_chunks)
+            if len(data) == 0:
+                return []
+            else:
+                return [sample[0] for sample in data]
 
     def text(self, fetch_chunks: bool = False):
         """Return text data. Only applicable for tensors with 'text' base htype."""