
Commit

Better summary with short repr
sgugger committed Jan 27, 2020
1 parent 083af28 commit 17cb7fe
Showing 4 changed files with 66 additions and 90 deletions.
14 changes: 11 additions & 3 deletions fastai2/data/block.py
@@ -96,14 +96,22 @@ def dataloaders(self, source, path='.', verbose=False, **kwargs):
datasets="Create a `Datasets` object from `source`",
dataloaders="Create a `DataLoaders` object from `source`")

+# Cell
+def _short_repr(x):
+    if isinstance(x, tuple): return f'({", ".join([_short_repr(y) for y in x])})'
+    if isinstance(x, list): return f'[{", ".join([_short_repr(y) for y in x])}]'
+    if not isinstance(x, Tensor): return str(x)
+    if x.numel() <= 20 and x.ndim <=1: return str(x)
+    return f'{x.__class__.__name__} of size {"x".join([str(d) for d in x.shape])}'
+
# Cell
def _apply_pipeline(p, x):
print(f" {p}\n starting from\n {x}")
print(f" {p}\n starting from\n {_short_repr(x)}")
    for f in p.fs:
        name = f.name
        try:
            x = f(x)
if name != "noop": print(f" applying {name} gives\n {str(x)}")
if name != "noop": print(f" applying {name} gives\n {_short_repr(x)}")
        except Exception as e:
            print(f" applying {name} failed.")
            raise e
@@ -120,7 +128,7 @@ def summary(self: DataBlock, source, bs=4, **kwargs):

    dls = self.dataloaders(source, verbose=True)
    print("\nBuilding one batch")
-    if len([f for f in dls.train.before_batch.fs if f.name != 'noop'])!=0:
+    if len([f for f in dls.train.after_item.fs if f.name != 'noop'])!=0:
print("Applying item_tfms to the first sample:")
s = [_apply_pipeline(dls.train.after_item, dsets.train[0])]
print(f"\nAdding the next {bs-1} samples")
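For reference, the new _short_repr falls back to str for non-tensors and for small 1-d tensors, and otherwise prints only the class name and shape, recursing into tuples and lists. A minimal standalone sketch of that behaviour, using plain torch.Tensor in place of fastai's tensor subclasses (so the class name shown is just Tensor):

import torch

def _short_repr(x):
    # Recurse into tuples/lists so collated samples stay readable
    if isinstance(x, tuple): return f'({", ".join([_short_repr(y) for y in x])})'
    if isinstance(x, list): return f'[{", ".join([_short_repr(y) for y in x])}]'
    if not isinstance(x, torch.Tensor): return str(x)
    # Small 0-d/1-d tensors are printed in full; larger ones are summarized
    if x.numel() <= 20 and x.ndim <= 1: return str(x)
    return f'{x.__class__.__name__} of size {"x".join([str(d) for d in x.shape])}'

print(_short_repr(torch.randn(40, 56)))           # Tensor of size 40x56
print(_short_repr(torch.tensor([1, 2, 3])))       # tensor([1, 2, 3])
print(_short_repr((torch.randn(40, 56), "cat")))  # (Tensor of size 40x56, cat)

With fastai's subclasses the class name carries through, e.g. "TensorImage of size 40x56", which is what the #hide test cell added to 06_data.block.ipynb below checks.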
4 changes: 2 additions & 2 deletions nbs/05_data.transforms.ipynb
@@ -1464,8 +1464,8 @@
"Converted 38_tutorial.ulmfit.ipynb.\n",
"Converted 40_tabular.core.ipynb.\n",
"Converted 41_tabular.data.ipynb.\n",
"Converted 42_tabular.learner.ipynb.\n",
"Converted 43_tabular.model.ipynb.\n",
"Converted 42_tabular.model.ipynb.\n",
"Converted 43_tabular.learner.ipynb.\n",
"Converted 45_collab.ipynb.\n",
"Converted 50_datablock_examples.ipynb.\n",
"Converted 60_medical.imaging.ipynb.\n",
49 changes: 38 additions & 11 deletions nbs/06_data.block.ipynb
@@ -236,7 +236,7 @@
"text/markdown": [
"<h4 id=\"DataBlock.datasets\" class=\"doc_header\"><code>DataBlock.datasets</code><a href=\"__main__.py#L40\" class=\"source_link\" style=\"float:right\">[source]</a></h4>\n",
"\n",
"> <code>DataBlock.datasets</code>(**`source`**)\n",
"> <code>DataBlock.datasets</code>(**`source`**, **`verbose`**=*`False`*)\n",
"\n",
"Create a [`Datasets`](/data.core#Datasets) object from `source`"
],
@@ -260,9 +260,9 @@
{
"data": {
"text/markdown": [
"<h4 id=\"DataBlock.dataloaders\" class=\"doc_header\"><code>DataBlock.dataloaders</code><a href=\"__main__.py#L46\" class=\"source_link\" style=\"float:right\">[source]</a></h4>\n",
"<h4 id=\"DataBlock.dataloaders\" class=\"doc_header\"><code>DataBlock.dataloaders</code><a href=\"__main__.py#L47\" class=\"source_link\" style=\"float:right\">[source]</a></h4>\n",
"\n",
"> <code>DataBlock.dataloaders</code>(**`source`**, **`path`**=*`'.'`*)\n",
"> <code>DataBlock.dataloaders</code>(**`source`**, **`path`**=*`'.'`*, **`verbose`**=*`False`*)\n",
"\n",
"Create a [`DataLoaders`](/data.core#DataLoaders) object from `source`"
],
@@ -356,6 +356,34 @@
"## Debugging"
]
},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"#export\n",
+"def _short_repr(x):\n",
+" if isinstance(x, tuple): return f'({\", \".join([_short_repr(y) for y in x])})'\n",
+" if isinstance(x, list): return f'[{\", \".join([_short_repr(y) for y in x])}]'\n",
+" if not isinstance(x, Tensor): return str(x)\n",
+" if x.numel() <= 20 and x.ndim <=1: return str(x)\n",
+" return f'{x.__class__.__name__} of size {\"x\".join([str(d) for d in x.shape])}'"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"#hide\n",
+"test_eq(_short_repr(TensorImage(torch.randn(40,56))), 'TensorImage of size 40x56')\n",
+"test_eq(_short_repr(TensorCategory([1,2,3])), 'TensorCategory([1, 2, 3])')\n",
+"test_eq(_short_repr((TensorImage(torch.randn(40,56)), TensorImage(torch.randn(32,20)))),\n",
+" '(TensorImage of size 40x56, TensorImage of size 32x20)')"
+]
+},
{
"cell_type": "code",
"execution_count": null,
@@ -364,12 +392,12 @@
"source": [
"#export\n",
"def _apply_pipeline(p, x):\n",
" print(f\" {p}\\n starting from\\n {x}\")\n",
" print(f\" {p}\\n starting from\\n {_short_repr(x)}\")\n",
" for f in p.fs: \n",
" name = f.name\n",
" try: \n",
" x = f(x)\n",
" if name != \"noop\": print(f\" applying {name} gives\\n {str(x)}\")\n",
" if name != \"noop\": print(f\" applying {name} gives\\n {_short_repr(x)}\")\n",
" except Exception as e: \n",
" print(f\" applying {name} failed.\")\n",
" raise e\n",
@@ -393,7 +421,7 @@
" \n",
" dls = self.dataloaders(source, verbose=True)\n",
" print(\"\\nBuilding one batch\")\n",
" if len([f for f in dls.train.before_batch.fs if f.name != 'noop'])!=0:\n",
" if len([f for f in dls.train.after_item.fs if f.name != 'noop'])!=0:\n",
" print(\"Applying item_tfms to the first sample:\")\n",
" s = [_apply_pipeline(dls.train.after_item, dsets.train[0])]\n",
" print(f\"\\nAdding the next {bs-1} samples\")\n",
@@ -431,9 +459,8 @@
"text/markdown": [
"<h4 id=\"DataBlock.summary\" class=\"doc_header\"><code>DataBlock.summary</code><a href=\"__main__.py#L2\" class=\"source_link\" style=\"float:right\">[source]</a></h4>\n",
"\n",
"> <code>DataBlock.summary</code>()\n",
"\n",
"Prints a summary of the `Pipeline`s used in `self`"
"> <code>DataBlock.summary</code>(**`source`**, **`bs`**=*`4`*, **\\*\\*`kwargs`**)\n",
"\n"
],
"text/plain": [
"<IPython.core.display.Markdown object>"
@@ -505,8 +532,8 @@
"Converted 38_tutorial.ulmfit.ipynb.\n",
"Converted 40_tabular.core.ipynb.\n",
"Converted 41_tabular.data.ipynb.\n",
"Converted 42_tabular.learner.ipynb.\n",
"Converted 43_tabular.model.ipynb.\n",
"Converted 42_tabular.model.ipynb.\n",
"Converted 43_tabular.learner.ipynb.\n",
"Converted 45_collab.ipynb.\n",
"Converted 50_datablock_examples.ipynb.\n",
"Converted 60_medical.imaging.ipynb.\n",
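As a usage sketch (not part of this commit), DataBlock.summary is called on a data source; the dataset, labelling rule, and transforms below are placeholder choices and the exact trace depends on the data, but each pipeline step is now reported through _short_repr:

from fastai2.vision.all import *

# Placeholder data and labelling rule; any image DataBlock would do
path = untar_data(URLs.PETS)/"images"
dblock = DataBlock(blocks=(ImageBlock, CategoryBlock),
                   get_items=get_image_files,
                   get_y=lambda o: o.name.split('_')[0],  # crude label from the filename
                   splitter=RandomSplitter(seed=42),
                   item_tfms=Resize(128))

# Steps through the pipelines for one batch (bs=4 by default); tensors in the
# trace now print as e.g. "TensorImage of size 3x128x128" rather than in full
dblock.summary(path)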
