Skip to content

Commit

Permalink
Wasp (#198)
Browse files Browse the repository at this point in the history
* fix rendering of segmentation layer

* read the dataset smartly; better visualization
  • Loading branch information
jingpengw committed Sep 21, 2020
1 parent 5ea93ec commit b7d905e
Show file tree
Hide file tree
Showing 5 changed files with 33 additions and 22 deletions.
1 change: 1 addition & 0 deletions chunkflow/chunk/affinity_map/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,4 +14,5 @@ def quantize(self):
# if this is affinitymap
image = self[-1, :, :, :]
image = (image * 255).astype(np.uint8)
image = Chunk(image, global_offset=self.global_offset[1:])
return image
11 changes: 10 additions & 1 deletion chunkflow/chunk/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ def to_tif(self, file_name: str=None, global_offset: tuple=None):

@classmethod
def from_h5(cls, file_name: str,
dataset_path: str = '/main',
dataset_path: str = None,
cutout_start: tuple=None,
cutout_stop: tuple=None,
cutout_size: tuple=None):
Expand All @@ -124,6 +124,11 @@ def from_h5(cls, file_name: str,
voxel_offset_path = os.path.join(os.path.dirname(file_name),
'global_offset')
with h5py.File(file_name, 'r') as f:
if dataset_path is None:
for key in f.keys():
if 'offset' not in key:
# pick the first dataset whose name does not contain 'offset'
dataset_path = key
arr = f[dataset_path]

if voxel_offset_path in f:
Expand Down Expand Up @@ -160,6 +165,10 @@ def from_h5(cls, file_name: str,
]

arr = np.asarray(arr)
if arr.dtype == np.dtype('<f4'):
arr = arr.astype('float32')
elif arr.dtype == np.dtype('<f8'):
arr = arr.astype('float64')

print('voxel offset: {}'.format(cutout_start))

Expand Down
3 changes: 1 addition & 2 deletions chunkflow/chunk/image/convnet/inferencer.py
Original file line number Diff line number Diff line change
Expand Up @@ -425,8 +425,7 @@ def __call__(self, input_chunk: np.ndarray):
output_chunk = output_buffer.mask_using_last_channel(
threshold = self.mask_myelin_threshold)

# currently neuroglancer only supports float32, not float16
if output_chunk.dtype == np.dtype('float16'):
if output_chunk.dtype == np.dtype('<f4'):
output_chunk = output_chunk.astype('float32')

return output_chunk
Expand Down
19 changes: 11 additions & 8 deletions chunkflow/flow/flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -421,7 +421,7 @@ def read_tif(tasks, name: str, file_name: str, offset: tuple,
help='read file from local disk.')
@click.option('--file-name', '-f', type=str, required=True,
help='read chunk from file, support .h5')
@click.option('--dataset-path', '-d', type=str, default='/main',
@click.option('--dataset-path', '-d', type=str, default=None, callback=default_none,
help='the dataset path inside HDF5 file.')
@click.option('--cutout-start', '-t', type=int, nargs=3, callback=default_none,
help='cutout voxel offset in the array')
Expand All @@ -440,11 +440,13 @@ def read_h5(tasks, name: str, file_name: str, dataset_path: str,
for task in tasks:
start = time()
assert output_chunk_name not in task
task[output_chunk_name] = Chunk.from_h5(file_name,
dataset_path=dataset_path,
cutout_start=cutout_start,
cutout_stop=cutout_stop,
cutout_size=cutout_size)
task[output_chunk_name] = Chunk.from_h5(
file_name,
dataset_path=dataset_path,
cutout_start=cutout_start,
cutout_stop=cutout_stop,
cutout_size=cutout_size
)
task['log']['timer'][name] = time() - start
yield task

Expand Down Expand Up @@ -1193,13 +1195,14 @@ def neuroglancer(tasks, name, voxel_size, port, chunk_names):

@main.command('quantize')
@click.option('--name', type=str, default='quantize', help='name of this operator')
@click.option('--input-chunk-name', type=str, default='chunk', help = 'input chunk name')
@click.option('--output-chunk-name', type=str, default='chunk', help= 'output chunk name')
@click.option('--input-chunk-name', '-i', type=str, default='chunk', help = 'input chunk name')
@click.option('--output-chunk-name', '-o', type=str, default='chunk', help= 'output chunk name')
@operator
def quantize(tasks, name, input_chunk_name, output_chunk_name):
"""Transorm the last channel to uint8."""
for task in tasks:
aff = task[input_chunk_name]
aff = AffinityMap(aff)
assert isinstance(aff, AffinityMap)
quantized_image = aff.quantize()
task[output_chunk_name] = quantized_image
Expand Down
21 changes: 10 additions & 11 deletions chunkflow/flow/neuroglancer.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,15 +34,15 @@ def __call__(self, chunks: dict, selected: str=None):
for chunk_name in selected:
chunk = chunks[chunk_name]
global_offset = chunk.global_offset

chunk = np.ascontiguousarray(chunk)
# neuroglancer uses F order

# neuroglancer does not support int types
if np.issubdtype(chunk.dtype, np.int64):
assert chunk.min() >= 0
chunk = chunk.astype(np.uint64)
elif chunk.dtype == np.dtype('<f4'):
elif chunk.dtype == np.dtype('<f4') or chunk.dtype == np.dtype('float16'):
chunk = chunk.astype(np.float32)

if chunk.ndim == 3:
Expand All @@ -57,22 +57,21 @@ def __call__(self, chunks: dict, selected: str=None):
shader = None
else:
shader="""void main () {
emitGrayscale(toNormalized(getDataValue()));
}"""
emitGrayscale(toNormalized(getDataValue()));
}"""
elif chunk.ndim == 4:
chunk = np.transpose(chunk, axes=(0, 3, 2, 1))
dimensions = ng.CoordinateSpace(
scales=(1, *self.voxel_size[::-1]),
units=['', 'nm', 'nm', 'nm'],
names=['c^', 'x', 'y', 'z']
)
shader="""
void main() {
emitRGB(vec3(toNormalized(getDataValue(0)),
toNormalized(getDataValue(1)),
toNormalized(getDataValue(2))));
}
"""
shader="""void main() {
emitRGB(vec3(toNormalized(getDataValue(0)),
toNormalized(getDataValue(1)),
toNormalized(getDataValue(2))));
}
"""
else:
raise ValueError('only support 3/4 dimension volume.')

Expand Down

0 comments on commit b7d905e

Please sign in to comment.