Skip to content

Commit

Permalink
slurm cluster is running, will see whether it is correct or not
Browse files Browse the repository at this point in the history
  • Loading branch information
jingpengw committed Oct 29, 2020
1 parent c6c9650 commit ed91ac4
Show file tree
Hide file tree
Showing 4 changed files with 16 additions and 11 deletions.
5 changes: 4 additions & 1 deletion chunkflow/chunk/image/convnet/inferencer.py
Expand Up @@ -60,7 +60,10 @@ def __init__(self,
self.input_size = input_size

if output_crop_margin is None:
self.output_crop_margin = self.output_patch_overlap
if mask_output_chunk:
self.output_crop_margin = (0,0,0)
else:
self.output_crop_margin = self.output_patch_overlap
else:
self.output_crop_margin = output_crop_margin
# we should always crop more than the patch overlap
Expand Down
18 changes: 9 additions & 9 deletions chunkflow/flow/flow.py
Expand Up @@ -227,25 +227,24 @@ def create_info(tasks,layer_path, channel_num, layer_type, data_type, encoding,
type=click.Path(file_okay=True, dir_okay=False, exists=True,
readable=True, resolve_path=True),
help='file contains bounding boxes or tasks.')
@click.option('--task-index', '-i',
@click.option('--job-index', '-i',
type=int, default=None,
help='index of task in the tasks.')
@click.option('--slurm-job-array/--no-slurm-job-array',
default=False, help='use the slurm job array '+
'environment variable to identify task index.')
@click.option('--granularity', '-g',
type=int, default=1, help='number of tasks to do in one run.')
# NOTE(review): this span is a scraped unified diff with the +/- markers
# stripped, so pre-commit (removed) and post-commit (added) lines appear
# interleaved; duplicated pairs are flagged inline below.
@operator  # pre-commit decorator + signature (replaced by @generator form below)
def fetch_task_from_file(tasks, file_path, task_index, slurm_job_array, granularity):
@generator  # post-commit decorator + signature
def fetch_task_from_file(file_path: str, job_index: int, slurm_job_array: bool, granularity: int):
    """Fetch a slice of tasks from a bounding-box file and emit one task per box.

    NOTE(review): the function body continues past this scraped excerpt
    (presumably yielding ``task``) — the tail is not visible here.
    """
    # When running inside a Slurm job array, derive the job index from the
    # environment instead of the --job-index option.
    if(slurm_job_array):
        # NOTE(review): os.environ values are *strings*. ``== 0`` is always
        # False (assert always fails) and ``>= 0`` raises TypeError on
        # Python 3 — compare against '0' or cast with int() first. Verify
        # against a real Slurm run before trusting these asserts.
        assert os.environ['SLURM_ARRAY_JOB_ID'] == 0
        assert os.environ['SLURM_ARRAY_TASK_ID'] >= 0
        task_index = os.environ['SLURM_ARRAY_TASK_ID']  # pre-commit line (string, never cast)
    assert task_index is not None  # pre-commit line
        job_index = int(os.environ['SLURM_ARRAY_TASK_ID'])  # post-commit replacement (casts to int)
    assert job_index is not None

    # Load all bounding boxes; each job processes ``granularity`` consecutive
    # boxes starting at its own offset into the array.
    bbox_array = np.load(file_path)
    task_stop = min(bbox_array.shape[0], task_index + granularity)  # pre-commit line
    for idx in range(task_index, task_stop):  # pre-commit line
    task_start = job_index * granularity  # post-commit: offset by job index
    task_stop = min(bbox_array.shape[0], task_start + granularity)  # clamp to array length
    for idx in range(task_start, task_stop):
        bbox = Bbox.from_list(bbox_array[idx, :])
        task = get_initial_task()
        task['bbox'] = bbox
Expand Down Expand Up @@ -1230,6 +1229,7 @@ def neuroglancer(tasks, name, voxel_size, port, chunk_names):
state['operators'][name](task, selected=chunk_names)
yield task


@main.command('quantize')
@click.option('--name', type=str, default='quantize', help='name of this operator')
@click.option('--input-chunk-name', '-i', type=str, default='chunk', help = 'input chunk name')
Expand Down
3 changes: 3 additions & 0 deletions chunkflow/plugins/mapto01.py
@@ -1,6 +1,9 @@
import numpy as np

def exec(chunk: np.ndarray):
    """Normalize an image chunk so its values lie within [-1, 1].

    NOTE(review): ``exec`` shadows the Python builtin of the same name;
    presumably the plugin framework requires this exact name — confirm
    before renaming.
    """
    # uint8 images are rescaled from [0, 255] to [0.0, 1.0] as float32.
    if np.issubdtype(chunk.dtype, np.uint8):
        chunk = chunk.astype(np.float32)
        chunk = chunk / 255.0
    # NOTE(review): rebinding ``chunk`` only changes the local name; unless
    # the remainder of this file (cut off in this excerpt, see "Expand Down")
    # returns ``chunk``, the converted array never reaches the caller — verify.
    # Postconditions: chunk must already be float32 and normalized. The -1
    # lower bound presumably admits signed normalized inputs — confirm intent.
    assert np.issubdtype(chunk.dtype, np.float32)
    assert chunk.min() >= -1
    assert chunk.max() <= 1
Expand Down
1 change: 0 additions & 1 deletion requirements.txt
@@ -1,4 +1,3 @@
python-dateutil==2.8.0
boto3
numpy>=1.16
six>=1.13.0
Expand Down

0 comments on commit ed91ac4

Please sign in to comment.