Compress output image files #30

Workflow file for this run

name: CI
on: [push, pull_request, workflow_dispatch]

jobs:
  test:
    runs-on: ubuntu-latest
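    # Exemplar image and probability map referenced by the download and test steps below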
    env:
      IMG: exemplar-001-cycle6.ome.tif
      PMAP: exemplar-001-cycle6_Probabilities_1.tif

    steps:
      - uses: actions/checkout@v3

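      # Build the default image and the large-image variant (from the large/ subdirectory)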
      - name: Build the Docker container
        run: |
          docker build -t s3seg:test .
          docker build -t s3seg:test-large large/

      # Cache test data to avoid repeated download
      - uses: actions/cache@v3
        id: cache-data
        with:
          path: ~/data/*.tif
          key: testdata-2022-05-12

      # Download test data only if no cache is present
      - name: Test data download
        if: steps.cache-data.outputs.cache-hit != 'true'
        run: |
          mkdir ~/data
          cd ~/data
          curl -f -o $IMG "https://mcmicro.s3.amazonaws.com/ci/$IMG"
          curl -f -o $PMAP "https://mcmicro.s3.amazonaws.com/ci/$PMAP"

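      # Run S3segmenter in the default image; remove any previous output directory first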
      - name: Test the default container
        run: |
          cd ~/data
          rm -rf exemplar-001-cycle6
          docker run -v "$PWD":/data s3seg:test /bin/bash -c "cd /data; \
            python /app/S3segmenter.py --imagePath $IMG --stackProbPath $PMAP --outputPath ."

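      # Repeat with the large-image variant, writing results to a separate directory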
      - name: Test the large container
        run: |
          cd ~/data
          rm -rf large
          docker run -v "$PWD":/data s3seg:test-large /bin/bash -c "cd /data; \
            python /app/S3segmenter.py --imagePath $IMG --stackProbPath $PMAP --outputPath large"

      # If the action is successful, the output will be available as a downloadable artifact
      - name: Upload processed result
        uses: actions/upload-artifact@v2
        with:
          name: ex001-s3seg
          path: |
            ~/data/exemplar-001-cycle6/**
            ~/data/large/**
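
The default-container test can also be reproduced outside CI; a minimal sketch, assuming Docker is installed, the s3seg:test image has been built as in the build step above, and both exemplar files have been downloaded into ~/data:

cd ~/data
# Same filenames as the IMG/PMAP env variables in the workflow
IMG=exemplar-001-cycle6.ome.tif
PMAP=exemplar-001-cycle6_Probabilities_1.tif
# Clear results from a previous run, then segment inside the container
rm -rf exemplar-001-cycle6
docker run -v "$PWD":/data s3seg:test /bin/bash -c "cd /data; \
  python /app/S3segmenter.py --imagePath $IMG --stackProbPath $PMAP --outputPath ."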