Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Explore and view Neuroglancer Precomputed image volumes #6716

Merged
merged 29 commits into from
Feb 9, 2023
Merged
Show file tree
Hide file tree
Changes from 19 commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
45a558e
WIP: Precomputed
frcroth Dec 20, 2022
b1b387e
--wip-- [skip ci]
frcroth Jan 5, 2023
a8bf9ea
Merge branch 'master' into precomputed
frcroth Jan 5, 2023
06e7b63
WIP: Separate header and scaleheader
frcroth Jan 9, 2023
5f3737c
Fix metadata reading
frcroth Jan 10, 2023
786fd8f
Implement reading of precomputed data sets
frcroth Jan 13, 2023
93b2132
Kill all listeners
frcroth Jan 13, 2023
4f48a8b
Format
frcroth Jan 13, 2023
2b80411
WIP: Fix precomputed reading
frcroth Jan 19, 2023
2d0e09b
WIP
frcroth Jan 31, 2023
97f7119
Pretty print
frcroth Feb 2, 2023
ee73fe1
Remove unused code
frcroth Feb 2, 2023
173efcd
Merge branch 'master' into precomputed
frcroth Feb 2, 2023
d482060
Fix FileSystem things introduced by merge
frcroth Feb 2, 2023
0470031
Implement exploration of precomputed datasets
frcroth Feb 3, 2023
f7d9f75
Revert minor changes
frcroth Feb 3, 2023
b23e858
Remove handling for specific google cloud url schema
frcroth Feb 3, 2023
0479ef4
Fix compile warnings
frcroth Feb 3, 2023
44265df
Create precomputed segmentation layer when type=segmentation
frcroth Feb 3, 2023
3b1968b
Apply suggestions from code review
frcroth Feb 6, 2023
9291caa
Add changelog entry
frcroth Feb 6, 2023
40ec8ec
Merge branch 'master' into precomputed
frcroth Feb 6, 2023
2e13a2b
Update docs
frcroth Feb 6, 2023
1870367
Adjust frontend
frcroth Feb 6, 2023
0fa9b30
Prevent exploration of sharded data
frcroth Feb 6, 2023
053b047
Format backend
frcroth Feb 6, 2023
d7908e2
Merge branch 'master' into precomputed
frcroth Feb 7, 2023
a99072b
Explain some things better
frcroth Feb 7, 2023
838db4b
Merge branch 'master' into precomputed
frcroth Feb 9, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 16 additions & 1 deletion app/models/binary/explore/ExploreRemoteLayerService.scala
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,10 @@ package models.binary.explore
import com.scalableminds.util.geometry.{Vec3Double, Vec3Int}
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.dataformats.n5.{N5DataLayer, N5SegmentationLayer}
import com.scalableminds.webknossos.datastore.dataformats.precomputed.{
PrecomputedDataLayer,
PrecomputedSegmentationLayer
}
import com.scalableminds.webknossos.datastore.dataformats.zarr._
import com.scalableminds.webknossos.datastore.datareaders.n5.N5Header
import com.scalableminds.webknossos.datastore.datareaders.zarr._
Expand Down Expand Up @@ -130,6 +134,12 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService)
case l: N5SegmentationLayer =>
l.copy(mags = l.mags.map(mag => mag.copy(mag = mag.mag * magFactors)),
boundingBox = l.boundingBox * magFactors)
case l: PrecomputedDataLayer =>
l.copy(mags = l.mags.map(mag => mag.copy(mag = mag.mag * magFactors)),
boundingBox = l.boundingBox * magFactors)
case l: PrecomputedSegmentationLayer =>
l.copy(mags = l.mags.map(mag => mag.copy(mag = mag.mag * magFactors)),
boundingBox = l.boundingBox * magFactors)
case _ => throw new Exception("Encountered unsupported layer format during explore remote")
}
})
Expand All @@ -156,7 +166,12 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService)
remotePath,
credentialId.map(_.toString),
reportMutable,
List(new ZarrArrayExplorer, new NgffExplorer, new N5ArrayExplorer, new N5MultiscalesExplorer))
List(new ZarrArrayExplorer,
new NgffExplorer,
new N5ArrayExplorer,
new N5MultiscalesExplorer,
new PrecomputedExplorer)
)
} yield layersWithVoxelSizes

private def normalizeUri(uri: String): String =
Expand Down
64 changes: 64 additions & 0 deletions app/models/binary/explore/PrecomputedExplorer.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
package models.binary.explore
import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.dataformats.MagLocator
import com.scalableminds.webknossos.datastore.dataformats.precomputed.{
PrecomputedDataLayer,
PrecomputedLayer,
PrecomputedSegmentationLayer
}
import com.scalableminds.webknossos.datastore.datareaders.AxisOrder
import com.scalableminds.webknossos.datastore.datareaders.precomputed.{PrecomputedHeader, PrecomputedScale}
import com.scalableminds.webknossos.datastore.models.datasource.{Category, ElementClass}

import java.nio.file.Path
import scala.concurrent.ExecutionContext.Implicits.global

class PrecomputedExplorer extends RemoteLayerExplorer {
  override def name: String = "Neuroglancer Precomputed"

  /** Looks for a Neuroglancer Precomputed "info" file at the remote path and
    * builds a single layer (plus its voxel size) from the metadata found there.
    */
  override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(PrecomputedLayer, Vec3Double)]] =
    for {
      infoPath <- Fox.successful(remotePath.resolve(PrecomputedHeader.METADATA_PATH))
      precomputedHeader <- parseJsonFromPath[PrecomputedHeader](infoPath) ?~> s"Failed to read Precomputed metadata at $infoPath"
      layerAndVoxelSize <- layerFromPrecomputedHeader(precomputedHeader, remotePath, credentialId)
    } yield List(layerAndVoxelSize)

  /** Translates a parsed Precomputed header into a WEBKNOSSOS layer.
    *
    * The first scale in the header is treated as the finest one: its size
    * defines the bounding box and its resolution defines the dataset voxel
    * size; all other scales become coarser mags relative to it.
    * NOTE(review): assumes `scales` is non-empty and sorted finest-first —
    * confirm against the Precomputed spec / writing side.
    */
  private def layerFromPrecomputedHeader(precomputedHeader: PrecomputedHeader,
                                         remotePath: Path,
                                         credentialId: Option[String]): Fox[(PrecomputedLayer, Vec3Double)] =
    for {
      name <- guessNameFromPath(remotePath)
      boundingBox <- BoundingBox.fromSize(precomputedHeader.scales.head.size).toFox
      elementClass: ElementClass.Value <- elementClassFromPrecomputedDataType(precomputedHeader.data_type) ?~> "Unknown data type"
      smallestResolution = precomputedHeader.scales.head.resolution
      voxelSize <- Vec3Int.fromList(smallestResolution.toList).toFox
      mags: Seq[MagLocator] <- Fox.serialCombined(precomputedHeader.scales)(
        getMagFromScale(_, smallestResolution, remotePath, credentialId))
      layer = if (precomputedHeader.describesSegmentationLayer) {
        PrecomputedSegmentationLayer(name, boundingBox, elementClass, mags.toList, None)
      } else PrecomputedDataLayer(name, boundingBox, Category.color, elementClass, mags.toList)
    } yield (layer, Vec3Double.fromVec3Int(voxelSize))

  /** Maps a Precomputed data_type string to the WEBKNOSSOS element class;
    * None (→ failed Fox) for unsupported types.
    */
  private def elementClassFromPrecomputedDataType(precomputedDataType: String): Fox[ElementClass.Value] =
    precomputedDataType.toLowerCase match {
      case "uint8"   => Some(ElementClass.uint8)
      case "uint16"  => Some(ElementClass.uint16)
      case "uint32"  => Some(ElementClass.uint32)
      case "uint64"  => Some(ElementClass.uint64)
      case "float32" => Some(ElementClass.float)
      case _         => None
    }

  /** Builds a MagLocator for one scale. The mag is the scale's resolution
    * divided component-wise (integer division) by the finest resolution.
    */
  private def getMagFromScale(scale: PrecomputedScale,
                              minimalResolution: Array[Int],
                              remotePath: Path,
                              credentialId: Option[String]): Fox[MagLocator] = {
    val normalizedResolution = (scale.resolution, minimalResolution).zipped.map((r, m) => r / m)
    for {
      mag <- Vec3Int.fromList(normalizedResolution.toList)
      path = remotePath.resolve(scale.key)
      axisOrder = AxisOrder(0, 1, 2)
    } yield MagLocator(mag, Some(path.toString), None, Some(axisOrder), channelIndex = None, credentialId)
  }
}
4 changes: 2 additions & 2 deletions conf/application.conf
Original file line number Diff line number Diff line change
Expand Up @@ -293,5 +293,5 @@ pidfile.path = "/dev/null"


# uncomment these lines for faster restart during local backend development (but beware the then-missing features):
#slick.checkSchemaOnStartup = false
#play.modules.disabled += "play.modules.swagger.SwaggerModule"
53 changes: 53 additions & 0 deletions frontend/javascripts/types/schemas/datasource.schema.ts
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,56 @@ export default {
},
required: ["dataFormat", "mags"],
},
"types::DataLayerPrecomputedPartial": {
title: "DataLayerPrecomputed",
type: "object",
properties: {
dataFormat: {
const: "precomputed",
},
boundingBox: {
$ref: "#/definitions/types::BoundingBox",
},
numChannels: {
type: "number",
},
mags: {
type: "array",
items: {
type: "object",
properties: {
mag: {
anyOf: [
{
type: "number",
},
{
$ref: "#/definitions/types::Vector3",
},
],
},
path: {
type: "string",
},
credentials: {
type: "object",
properties: {
user: { type: "string" },
password: { type: "string" },
},
required: ["user", "password"],
},
axisOrder: {
type: "object",
additionalProperties: { type: "number" },
},
},
required: ["mag"],
},
},
},
required: ["dataFormat", "mags"],
},
"types::DataLayer": {
title: "DataLayer",
allOf: [
Expand Down Expand Up @@ -246,6 +296,9 @@ export default {
{
$ref: "#/definitions/types::DataLayerN5Partial",
},
{
$ref: "#/definitions/types::DataLayerPrecomputedPartial",
},
],
},
],
Expand Down
5 changes: 4 additions & 1 deletion frontend/javascripts/types/schemas/datasource.types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,9 @@ type DataLayerZarrPartial = BaseRemoteLayer & {
type DataLayerN5Partial = BaseRemoteLayer & {
dataFormat: "n5";
};
// Remote layer stored in Neuroglancer Precomputed format.
type DataLayerPrecomputedPartial = BaseRemoteLayer & {
dataFormat: "precomputed";
};
export type DataLayer = {
name: string;
category: "color" | "segmentation";
Expand All @@ -59,7 +62,7 @@ export type DataLayer = {
mappings: Array<string>;
}
) &
(DataLayerWKWPartial | DataLayerZarrPartial | DataLayerN5Partial);
(DataLayerWKWPartial | DataLayerZarrPartial | DataLayerN5Partial | DataLayerPrecomputedPartial);
export type DatasourceConfiguration = {
id: {
name: string;
Expand Down
5 changes: 3 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -84,8 +84,9 @@
"build": "node --max-old-space-size=4096 node_modules/.bin/webpack --env production",
"build-dev": "node_modules/.bin/webpack",
"build-watch": "node_modules/.bin/webpack -w",
"listening": "lsof -i:7155,9000,9001,9002",
"kill-listeners": "kill $(lsof -t -i:7155,9000,9001,9002)",
"listening": "lsof -i:5005,7155,9000,9001,9002",
"kill-listeners": "kill -9 $(lsof -t -i:5005,7155,9000,9001,9002)",
"rm-lock": "rm fossildb/data/LOCK",
frcroth marked this conversation as resolved.
Show resolved Hide resolved
"test": "tools/test.sh test --timeout=30s",
"test-changed": "tools/test.sh test-changed --timeout=30s",
"test-verbose": "xvfb-run -s '-ac -screen 0 1280x1024x24' tools/test.sh test --timeout=60s --verbose",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -118,5 +118,11 @@ object BoundingBox {
None
}

/** Builds a bounding box anchored at the origin (0, 0, 0) whose extents are
  * taken from a width/height/depth array. Returns None unless exactly three
  * dimensions are supplied.
  */
def fromSize(size: Array[Int]): Option[BoundingBox] =
  size.length match {
    case 3 => Some(BoundingBox(Vec3Int(0, 0, 0), size(0), size(1), size(2)))
    case _ => None
  }

implicit val jsonFormat: OFormat[BoundingBox] = Json.format[BoundingBox]
}
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,9 @@ object Vec3Double {
case _: NumberFormatException => None
}

/** Widens an integer vector to a double vector component-wise. */
def fromVec3Int(v: Vec3Int): Vec3Double =
  Vec3Double(v.x, v.y, v.z)

implicit object Vector3DReads extends Format[Vec3Double] {
def reads(json: JsValue): JsResult[Vec3Double] = json match {
case JsArray(ts) if ts.size == 3 =>
Expand Down
6 changes: 3 additions & 3 deletions util/src/main/scala/com/scalableminds/util/tools/Fox.scala
Original file line number Diff line number Diff line change
Expand Up @@ -207,15 +207,15 @@ object Fox extends FoxImplicits {
} yield ()

/** Chains a list of Fox-returning functions sequentially: the result of each
  * function is fed into the next. An empty list yields the identity function.
  * Recursion depth equals the list length — fine for the short chains this is
  * used for; not stack-safe for very long lists.
  */
def chainFunctions[T](functions: List[T => Fox[T]])(implicit ec: ExecutionContext): T => Fox[T] = {
  def runNext(remainingFunctions: List[T => Fox[T]], previousResult: T): Fox[T] =
    remainingFunctions match {
      case head :: tail =>
        for {
          currentResult <- head(previousResult)
          nextResult <- runNext(tail, currentResult)
        } yield nextResult
      case Nil =>
        Fox.successful(previousResult)
    }
  t =>
    runNext(functions, t)
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
package com.scalableminds.webknossos.datastore.dataformats.precomputed

import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.requestlogging.RateLimitedErrorLogging
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator}
import com.scalableminds.webknossos.datastore.datareaders.precomputed.PrecomputedArray
import com.scalableminds.webknossos.datastore.models.BucketPosition
import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
import com.scalableminds.webknossos.datastore.storage.FileSystemService
import com.typesafe.scalalogging.LazyLogging
import net.liftweb.common.{Empty, Failure, Full}
import net.liftweb.util.Helpers.tryo

import java.nio.file.Path
import scala.concurrent.ExecutionContext

class PrecomputedCubeHandle(precomputedArray: PrecomputedArray)
    extends DataCubeHandle
    with LazyLogging
    with RateLimitedErrorLogging {

  /** Reads one bucket's worth of bytes from the underlying precomputed array.
    * Failures are logged (rate-limited) and surfaced as a Failure box.
    */
  def cutOutBucket(bucket: BucketPosition)(implicit ec: ExecutionContext): Fox[Array[Byte]] = {
    val bucketShape = Vec3Int.full(bucket.bucketLength)
    val bucketOffset = Vec3Int(bucket.voxelXInMag, bucket.voxelYInMag, bucket.voxelZInMag)
    precomputedArray.readBytesXYZ(bucketShape, bucketOffset).recover {
      case t: Throwable =>
        logError(t)
        Failure(t.getMessage, Full(t), Empty)
    }
  }

  // No-op: this handle holds no resources that need releasing.
  override protected def onFinalize(): Unit = ()

}

class PrecomputedBucketProvider(layer: PrecomputedLayer, val fileSystemServiceOpt: Option[FileSystemService])
    extends BucketProvider
    with LazyLogging
    with RateLimitedErrorLogging {

  /** Opens a cube handle for the bucket requested in the read instruction.
    * Returns an empty Fox if the layer has no mag matching the bucket's
    * magnification, or if no FileSystemService is available.
    */
  override def loadFromUnderlying(readInstruction: DataReadInstruction)(
      implicit ec: ExecutionContext): Fox[PrecomputedCubeHandle] = {
    // Find the mag locator matching the requested bucket's magnification.
    val precomputedMagOpt: Option[MagLocator] =
      layer.mags.find(_.mag == readInstruction.bucket.mag)

    precomputedMagOpt match {
      case None => Fox.empty
      case Some(precomputedMag) =>
        fileSystemServiceOpt match {
          case Some(fileSystemService: FileSystemService) =>
            for {
              // Remote mags are resolved through the file system service;
              // local ones relative to the read instruction's base path.
              magPath: Path <- if (precomputedMag.isRemote) {
                for {
                  remoteSource <- fileSystemService.remoteSourceFor(precomputedMag)
                  remotePath <- remotePathFrom(remoteSource)
                } yield remotePath
              } else localPathFrom(readInstruction, precomputedMag.pathWithFallback)
              cubeHandle <- tryo(onError = e => logError(e))(
                PrecomputedArray.open(magPath, precomputedMag.axisOrder, precomputedMag.channelIndex))
                .map(new PrecomputedCubeHandle(_))
            } yield cubeHandle
          case None => Empty
        }

    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
package com.scalableminds.webknossos.datastore.dataformats.precomputed

import com.scalableminds.util.geometry.{BoundingBox, Vec3Int}
import com.scalableminds.webknossos.datastore.dataformats.MagLocator
import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration
import com.scalableminds.webknossos.datastore.models.datasource.{
Category,
DataFormat,
DataLayer,
ElementClass,
SegmentationLayer
}
import com.scalableminds.webknossos.datastore.storage.FileSystemService
import play.api.libs.json.{Json, OFormat}

/** Shared behavior for WEBKNOSSOS layers backed by Neuroglancer Precomputed storage. */
trait PrecomputedLayer extends DataLayer {

  val dataFormat: DataFormat.Value = DataFormat.precomputed

  // Explicit return type added: public API members should not rely on inference.
  def bucketProvider(fileSystemServiceOpt: Option[FileSystemService]): PrecomputedBucketProvider =
    new PrecomputedBucketProvider(this, fileSystemServiceOpt)

  def resolutions: List[Vec3Int] = mags.map(_.mag)

  def mags: List[MagLocator]

  def lengthOfUnderlyingCubes(resolution: Vec3Int): Int = Int.MaxValue // Prevents the wkw-shard-specific handle caching

  // uint24 data is interpreted as 3-channel (RGB); everything else as single-channel.
  def numChannels: Option[Int] = Some(if (elementClass == ElementClass.uint24) 3 else 1)
}

/** Color / raw-image layer stored in Neuroglancer Precomputed format. */
case class PrecomputedDataLayer(
name: String,
boundingBox: BoundingBox,
category: Category.Value,
elementClass: ElementClass.Value,
mags: List[MagLocator],
defaultViewConfiguration: Option[LayerViewConfiguration] = None,
adminViewConfiguration: Option[LayerViewConfiguration] = None,
override val numChannels: Option[Int] = Some(1)
) extends PrecomputedLayer

object PrecomputedDataLayer {
// JSON (de)serialization derived from the case class fields.
implicit val jsonFormat: OFormat[PrecomputedDataLayer] = Json.format[PrecomputedDataLayer]
}

/** Segmentation layer stored in Neuroglancer Precomputed format. */
case class PrecomputedSegmentationLayer(
name: String,
boundingBox: BoundingBox,
elementClass: ElementClass.Value,
mags: List[MagLocator],
largestSegmentId: Option[Long],
mappings: Option[Set[String]] = None,
defaultViewConfiguration: Option[LayerViewConfiguration] = None,
adminViewConfiguration: Option[LayerViewConfiguration] = None,
override val numChannels: Option[Int] = Some(1)
) extends SegmentationLayer
with PrecomputedLayer

object PrecomputedSegmentationLayer {
// JSON (de)serialization derived from the case class fields.
implicit val jsonFormat: OFormat[PrecomputedSegmentationLayer] = Json.format[PrecomputedSegmentationLayer]
}
Loading