From 6b65572f2f54ab0f4d6f96a3382c49c4a2d0f98a Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 29 Jan 2024 15:12:21 +0100 Subject: [PATCH 01/14] ND data loading with other axis orders --- .../dataformats/BucketProvider.scala | 11 ++- .../dataformats/DataCubeHandle.scala | 12 --- .../dataformats/DatasetArrayHandle.scala | 22 +++++ .../dataformats/n5/N5BucketProvider.scala | 24 +---- .../PrecomputedBucketProvider.scala | 20 +--- .../dataformats/wkw/WKWBucketProvider.scala | 20 +--- .../dataformats/zarr/ZarrBucketProvider.scala | 29 +----- .../zarr3/Zarr3BucketProvider.scala | 24 +---- .../datastore/datareaders/AxisOrder.scala | 16 ++-- .../datastore/datareaders/DatasetArray.scala | 94 ++++++++++--------- .../datareaders/MultiArrayUtils.scala | 2 +- .../datastore/datareaders/wkw/WKWArray.scala | 4 +- .../datareaders/zarr3/Zarr3Array.scala | 4 +- .../models/datasource/DataLayer.scala | 6 -- .../datastore/storage/DataCubeCache.scala | 10 +- 15 files changed, 122 insertions(+), 176 deletions(-) delete mode 100644 webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DataCubeHandle.scala create mode 100644 webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayHandle.scala diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala index 609e21ad9c1..b0662567928 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala @@ -14,19 +14,20 @@ trait BucketProvider extends FoxImplicits with LazyLogging { def remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService] // To be defined in subclass. 
- def openShardOrArrayHandle(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[DataCubeHandle] = + def openDatasetArrayHandle(readInstruction: DataReadInstruction)( + implicit ec: ExecutionContext): Fox[DatasetArrayHandle] = Empty def load(readInstruction: DataReadInstruction, cache: DataCubeCache)( implicit ec: ExecutionContext): Fox[Array[Byte]] = - cache.withCache(readInstruction)(openShardOrArrayHandleWithTimeout)( + cache.withCache(readInstruction)(openDatasetArrayHandleWithTimeout)( _.cutOutBucket(readInstruction.bucket, readInstruction.dataLayer)) - private def openShardOrArrayHandleWithTimeout(readInstruction: DataReadInstruction)( - implicit ec: ExecutionContext): Fox[DataCubeHandle] = { + private def openDatasetArrayHandleWithTimeout(readInstruction: DataReadInstruction)( + implicit ec: ExecutionContext): Fox[DatasetArrayHandle] = { val t = System.currentTimeMillis for { - result <- openShardOrArrayHandle(readInstruction).futureBox + result <- openDatasetArrayHandle(readInstruction).futureBox duration = System.currentTimeMillis - t _ = if (duration > 500) { val className = this.getClass.getName.split("\\.").last diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DataCubeHandle.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DataCubeHandle.scala deleted file mode 100644 index 9aee27d3b8e..00000000000 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DataCubeHandle.scala +++ /dev/null @@ -1,12 +0,0 @@ -package com.scalableminds.webknossos.datastore.dataformats - -import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.models.BucketPosition -import com.scalableminds.webknossos.datastore.models.datasource.DataLayer - -import scala.concurrent.ExecutionContext - -// To be implemented as handle for a cube (e.g. 
may correspond to one 1GB wkw file) -trait DataCubeHandle extends SafeCachable { - def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] -} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayHandle.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayHandle.scala new file mode 100644 index 00000000000..d91277a41d3 --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayHandle.scala @@ -0,0 +1,22 @@ +package com.scalableminds.webknossos.datastore.dataformats + +import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.datareaders.DatasetArray +import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrArray +import com.scalableminds.webknossos.datastore.models.BucketPosition +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, ElementClass} + +import scala.concurrent.ExecutionContext + +class DatasetArrayHandle(datasetArray: DatasetArray) extends SafeCachable { + def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { + val shape = Vec3Int.full(bucket.bucketLength) + val offset = Vec3Int(bucket.topLeft.voxelXInMag, bucket.topLeft.voxelYInMag, bucket.topLeft.voxelZInMag) + + datasetArray.readBytesWithAdditionalCoordinates(shape, + offset, + bucket.additionalCoordinates, + dataLayer.elementClass == ElementClass.uint24) + } +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala index 71b464345cc..4d75a1dd7f1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala +++ 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala @@ -1,13 +1,11 @@ package com.scalableminds.webknossos.datastore.dataformats.n5 import com.scalableminds.util.cache.AlfuCache -import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator} +import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DatasetArrayHandle, MagLocator} import com.scalableminds.webknossos.datastore.datareaders.n5.N5Array import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.BucketPosition -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, ElementClass} +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import com.typesafe.scalalogging.LazyLogging @@ -16,18 +14,6 @@ import ucar.ma2.{Array => MultiArray} import scala.concurrent.ExecutionContext -class N5CubeHandle(n5Array: N5Array) extends DataCubeHandle with LazyLogging { - - def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { - val shape = Vec3Int.full(bucket.bucketLength) - val offset = Vec3Int(bucket.topLeft.voxelXInMag, bucket.topLeft.voxelYInMag, bucket.topLeft.voxelZInMag) - n5Array.readBytesXYZ(shape, offset, dataLayer.elementClass == ElementClass.uint24) - } - - override protected def onFinalize(): Unit = () - -} - class N5BucketProvider(layer: N5Layer, dataSourceId: DataSourceId, val remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], @@ -35,8 +21,8 @@ class N5BucketProvider(layer: N5Layer, extends BucketProvider with LazyLogging { - override def 
openShardOrArrayHandle(readInstruction: DataReadInstruction)( - implicit ec: ExecutionContext): Fox[N5CubeHandle] = { + override def openDatasetArrayHandle(readInstruction: DataReadInstruction)( + implicit ec: ExecutionContext): Fox[DatasetArrayHandle] = { val magLocatorOpt: Option[MagLocator] = layer.mags.find(_.mag == readInstruction.bucket.mag) @@ -58,7 +44,7 @@ class N5BucketProvider(layer: N5Layer, magLocator.axisOrder, magLocator.channelIndex, chunkContentsCache) - .map(new N5CubeHandle(_)) + .map(new DatasetArrayHandle(_)) } yield cubeHandle case None => Empty } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala index d46632b2522..25b1f14ece1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala @@ -3,7 +3,7 @@ package com.scalableminds.webknossos.datastore.dataformats.precomputed import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator} +import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DatasetArrayHandle, MagLocator} import com.scalableminds.webknossos.datastore.datareaders.precomputed.PrecomputedArray import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.BucketPosition @@ -16,18 +16,6 @@ import net.liftweb.common.Empty import scala.concurrent.ExecutionContext import ucar.ma2.{Array => MultiArray} -class PrecomputedCubeHandle(precomputedArray: PrecomputedArray) extends DataCubeHandle with 
LazyLogging { - - def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { - val shape = Vec3Int.full(bucket.bucketLength) - val offset = Vec3Int(bucket.topLeft.voxelXInMag, bucket.topLeft.voxelYInMag, bucket.topLeft.voxelZInMag) - precomputedArray.readBytesXYZ(shape, offset, dataLayer.elementClass == ElementClass.uint24) - } - - override protected def onFinalize(): Unit = () - -} - class PrecomputedBucketProvider(layer: PrecomputedLayer, dataSourceId: DataSourceId, val remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], @@ -35,8 +23,8 @@ class PrecomputedBucketProvider(layer: PrecomputedLayer, extends BucketProvider with LazyLogging { - override def openShardOrArrayHandle(readInstruction: DataReadInstruction)( - implicit ec: ExecutionContext): Fox[PrecomputedCubeHandle] = { + override def openDatasetArrayHandle(readInstruction: DataReadInstruction)( + implicit ec: ExecutionContext): Fox[DatasetArrayHandle] = { val magLocatorOpt: Option[MagLocator] = layer.mags.find(_.mag == readInstruction.bucket.mag) @@ -58,7 +46,7 @@ class PrecomputedBucketProvider(layer: PrecomputedLayer, magLocator.axisOrder, magLocator.channelIndex, chunkContentsCache) - .map(new PrecomputedCubeHandle(_)) + .map(new DatasetArrayHandle(_)) } yield cubeHandle case None => Empty } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala index 33f4ec181e5..b64963f59f8 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala @@ -3,7 +3,7 @@ package com.scalableminds.webknossos.datastore.dataformats.wkw import com.scalableminds.util.cache.AlfuCache import 
com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator} +import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DatasetArrayHandle, MagLocator} import com.scalableminds.webknossos.datastore.datareaders.wkw.WKWArray import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.BucketPosition @@ -16,18 +16,6 @@ import ucar.ma2.{Array => MultiArray} import scala.concurrent.ExecutionContext -class WKWCubeHandle(wkwArray: WKWArray) extends DataCubeHandle with LazyLogging { - - def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { - val shape = Vec3Int.full(bucket.bucketLength) - val offset = Vec3Int(bucket.topLeft.voxelXInMag, bucket.topLeft.voxelYInMag, bucket.topLeft.voxelZInMag) - wkwArray.readBytesXYZ(shape, offset, dataLayer.elementClass == ElementClass.uint24) - } - - override protected def onFinalize(): Unit = () - -} - class WKWBucketProvider(layer: WKWLayer, dataSourceId: DataSourceId, val remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], @@ -35,8 +23,8 @@ class WKWBucketProvider(layer: WKWLayer, extends BucketProvider with LazyLogging { - override def openShardOrArrayHandle(readInstruction: DataReadInstruction)( - implicit ec: ExecutionContext): Fox[WKWCubeHandle] = { + override def openDatasetArrayHandle(readInstruction: DataReadInstruction)( + implicit ec: ExecutionContext): Fox[DatasetArrayHandle] = { val magLocatorOpt: Option[MagLocator] = layer.wkwResolutions.find(_.resolution == readInstruction.bucket.mag).map(wkwResolutionToMagLocator) @@ -53,7 +41,7 @@ class WKWBucketProvider(layer: WKWLayer, chunkContentsCache <- sharedChunkContentsCache.toFox cubeHandle <- WKWArray .open(magPath, dataSourceId, layer.name, chunkContentsCache) - .map(new WKWCubeHandle(_)) + .map(new 
DatasetArrayHandle(_)) } yield cubeHandle case None => Empty } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala index fb11a7479d8..b05f8fc6847 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala @@ -1,13 +1,11 @@ package com.scalableminds.webknossos.datastore.dataformats.zarr import com.scalableminds.util.cache.AlfuCache -import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator} +import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DatasetArrayHandle, MagLocator} import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrArray import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.BucketPosition -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, ElementClass} +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import com.typesafe.scalalogging.LazyLogging @@ -16,23 +14,6 @@ import ucar.ma2.{Array => MultiArray} import scala.concurrent.ExecutionContext -class ZarrCubeHandle(zarrArray: ZarrArray) extends DataCubeHandle with LazyLogging { - - def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { - val shape = Vec3Int.full(bucket.bucketLength) - val offset = Vec3Int(bucket.topLeft.voxelXInMag, 
bucket.topLeft.voxelYInMag, bucket.topLeft.voxelZInMag) - - bucket.additionalCoordinates match { - case Some(additionalCoordinates) if additionalCoordinates.nonEmpty => - zarrArray.readBytesWithAdditionalCoordinates(shape, offset, additionalCoordinates, dataLayer.additionalAxisMap) - case _ => zarrArray.readBytesXYZ(shape, offset, dataLayer.elementClass == ElementClass.uint24) - } - } - - override protected def onFinalize(): Unit = () - -} - class ZarrBucketProvider(layer: ZarrLayer, dataSourceId: DataSourceId, val remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], @@ -40,8 +21,8 @@ class ZarrBucketProvider(layer: ZarrLayer, extends BucketProvider with LazyLogging { - override def openShardOrArrayHandle(readInstruction: DataReadInstruction)( - implicit ec: ExecutionContext): Fox[ZarrCubeHandle] = { + override def openDatasetArrayHandle(readInstruction: DataReadInstruction)( + implicit ec: ExecutionContext): Fox[DatasetArrayHandle] = { val magLocatorOpt: Option[MagLocator] = layer.mags.find(_.mag == readInstruction.bucket.mag) @@ -63,7 +44,7 @@ class ZarrBucketProvider(layer: ZarrLayer, magLocator.axisOrder, magLocator.channelIndex, chunkContentsCache) - .map(new ZarrCubeHandle(_)) + .map(new DatasetArrayHandle(_)) } yield cubeHandle case None => Empty } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr3/Zarr3BucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr3/Zarr3BucketProvider.scala index fd7597d7c35..862822e237c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr3/Zarr3BucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr3/Zarr3BucketProvider.scala @@ -1,13 +1,11 @@ package com.scalableminds.webknossos.datastore.dataformats.zarr3 import com.scalableminds.util.cache.AlfuCache -import com.scalableminds.util.geometry.Vec3Int import 
com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator} +import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DatasetArrayHandle, MagLocator} import com.scalableminds.webknossos.datastore.datareaders.zarr3.Zarr3Array import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.BucketPosition -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, ElementClass} +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import com.typesafe.scalalogging.LazyLogging @@ -16,18 +14,6 @@ import net.liftweb.common.Empty import scala.concurrent.ExecutionContext import ucar.ma2.{Array => MultiArray} -class ZarrCubeHandle(zarrArray: Zarr3Array) extends DataCubeHandle with LazyLogging { - - def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { - val shape = Vec3Int.full(bucket.bucketLength) - val offset = Vec3Int(bucket.topLeft.voxelXInMag, bucket.topLeft.voxelYInMag, bucket.topLeft.voxelZInMag) - zarrArray.readBytesXYZ(shape, offset, dataLayer.elementClass == ElementClass.uint24) - } - - override protected def onFinalize(): Unit = () - -} - class Zarr3BucketProvider(layer: Zarr3Layer, dataSourceId: DataSourceId, val remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], @@ -35,8 +21,8 @@ class Zarr3BucketProvider(layer: Zarr3Layer, extends BucketProvider with LazyLogging { - override def openShardOrArrayHandle(readInstruction: DataReadInstruction)( - implicit ec: ExecutionContext): Fox[ZarrCubeHandle] = { + override def openDatasetArrayHandle(readInstruction: DataReadInstruction)( + implicit ec: ExecutionContext): 
Fox[DatasetArrayHandle] = { val magLocatorOpt: Option[MagLocator] = layer.mags.find(_.mag == readInstruction.bucket.mag) @@ -58,7 +44,7 @@ class Zarr3BucketProvider(layer: Zarr3Layer, magLocator.axisOrder, magLocator.channelIndex, chunkContentsCache) - .map(new ZarrCubeHandle(_)) + .map(new DatasetArrayHandle(_)) } yield cubeHandle case None => Empty } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala index 13db1029bb8..cc6e97d0a04 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala @@ -2,6 +2,8 @@ package com.scalableminds.webknossos.datastore.datareaders import play.api.libs.json.{Json, OFormat} +// Defines the axis order of a DatasetArray. Note that this ignores transpose codecs/ArrayOrder.F/C. +// Those will have to be applied on individual chunk’s contents. 
case class AxisOrder(x: Int, y: Int, z: Option[Int], c: Option[Int] = None) { def hasZAxis: Boolean = z.isDefined @@ -12,7 +14,7 @@ case class AxisOrder(x: Int, y: Int, z: Option[Int], c: Option[Int] = None) { case None => Math.max(Math.max(x, y), c.getOrElse(-1)) + 1 } - def permutation(rank: Int): Array[Int] = + def wkToArrayPermutation(rank: Int): Array[Int] = c match { case Some(channel) => ((0 until (rank - 4)).toList :+ channel :+ x :+ y :+ zWithFallback).toArray @@ -20,20 +22,20 @@ case class AxisOrder(x: Int, y: Int, z: Option[Int], c: Option[Int] = None) { ((0 until (rank - 3)).toList :+ x :+ y :+ zWithFallback).toArray } - def inversePermutation(rank: Int): Array[Int] = { + def arrayToWkPermutation(rank: Int): Array[Int] = { val permutationMutable: Array[Int] = Array.fill(rank)(0) - permutation(rank).zipWithIndex.foreach { + wkToArrayPermutation(rank).zipWithIndex.foreach { case (p, i) => permutationMutable(p) = i } permutationMutable } - def permuteIndices(indices: Array[Int]): Array[Int] = - permutation(indices.length).map(indices(_)) + def permuteIndicesWkToArray(indices: Array[Int]): Array[Int] = + wkToArrayPermutation(indices.length).map(indices(_)) - def permuteIndicesReverse(indices: Array[Int]): Array[Int] = - inversePermutation(indices.length).map(indices(_)) + def permuteIndicesArrayToWk(indices: Array[Int]): Array[Int] = + arrayToWkPermutation(indices.length).map(indices(_)) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index c922c550318..807bbce8f22 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -21,6 +21,8 @@ class DatasetArray(vaultPath: VaultPath, dataSourceId: DataSourceId, layerName: String, header: DatasetHeader, + // 
axisOrder and additionalAxes match those from “outer” metadata, and can directly be used to compute chunk indices. + // For each chunk’s contents, additionally the transpose codecs/ArrayOrder.C/F from the DatasetHeader must be applied. axisOrder: AxisOrder, channelIndex: Option[Int], additionalAxes: Option[Seq[AdditionalAxis]], @@ -29,6 +31,13 @@ class DatasetArray(vaultPath: VaultPath, protected lazy val chunkReader: ChunkReader = new ChunkReader(header) + private lazy val additionalAxesMap: Map[String, AdditionalAxis] = + additionalAxes match { + case Some(additionalAxis) => + additionalAxis.map(additionalAxis => (additionalAxis.name -> additionalAxis)).toMap + case None => Map() + } + // Helper variables to allow reading 2d datasets as 3d datasets with depth 1 lazy val rank: Int = if (axisOrder.hasZAxis) { @@ -40,7 +49,7 @@ class DatasetArray(vaultPath: VaultPath, lazy val datasetShape: Option[Array[Int]] = if (axisOrder.hasZAxis) { header.datasetShape } else { - header.datasetShape.map(size => size :+ 1) + header.datasetShape.map(shape => shape :+ 1) } lazy val chunkShape: Array[Int] = if (axisOrder.hasZAxis) { @@ -51,11 +60,11 @@ class DatasetArray(vaultPath: VaultPath, private def chunkShapeAtIndex(index: Array[Int]) = if (axisOrder.hasZAxis) { header.chunkShapeAtIndex(index) } else { - chunkShape // irregular sized chunk indexes are currently not supported for 2d datasets + chunkShape // irregular shaped chunk indexes are currently not supported for 2d datasets } // Returns byte array in fortran-order with little-endian values - def readBytesXYZ(size: Vec3Int, offset: Vec3Int, shouldReadUint24: Boolean = false)( + def readBytesXYZ(shape: Vec3Int, offset: Vec3Int, shouldReadUint24: Boolean = false)( implicit ec: ExecutionContext): Fox[Array[Byte]] = { val paddingDimensionsCount = rank - 3 val offsetArray = channelIndex match { @@ -63,48 +72,49 @@ class DatasetArray(vaultPath: VaultPath, case Some(c) => Array.fill(paddingDimensionsCount - 1)(0) :+ c :+ offset.x :+ offset.y :+ 
offset.z case _ => Array.fill(paddingDimensionsCount)(0) :+ offset.x :+ offset.y :+ offset.z } - val sizeArray = if (shouldReadUint24 && rank >= 4) { - Array.fill(paddingDimensionsCount - 1)(1) :+ 3 :+ size.x :+ size.y :+ size.z + val shapeArray = if (shouldReadUint24 && rank >= 4) { + Array.fill(paddingDimensionsCount - 1)(1) :+ 3 :+ shape.x :+ shape.y :+ shape.z } else { - Array.fill(paddingDimensionsCount)(1) :+ size.x :+ size.y :+ size.z + Array.fill(paddingDimensionsCount)(1) :+ shape.x :+ shape.y :+ shape.z } - readBytes(sizeArray, offsetArray) + readBytes(shapeArray, offsetArray) } def readBytesWithAdditionalCoordinates( - size: Vec3Int, - offset: Vec3Int, - additionalCoordinates: Seq[AdditionalCoordinate], - additionalAxesMap: Map[String, AdditionalAxis])(implicit ec: ExecutionContext): Fox[Array[Byte]] = { - val dimensionCount = 3 + (if (channelIndex.isDefined) 1 else 0) + additionalAxesMap.size - - /* - readAsFortranOrder only supports a size/offset with XYZ at the end. This does not really make sense if we assume - that xyz and additional coordinates may have any index/axisorder. Since only ngff datasets are currently supported - for additional coordinates, and they follow the convention (t)(c) ... zyx, with additional coordinates before zyx, - this works for now. 
- */ - - val shapeArray: Array[Int] = Array.fill(dimensionCount)(1) - shapeArray(dimensionCount - 3) = size.x - shapeArray(dimensionCount - 2) = size.y - shapeArray(dimensionCount - 1) = size.z - - val offsetArray: Array[Int] = Array.fill(dimensionCount)(0) - offsetArray(dimensionCount - 3) = offset.x - offsetArray(dimensionCount - 2) = offset.y - offsetArray(dimensionCount - 1) = offset.z - - channelIndex match { - case Some(c) => offsetArray(axisOrder.c.getOrElse(axisOrder.x - 1)) = c - case None => () + shapeXYZ: Vec3Int, + offsetXYZ: Vec3Int, + additionalCoordinatesOpt: Option[Seq[AdditionalCoordinate]], + shouldReadUint24: Boolean = false)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { + + val shapeArray: Array[Int] = Array.fill(rank)(1) + shapeArray(rank - 3) = shapeXYZ.x + shapeArray(rank - 2) = shapeXYZ.y + shapeArray(rank - 1) = shapeXYZ.z + + val offsetArray: Array[Int] = Array.fill(rank)(0) + offsetArray(rank - 3) = offsetXYZ.x + offsetArray(rank - 2) = offsetXYZ.y + offsetArray(rank - 1) = offsetXYZ.z + + axisOrder.c.foreach { channelAxisInner => + val channelAxisOuter = axisOrder.arrayToWkPermutation(rank)(channelAxisInner) + // If a channelIndex is requested, and a channel axis is known, add an offset to the channel axis + channelIndex.foreach { requestedChannelOffset => + offsetArray(channelAxisOuter) = requestedChannelOffset + } + // If uint24 is to be read, increase channel axis shape value from 1 to 3 + if (shouldReadUint24) { + shapeArray(channelAxisOuter) = 3 + } } - for (additionalCoordinate <- additionalCoordinates) { - val index = additionalAxesMap(additionalCoordinate.name).index - offsetArray(index) = additionalCoordinate.value - // shapeArray for additional coordinates will always be 1 + additionalCoordinatesOpt.foreach { additionalCoordinates => + for (additionalCoordinate <- additionalCoordinates) { + val index = additionalAxesMap(additionalCoordinate.name).index + offsetArray(index) = additionalCoordinate.value + // shapeArray at 
positions of additional coordinates is always 1 + } } readBytes(shapeArray, offsetArray) } @@ -122,14 +132,14 @@ class DatasetArray(vaultPath: VaultPath, private def readAsFortranOrder(shape: Array[Int], offset: Array[Int])( implicit ec: ExecutionContext): Fox[MultiArray] = { val totalOffset: Array[Int] = offset.zip(header.voxelOffset).map { case (o, v) => o - v }.padTo(offset.length, 0) - val chunkIndices = ChunkUtils.computeChunkIndices(datasetShape.map(axisOrder.permuteIndicesReverse), - axisOrder.permuteIndicesReverse(chunkShape), + val chunkIndices = ChunkUtils.computeChunkIndices(datasetShape.map(axisOrder.permuteIndicesArrayToWk), + axisOrder.permuteIndicesArrayToWk(chunkShape), shape, totalOffset) if (partialCopyingIsNotNeeded(shape, totalOffset, chunkIndices)) { for { chunkIndex <- chunkIndices.headOption.toFox - sourceChunk: MultiArray <- getSourceChunkDataWithCache(axisOrder.permuteIndices(chunkIndex), + sourceChunk: MultiArray <- getSourceChunkDataWithCache(axisOrder.permuteIndicesWkToArray(chunkIndex), useSkipTypingShortcut = true) } yield sourceChunk } else { @@ -138,7 +148,7 @@ class DatasetArray(vaultPath: VaultPath, val targetInCOrder: MultiArray = MultiArrayUtils.orderFlippedView(targetMultiArray) val copiedFuture = Fox.combined(chunkIndices.map { chunkIndex: Array[Int] => for { - sourceChunk: MultiArray <- getSourceChunkDataWithCache(axisOrder.permuteIndices(chunkIndex)) + sourceChunk: MultiArray <- getSourceChunkDataWithCache(axisOrder.permuteIndicesWkToArray(chunkIndex)) offsetInChunk = computeOffsetInChunk(chunkIndex, totalOffset) sourceChunkInCOrder: MultiArray = MultiArrayUtils.axisOrderXYZView(sourceChunk, axisOrder, @@ -212,7 +222,7 @@ class DatasetArray(vaultPath: VaultPath, private def computeOffsetInChunk(chunkIndex: Array[Int], globalOffset: Array[Int]): Array[Int] = chunkIndex.indices.map { dim => - globalOffset(dim) - (chunkIndex(dim) * axisOrder.permuteIndicesReverse(chunkShape)(dim)) + globalOffset(dim) - (chunkIndex(dim) * 
axisOrder.permuteIndicesArrayToWk(chunkShape)(dim)) }.toArray override def toString: String = diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala index 94f1b8ce9ad..580dbbe8fad 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala @@ -133,7 +133,7 @@ object MultiArrayUtils { * For all cases we could test, the two are identical. Beware of this when debugging future datasets, * e.g. with axis order ZXY */ - val permutation = axisOrder.permutation(source.getRank) + val permutation = axisOrder.wkToArrayPermutation(source.getRank) val flippedIfNeeded = if (flip) permutation.reverse else permutation source.permute(flippedIfNeeded) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala index a645932c281..f23ef3833fc 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala @@ -124,8 +124,8 @@ class WKWArray(vaultPath: VaultPath, private def chunkIndexToShardIndex(chunkIndex: Array[Int]) = ChunkUtils.computeChunkIndices( - header.datasetShape.map(axisOrder.permuteIndicesReverse), - axisOrder.permuteIndicesReverse(header.shardShape), + header.datasetShape.map(axisOrder.permuteIndicesArrayToWk), + axisOrder.permuteIndicesArrayToWk(header.shardShape), header.chunkShape, chunkIndex.zip(header.chunkShape).map { case (i, s) => i * s } ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala index c84d11be898..0e70af314f4 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala @@ -157,8 +157,8 @@ class Zarr3Array(vaultPath: VaultPath, private def chunkIndexToShardIndex(chunkIndex: Array[Int]) = ChunkUtils.computeChunkIndices( - header.datasetShape.map(axisOrder.permuteIndicesReverse), - axisOrder.permuteIndicesReverse(header.outerChunkShape), + header.datasetShape.map(axisOrder.permuteIndicesArrayToWk), + axisOrder.permuteIndicesArrayToWk(header.outerChunkShape), header.chunkShape, chunkIndex.zip(header.chunkShape).map { case (i, s) => i * s } ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala index 328baf05cee..b89b948155a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala @@ -171,12 +171,6 @@ trait DataLayerLike { // n-dimensional datasets = 3-dimensional datasets with additional coordinate axes def additionalAxes: Option[Seq[AdditionalAxis]] - def additionalAxisMap: Map[String, AdditionalAxis] = - additionalAxes match { - case Some(additionalAxis) => - additionalAxis.map(additionalAxis => (additionalAxis.name -> additionalAxis)).toMap - case None => Map() - } } object DataLayerLike { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataCubeCache.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataCubeCache.scala index e9f11eb899e..3c50bef3f76 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataCubeCache.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataCubeCache.scala @@ -3,7 +3,7 @@ package com.scalableminds.webknossos.datastore.storage import com.scalableminds.util.cache.LRUConcurrentCache import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.dataformats.DataCubeHandle +import com.scalableminds.webknossos.datastore.dataformats.DatasetArrayHandle import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Empty, Failure, Full} @@ -35,7 +35,7 @@ object CachedCube { } class DataCubeCache(val maxEntries: Int) - extends LRUConcurrentCache[CachedCube, Fox[DataCubeHandle]] + extends LRUConcurrentCache[CachedCube, Fox[DatasetArrayHandle]] with FoxImplicits with LazyLogging { @@ -43,8 +43,8 @@ class DataCubeCache(val maxEntries: Int) * Loads the due to x,y and z defined block into the cache array and * returns it. 
*/ - def withCache[T](readInstruction: DataReadInstruction)(loadF: DataReadInstruction => Fox[DataCubeHandle])( - f: DataCubeHandle => Fox[T])(implicit ec: ExecutionContext): Fox[T] = { + def withCache[T](readInstruction: DataReadInstruction)(loadF: DataReadInstruction => Fox[DatasetArrayHandle])( + f: DatasetArrayHandle => Fox[T])(implicit ec: ExecutionContext): Fox[T] = { val cachedCubeInfo = CachedCube.from(readInstruction) def handleUncachedCube(): Fox[T] = { @@ -84,6 +84,6 @@ class DataCubeCache(val maxEntries: Int) } } - override def onElementRemoval(key: CachedCube, value: Fox[DataCubeHandle]): Unit = + override def onElementRemoval(key: CachedCube, value: Fox[DatasetArrayHandle]): Unit = value.map(_.scheduleForRemoval()) } From d4da76d6e48d3fa6f4e3a6ef05ed909d2bb53260 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 29 Jan 2024 15:33:29 +0100 Subject: [PATCH 02/14] unused imports --- .../dataformats/DatasetArrayHandle.scala | 1 - .../PrecomputedBucketProvider.scala | 4 +--- .../dataformats/wkw/WKWBucketProvider.scala | 4 +--- .../datastore/datareaders/DatasetArray.scala | 18 ------------------ 4 files changed, 2 insertions(+), 25 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayHandle.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayHandle.scala index d91277a41d3..ad112fb89e9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayHandle.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayHandle.scala @@ -3,7 +3,6 @@ package com.scalableminds.webknossos.datastore.dataformats import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.datareaders.DatasetArray -import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrArray import 
com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, ElementClass} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala index 25b1f14ece1..9e958d669f5 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala @@ -1,13 +1,11 @@ package com.scalableminds.webknossos.datastore.dataformats.precomputed import com.scalableminds.util.cache.AlfuCache -import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DatasetArrayHandle, MagLocator} import com.scalableminds.webknossos.datastore.datareaders.precomputed.PrecomputedArray import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.BucketPosition -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, ElementClass} +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import com.typesafe.scalalogging.LazyLogging diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala index b64963f59f8..7f0785afab2 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala @@ -1,13 +1,11 @@ package com.scalableminds.webknossos.datastore.dataformats.wkw import com.scalableminds.util.cache.AlfuCache -import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DatasetArrayHandle, MagLocator} import com.scalableminds.webknossos.datastore.datareaders.wkw.WKWArray import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.BucketPosition -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, ElementClass} +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import com.typesafe.scalalogging.LazyLogging diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index 807bbce8f22..8e580d1b9fa 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -63,24 +63,6 @@ class DatasetArray(vaultPath: VaultPath, chunkShape // irregular shaped chunk indexes are currently not supported for 2d datasets } - // Returns byte array in fortran-order with little-endian values - def readBytesXYZ(shape: Vec3Int, offset: Vec3Int, shouldReadUint24: Boolean = false)( - implicit ec: ExecutionContext): Fox[Array[Byte]] = { - val paddingDimensionsCount = rank - 3 - val 
offsetArray = channelIndex match { - case Some(c) if rank >= 4 => - Array.fill(paddingDimensionsCount - 1)(0) :+ c :+ offset.x :+ offset.y :+ offset.z - case _ => Array.fill(paddingDimensionsCount)(0) :+ offset.x :+ offset.y :+ offset.z - } - val shapeArray = if (shouldReadUint24 && rank >= 4) { - Array.fill(paddingDimensionsCount - 1)(1) :+ 3 :+ shape.x :+ shape.y :+ shape.z - } else { - Array.fill(paddingDimensionsCount)(1) :+ shape.x :+ shape.y :+ shape.z - } - - readBytes(shapeArray, offsetArray) - } - def readBytesWithAdditionalCoordinates( shapeXYZ: Vec3Int, offsetXYZ: Vec3Int, From 9552bce6d82a656415ad3460e02a50237780d60b Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 29 Jan 2024 17:34:59 +0100 Subject: [PATCH 03/14] wip: unified axis order --- .../dataformats/n5/N5BucketProvider.scala | 1 + .../PrecomputedBucketProvider.scala | 1 + .../dataformats/zarr/ZarrBucketProvider.scala | 1 + .../zarr3/Zarr3BucketProvider.scala | 1 + .../datastore/datareaders/AxisOrder.scala | 8 ++++++ .../datastore/datareaders/ChunkUtils.scala | 4 +++ .../datastore/datareaders/DatasetArray.scala | 27 ++++++++++++++++++- .../datastore/datareaders/n5/N5Array.scala | 3 ++- .../precomputed/PrecomputedArray.scala | 3 ++- .../datareaders/zarr/ZarrArray.scala | 19 +++++++------ .../datareaders/zarr3/Zarr3Array.scala | 3 ++- .../volume/Zarr3BucketStreamSink.scala | 2 +- 12 files changed, 60 insertions(+), 13 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala index 4d75a1dd7f1..c2470c3a5f5 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/n5/N5BucketProvider.scala @@ -43,6 +43,7 @@ class N5BucketProvider(layer: N5Layer, layer.name, magLocator.axisOrder, 
magLocator.channelIndex, + layer.additionalAxes, chunkContentsCache) .map(new DatasetArrayHandle(_)) } yield cubeHandle diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala index 9e958d669f5..d1a5d5913e3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala @@ -43,6 +43,7 @@ class PrecomputedBucketProvider(layer: PrecomputedLayer, layer.name, magLocator.axisOrder, magLocator.channelIndex, + layer.additionalAxes, chunkContentsCache) .map(new DatasetArrayHandle(_)) } yield cubeHandle diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala index b05f8fc6847..2699914197f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr/ZarrBucketProvider.scala @@ -43,6 +43,7 @@ class ZarrBucketProvider(layer: ZarrLayer, layer.name, magLocator.axisOrder, magLocator.channelIndex, + layer.additionalAxes, chunkContentsCache) .map(new DatasetArrayHandle(_)) } yield cubeHandle diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr3/Zarr3BucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr3/Zarr3BucketProvider.scala index 862822e237c..52ee7d76ee8 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr3/Zarr3BucketProvider.scala +++ 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/zarr3/Zarr3BucketProvider.scala @@ -43,6 +43,7 @@ class Zarr3BucketProvider(layer: Zarr3Layer, layer.name, magLocator.axisOrder, magLocator.channelIndex, + layer.additionalAxes, chunkContentsCache) .map(new DatasetArrayHandle(_)) } yield cubeHandle diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala index cc6e97d0a04..60c974f2aa2 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala @@ -2,6 +2,14 @@ package com.scalableminds.webknossos.datastore.datareaders import play.api.libs.json.{Json, OFormat} +case class Axis(name: String, index: Int) + +class FullAxisOrder(axes: Seq[Axis]) + +object FullAxisOrder { + def fromAxisOrderAndAdditionalCoordinates: FullAxisOrder = ??? +} + // Defines the axis order of a DatasetArray. Note that this ignores transpose codecs/ArrayOrder.F/C. // Those will have to be applied on individual chunk’s contents. 
case class AxisOrder(x: Int, y: Int, z: Option[Int], c: Option[Int] = None) { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala index b40ecfe8f73..6f28086cdea 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.datareaders +import com.scalableminds.webknossos.datastore.datareaders.ChunkUtils.logger import com.typesafe.scalalogging.LazyLogging object ChunkUtils extends LazyLogging { @@ -40,6 +41,9 @@ object ChunkUtils extends LazyLogging { dimIndex = -1 } } + logger.info(s"selected offset: ${selectedOffset.mkString(",")}, selectedShape: ${selectedShape + .mkString(",")} arrayShapeOpt ${arrayShapeOpt.map(_.mkString(","))} arrayChunkShape ${arrayChunkShape.mkString( + ",")}. 
returning chunkIndices ${chunkIndices.toList.map(_.mkString(","))}") chunkIndices.toList } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index 8e580d1b9fa..8173ff7f123 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -108,11 +108,36 @@ class DatasetArray(vaultPath: VaultPath, asBytes <- BytesConverter.toByteArray(typedMultiArray, header.resolvedDataType, ByteOrder.LITTLE_ENDIAN) } yield asBytes + private def printAsInner(values: Array[Int]): String = { + val axisNames = Array.fill(rank)("") + axisNames(axisOrder.x) = "x" + axisNames(axisOrder.y) = "y" + axisOrder.z.foreach { zIndex => + axisNames(zIndex) = "z" + } + axisOrder.c.foreach { cIndex => + axisNames(cIndex) = "c" + } + additionalAxes.getOrElse(Seq.empty).foreach { axis => + axisNames(axis.index) = axis.name + } + val raw = axisNames + .zip(values) + .map { tuple => + f"${tuple._1}=${tuple._2}" + } + .mkString(",") + f"inner($raw)" + } + // Read from array. Note that shape and offset should be passed in XYZ order, left-padded with 0 and 1 respectively. // This function will internally adapt to the array's axis order so that XYZ data in fortran-order is returned. 
- private def readAsFortranOrder(shape: Array[Int], offset: Array[Int])( implicit ec: ExecutionContext): Fox[MultiArray] = { + logger.info(s"reading shape ${shape.mkString(",")} at ${offset.mkString(",")}") + logger.info(s"ds shape: ${datasetShape + .map(printAsInner)} (permuted to outer(${datasetShape.map(axisOrder.permuteIndicesArrayToWk).map(_.mkString(","))})), chunk shape: ${printAsInner( + chunkShape)} (permuted to outer(${axisOrder.permuteIndicesArrayToWk(chunkShape).mkString(",")}))") val totalOffset: Array[Int] = offset.zip(header.voxelOffset).map { case (o, v) => o - v }.padTo(offset.length, 0) val chunkIndices = ChunkUtils.computeChunkIndices(datasetShape.map(axisOrder.permuteIndicesArrayToWk), axisOrder.permuteIndicesArrayToWk(chunkShape), diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala index 4405c322774..1d26dd1bc1f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala @@ -19,6 +19,7 @@ object N5Array extends LazyLogging { layerName: String, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int], + additionalAxes: Option[Seq[AdditionalAxis]], sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[N5Array] = for { headerBytes <- (path / N5Header.FILENAME_ATTRIBUTES_JSON) @@ -32,7 +33,7 @@ object N5Array extends LazyLogging { header, axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(header.rank)), channelIndex, - None, + additionalAxes, sharedChunkContentsCache) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala index 
9fe68dc01a8..50541d163cc 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala @@ -25,6 +25,7 @@ object PrecomputedArray extends LazyLogging { layerName: String, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int], + additionalAxes: Option[Seq[AdditionalAxis]], sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[PrecomputedArray] = for { headerBytes <- (magPath.parent / PrecomputedHeader.FILENAME_INFO) @@ -41,7 +42,7 @@ object PrecomputedArray extends LazyLogging { scaleHeader, axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(scaleHeader.rank)), channelIndex, - None, + additionalAxes, sharedChunkContentsCache ) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala index 5fa476fdbfa..5d3f6cf3076 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala @@ -18,6 +18,7 @@ object ZarrArray extends LazyLogging { layerName: String, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int], + additionalAxes: Option[Seq[AdditionalAxis]], sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[ZarrArray] = for { headerBytes <- (path / ZarrHeader.FILENAME_DOT_ZARRAY) @@ -25,14 +26,16 @@ object ZarrArray extends LazyLogging { header <- JsonHelper.parseAndValidateJson[ZarrHeader](headerBytes) ?~> "Could not parse array header" _ <- DatasetArray.assertChunkSizeLimit(header.bytesPerChunk) } yield - new ZarrArray(path, - dataSourceId, - layerName, - header, - 
axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(header.rank)), - channelIndex, - None, - sharedChunkContentsCache) + new ZarrArray( + path, + dataSourceId, + layerName, + header, + axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(header.rank)), + channelIndex, + additionalAxes, + sharedChunkContentsCache + ) } class ZarrArray(vaultPath: VaultPath, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala index 0e70af314f4..f79b1f112ad 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala @@ -19,6 +19,7 @@ object Zarr3Array extends LazyLogging { layerName: String, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int], + additionalAxes: Option[Seq[AdditionalAxis]], sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[Zarr3Array] = for { headerBytes <- (path / Zarr3ArrayHeader.FILENAME_ZARR_JSON) @@ -31,7 +32,7 @@ object Zarr3Array extends LazyLogging { header, axisOrderOpt.getOrElse(AxisOrder.asCxyzFromRank(header.rank)), channelIndex, - None, + additionalAxes, sharedChunkContentsCache) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala index 7c5cfa4987e..8806f506505 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala @@ -107,7 +107,7 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLay 
layer.boundingBox, layer.elementClass, magLocators.toList, - additionalAxes = additionalAxes)), + additionalAxes = additionalAxes)), // TODO their indexes are no longer right scale = voxelSize.getOrElse(Vec3Double.ones) // Download should still be available if the dataset no longer exists. In that case, the voxel size is unknown ) } From c7121565c383a78e6f7f89de5e0c01064a66a7c2 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 30 Jan 2024 12:01:42 +0100 Subject: [PATCH 04/14] FullAxisOrder from AxisOrder and AdditionalCoordinates --- .../datastore/datareaders/AxisOrder.scala | 37 +++++++++++++++---- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala index 60c974f2aa2..6fb3e2f4cf9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala @@ -1,15 +1,8 @@ package com.scalableminds.webknossos.datastore.datareaders +import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import play.api.libs.json.{Json, OFormat} -case class Axis(name: String, index: Int) - -class FullAxisOrder(axes: Seq[Axis]) - -object FullAxisOrder { - def fromAxisOrderAndAdditionalCoordinates: FullAxisOrder = ??? -} - // Defines the axis order of a DatasetArray. Note that this ignores transpose codecs/ArrayOrder.F/C. // Those will have to be applied on individual chunk’s contents. 
case class AxisOrder(x: Int, y: Int, z: Option[Int], c: Option[Int] = None) { @@ -45,6 +38,11 @@ case class AxisOrder(x: Int, y: Int, z: Option[Int], c: Option[Int] = None) { def permuteIndicesArrayToWk(indices: Array[Int]): Array[Int] = arrayToWkPermutation(indices.length).map(indices(_)) + def length: Int = { + val lengthOfZ = if (z.isDefined) 1 else 0 + val lengthOfC = if (c.isDefined) 1 else 0 + lengthOfC + 2 + lengthOfZ + } } object AxisOrder { @@ -66,3 +64,26 @@ object AxisOrder { def cxyz: AxisOrder = asCxyzFromRank(rank = 4) implicit val jsonFormat: OFormat[AxisOrder] = Json.format[AxisOrder] } + +case class Axis(name: String) + +case class FullAxisOrder(axes: Seq[Axis]) + +object FullAxisOrder { + def fromAxisOrderAndAdditionalAxes(axisOrder: AxisOrder, + additionalAxes: Option[Seq[AdditionalAxis]]): FullAxisOrder = { + val asArray: Array[Axis] = Array.fill(additionalAxes.map(_.length).getOrElse(0) + axisOrder.length)(Axis("")) + asArray(axisOrder.x) = Axis("x") + asArray(axisOrder.y) = Axis("y") + axisOrder.c.foreach { c => + asArray(c) = Axis("c") + } + axisOrder.z.foreach { z => + asArray(z) = Axis("z") + } + for (additionalAxis <- additionalAxes.getOrElse(Seq.empty)) { + asArray(additionalAxis.index) = Axis(additionalAxis.name) + } + FullAxisOrder(asArray.toVector) + } +} From f801e7059e98ab2462a26ceb774f70b340689d08 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 30 Jan 2024 14:30:22 +0100 Subject: [PATCH 05/14] construct fullAxisOrder, remove unused stuff --- .../datastore/datareaders/AxisOrder.scala | 81 +++++++++++++------ .../datastore/datareaders/DatasetArray.scala | 58 +++++++------ .../datareaders/MultiArrayUtils.scala | 12 +-- .../datastore/datareaders/wkw/WKWArray.scala | 4 +- .../datareaders/zarr3/Zarr3Array.scala | 4 +- 5 files changed, 95 insertions(+), 64 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala index 6fb3e2f4cf9..92f2e12d97c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala @@ -15,31 +15,8 @@ case class AxisOrder(x: Int, y: Int, z: Option[Int], c: Option[Int] = None) { case None => Math.max(Math.max(x, y), c.getOrElse(-1)) + 1 } - def wkToArrayPermutation(rank: Int): Array[Int] = - c match { - case Some(channel) => - ((0 until (rank - 4)).toList :+ channel :+ x :+ y :+ zWithFallback).toArray - case None => - ((0 until (rank - 3)).toList :+ x :+ y :+ zWithFallback).toArray - } - - def arrayToWkPermutation(rank: Int): Array[Int] = { - val permutationMutable: Array[Int] = Array.fill(rank)(0) - wkToArrayPermutation(rank).zipWithIndex.foreach { - case (p, i) => - permutationMutable(p) = i - } - permutationMutable - } - - def permuteIndicesWkToArray(indices: Array[Int]): Array[Int] = - wkToArrayPermutation(indices.length).map(indices(_)) - - def permuteIndicesArrayToWk(indices: Array[Int]): Array[Int] = - arrayToWkPermutation(indices.length).map(indices(_)) - def length: Int = { - val lengthOfZ = if (z.isDefined) 1 else 0 + val lengthOfZ = 1 // if z is None, we append it an as adapter val lengthOfC = if (c.isDefined) 1 else 0 lengthOfC + 2 + lengthOfZ } @@ -67,7 +44,52 @@ object AxisOrder { case class Axis(name: String) -case class FullAxisOrder(axes: Seq[Axis]) +case class FullAxisOrder(axes: Seq[Axis]) { + + override def toString: String = axes.map(_.name).mkString("") + def toStringWk: String = + axesWk.map(_.name).mkString("") + + def axesWk: Array[Axis] = arrayToWkPermutation.map(axes) + + lazy val rank: Int = axes.length + + lazy val arrayToWkPermutation: Array[Int] = { + // wk is always the additionalAxes + (c)zxy + val permutationMutable: Array[Int] = Array.fill(axes.length)(0) + + var additionalAxisIndex = 0 + 
axes.zipWithIndex.foreach { + case (axis, index) => + axis.name match { + case "z" => permutationMutable(rank - 1) = index + case "y" => permutationMutable(rank - 2) = index + case "x" => permutationMutable(rank - 3) = index + case "c" => permutationMutable(rank - 4) = index + case _ => + permutationMutable(additionalAxisIndex) = index + additionalAxisIndex += 1 + } + } + permutationMutable + } + + def wkToArrayPermutation: Array[Int] = { + val permutationMutable: Array[Int] = Array.fill(arrayToWkPermutation.length)(0) + arrayToWkPermutation.zipWithIndex.foreach { + case (p, i) => + permutationMutable(p) = i + } + permutationMutable + } + + def permuteIndicesWkToArray(indices: Array[Int]): Array[Int] = + wkToArrayPermutation.map(indices(_)) + + def permuteIndicesArrayToWk(indices: Array[Int]): Array[Int] = + arrayToWkPermutation.map(indices(_)) + +} object FullAxisOrder { def fromAxisOrderAndAdditionalAxes(axisOrder: AxisOrder, @@ -84,6 +106,15 @@ object FullAxisOrder { for (additionalAxis <- additionalAxes.getOrElse(Seq.empty)) { asArray(additionalAxis.index) = Axis(additionalAxis.name) } + if (!axisOrder.hasZAxis) { + asArray(asArray.length) = Axis("z") // Adapter for reading 2D datasets + } FullAxisOrder(asArray.toVector) } + + // Use only for debugging/developing, with single-char axis names, no duplicates! 
+ @deprecated + def fromString(axisOrderLiteral: String): FullAxisOrder = + FullAxisOrder(axisOrderLiteral.map(char => Axis(name = char.toString))) + } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index 8173ff7f123..cc0d498909c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -29,6 +29,9 @@ class DatasetArray(vaultPath: VaultPath, sharedChunkContentsCache: AlfuCache[String, MultiArray]) extends LazyLogging { + protected lazy val fullAxisOrder: FullAxisOrder = + FullAxisOrder.fromAxisOrderAndAdditionalAxes(axisOrder, additionalAxes) + protected lazy val chunkReader: ChunkReader = new ChunkReader(header) private lazy val additionalAxesMap: Map[String, AdditionalAxis] = @@ -80,7 +83,7 @@ class DatasetArray(vaultPath: VaultPath, offsetArray(rank - 1) = offsetXYZ.z axisOrder.c.foreach { channelAxisInner => - val channelAxisOuter = axisOrder.arrayToWkPermutation(rank)(channelAxisInner) + val channelAxisOuter = fullAxisOrder.arrayToWkPermutation(channelAxisInner) // If a channelIndex is requested, and a channel axis is known, add an offset to the channel axis channelIndex.foreach { requestedChannelOffset => offsetArray(channelAxisOuter) = requestedChannelOffset @@ -93,7 +96,7 @@ class DatasetArray(vaultPath: VaultPath, additionalCoordinatesOpt.foreach { additionalCoordinates => for (additionalCoordinate <- additionalCoordinates) { - val index = additionalAxesMap(additionalCoordinate.name).index + val index = fullAxisOrder.arrayToWkPermutation(additionalAxesMap(additionalCoordinate.name).index) offsetArray(index) = additionalCoordinate.value // shapeArray at positions of additional coordinates is always 1 } @@ -109,18 +112,7 @@ class 
DatasetArray(vaultPath: VaultPath, } yield asBytes private def printAsInner(values: Array[Int]): String = { - val axisNames = Array.fill(rank)("") - axisNames(axisOrder.x) = "x" - axisNames(axisOrder.y) = "y" - axisOrder.z.foreach { zIndex => - axisNames(zIndex) = "z" - } - axisOrder.c.foreach { cIndex => - axisNames(cIndex) = "c" - } - additionalAxes.getOrElse(Seq.empty).foreach { axis => - axisNames(axis.index) = axis.name - } + val axisNames = fullAxisOrder.axes.map(_.name) val raw = axisNames .zip(values) .map { tuple => @@ -130,23 +122,35 @@ class DatasetArray(vaultPath: VaultPath, f"inner($raw)" } + private def printAsOuter(values: Array[Int]): String = { + val axisNames = fullAxisOrder.axesWk.map(_.name) + val raw = axisNames + .zip(values) + .map { tuple => + f"${tuple._1}=${tuple._2}" + } + .mkString(",") + f"outer($raw)" + } + // Read from array. Note that shape and offset should be passed in XYZ order, left-padded with 0 and 1 respectively. // This function will internally adapt to the array's axis order so that XYZ data in fortran-order is returned. 
private def readAsFortranOrder(shape: Array[Int], offset: Array[Int])( implicit ec: ExecutionContext): Fox[MultiArray] = { - logger.info(s"reading shape ${shape.mkString(",")} at ${offset.mkString(",")}") - logger.info(s"ds shape: ${datasetShape - .map(printAsInner)} (permuted to outer(${datasetShape.map(axisOrder.permuteIndicesArrayToWk).map(_.mkString(","))})), chunk shape: ${printAsInner( - chunkShape)} (permuted to outer(${axisOrder.permuteIndicesArrayToWk(chunkShape).mkString(",")}))") + logger.info(s"full order: $fullAxisOrder") + logger.info(s"reading shape ${printAsOuter(shape)} at ${printAsOuter(offset)}") + logger.info( + s"ds shape: ${datasetShape.map(printAsInner)} (permuted to outer(${datasetShape.map(fullAxisOrder.permuteIndicesArrayToWk).map(printAsOuter)})), chunk shape: ${printAsInner( + chunkShape)} (permuted to outer(${printAsOuter(fullAxisOrder.permuteIndicesArrayToWk(chunkShape))}))") val totalOffset: Array[Int] = offset.zip(header.voxelOffset).map { case (o, v) => o - v }.padTo(offset.length, 0) - val chunkIndices = ChunkUtils.computeChunkIndices(datasetShape.map(axisOrder.permuteIndicesArrayToWk), - axisOrder.permuteIndicesArrayToWk(chunkShape), + val chunkIndices = ChunkUtils.computeChunkIndices(datasetShape.map(fullAxisOrder.permuteIndicesArrayToWk), + fullAxisOrder.permuteIndicesArrayToWk(chunkShape), shape, totalOffset) if (partialCopyingIsNotNeeded(shape, totalOffset, chunkIndices)) { for { chunkIndex <- chunkIndices.headOption.toFox - sourceChunk: MultiArray <- getSourceChunkDataWithCache(axisOrder.permuteIndicesWkToArray(chunkIndex), + sourceChunk: MultiArray <- getSourceChunkDataWithCache(fullAxisOrder.permuteIndicesWkToArray(chunkIndex), useSkipTypingShortcut = true) } yield sourceChunk } else { @@ -155,10 +159,10 @@ class DatasetArray(vaultPath: VaultPath, val targetInCOrder: MultiArray = MultiArrayUtils.orderFlippedView(targetMultiArray) val copiedFuture = Fox.combined(chunkIndices.map { chunkIndex: Array[Int] => for { - 
sourceChunk: MultiArray <- getSourceChunkDataWithCache(axisOrder.permuteIndicesWkToArray(chunkIndex)) + sourceChunk: MultiArray <- getSourceChunkDataWithCache(fullAxisOrder.permuteIndicesWkToArray(chunkIndex)) offsetInChunk = computeOffsetInChunk(chunkIndex, totalOffset) sourceChunkInCOrder: MultiArray = MultiArrayUtils.axisOrderXYZView(sourceChunk, - axisOrder, + fullAxisOrder, flip = header.order != ArrayOrder.C) _ <- tryo(MultiArrayUtils.copyRange(offsetInChunk, sourceChunkInCOrder, targetInCOrder)) ?~> formatCopyRangeError( offsetInChunk, @@ -183,9 +187,11 @@ class DatasetArray(vaultPath: VaultPath, s"${dataSourceId}__${layerName}__${vaultPath}__chunk_${chunkIndex.mkString(",")}" private def getSourceChunkDataWithCache(chunkIndex: Array[Int], useSkipTypingShortcut: Boolean = false)( - implicit ec: ExecutionContext): Fox[MultiArray] = + implicit ec: ExecutionContext): Fox[MultiArray] = { + logger.info(s"reading chunk ${printAsInner(chunkIndex)}") sharedChunkContentsCache.getOrLoad(chunkContentsCacheKey(chunkIndex), _ => readSourceChunkData(chunkIndex, useSkipTypingShortcut)) + } private def readSourceChunkData(chunkIndex: Array[Int], useSkipTypingShortcut: Boolean)( implicit ec: ExecutionContext): Fox[MultiArray] = @@ -229,11 +235,11 @@ class DatasetArray(vaultPath: VaultPath, private def computeOffsetInChunk(chunkIndex: Array[Int], globalOffset: Array[Int]): Array[Int] = chunkIndex.indices.map { dim => - globalOffset(dim) - (chunkIndex(dim) * axisOrder.permuteIndicesArrayToWk(chunkShape)(dim)) + globalOffset(dim) - (chunkIndex(dim) * fullAxisOrder.permuteIndicesArrayToWk(chunkShape)(dim)) }.toArray override def toString: String = - s"${getClass.getCanonicalName} {axisOrder=$axisOrder shape=${header.datasetShape.mkString(",")} chunks=${header.chunkShape.mkString( + s"${getClass.getCanonicalName} {fullAxisOrder=$fullAxisOrder shape=${header.datasetShape.mkString(",")} chunkShape=${header.chunkShape.mkString( ",")} dtype=${header.resolvedDataType} 
fillValue=${header.fillValueNumber}, ${header.compressorImpl}, byteOrder=${header.byteOrder}, vault=${vaultPath.summary}}" } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala index 580dbbe8fad..2e7d8eef467 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala @@ -125,15 +125,9 @@ object MultiArrayUtils { source.permute(permutation) } - def axisOrderXYZView(source: MultiArray, axisOrder: AxisOrder, flip: Boolean): MultiArray = { - /* create a view in which the last three axes are XYZ, rest unchanged - * optionally flip the axes afterwards - * - * Note that we are at this point unsure if this function should be using the *inverse* permutation. - * For all cases we could test, the two are identical. Beware of this when debugging future datasets, - * e.g. 
with axis order ZXY - */ - val permutation = axisOrder.wkToArrayPermutation(source.getRank) + def axisOrderXYZView(source: MultiArray, fullAxisOrder: FullAxisOrder, flip: Boolean): MultiArray = { + // create a view in which the last axes are (c)XYZ, the rest are the additional axes + val permutation = fullAxisOrder.arrayToWkPermutation val flippedIfNeeded = if (flip) permutation.reverse else permutation source.permute(flippedIfNeeded) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala index f23ef3833fc..8f7f9c2a62b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala @@ -124,8 +124,8 @@ class WKWArray(vaultPath: VaultPath, private def chunkIndexToShardIndex(chunkIndex: Array[Int]) = ChunkUtils.computeChunkIndices( - header.datasetShape.map(axisOrder.permuteIndicesArrayToWk), - axisOrder.permuteIndicesArrayToWk(header.shardShape), + header.datasetShape.map(fullAxisOrder.permuteIndicesArrayToWk), + fullAxisOrder.permuteIndicesArrayToWk(header.shardShape), header.chunkShape, chunkIndex.zip(header.chunkShape).map { case (i, s) => i * s } ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala index f79b1f112ad..b4390e09f1d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala @@ -158,8 +158,8 @@ class Zarr3Array(vaultPath: VaultPath, private def chunkIndexToShardIndex(chunkIndex: Array[Int]) = ChunkUtils.computeChunkIndices( - 
header.datasetShape.map(axisOrder.permuteIndicesArrayToWk), - axisOrder.permuteIndicesArrayToWk(header.outerChunkShape), + header.datasetShape.map(fullAxisOrder.permuteIndicesArrayToWk), + fullAxisOrder.permuteIndicesArrayToWk(header.outerChunkShape), header.chunkShape, chunkIndex.zip(header.chunkShape).map { case (i, s) => i * s } ) From deaee6b34969cd991ce72afefb2a6533f9243901 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 30 Jan 2024 14:59:24 +0100 Subject: [PATCH 06/14] skip c order indirection --- .../datastore/datareaders/AxisOrder.scala | 3 +-- .../datastore/datareaders/ChunkUtils.scala | 1 - .../datastore/datareaders/DatasetArray.scala | 17 ++++++++--------- .../datastore/datareaders/MultiArrayUtils.scala | 5 ----- 4 files changed, 9 insertions(+), 17 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala index 92f2e12d97c..3f0839c4461 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala @@ -112,8 +112,7 @@ object FullAxisOrder { FullAxisOrder(asArray.toVector) } - // Use only for debugging/developing, with single-char axis names, no duplicates! 
- @deprecated + @deprecated(message = "Use only for debugging/developing, with single-char axis names, no duplicates!", since = "0") def fromString(axisOrderLiteral: String): FullAxisOrder = FullAxisOrder(axisOrderLiteral.map(char => Axis(name = char.toString))) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala index 6f28086cdea..2480a6b18ce 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala @@ -1,6 +1,5 @@ package com.scalableminds.webknossos.datastore.datareaders -import com.scalableminds.webknossos.datastore.datareaders.ChunkUtils.logger import com.typesafe.scalalogging.LazyLogging object ChunkUtils extends LazyLogging { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index cc0d498909c..5ff46c7c23b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -156,18 +156,17 @@ class DatasetArray(vaultPath: VaultPath, } else { val targetBuffer = MultiArrayUtils.createDataBuffer(header.resolvedDataType, shape) val targetMultiArray = MultiArrayUtils.createArrayWithGivenStorage(targetBuffer, shape.reverse) - val targetInCOrder: MultiArray = MultiArrayUtils.orderFlippedView(targetMultiArray) val copiedFuture = Fox.combined(chunkIndices.map { chunkIndex: Array[Int] => for { sourceChunk: MultiArray <- getSourceChunkDataWithCache(fullAxisOrder.permuteIndicesWkToArray(chunkIndex)) - offsetInChunk = computeOffsetInChunk(chunkIndex, totalOffset) - sourceChunkInCOrder: 
MultiArray = MultiArrayUtils.axisOrderXYZView(sourceChunk, - fullAxisOrder, - flip = header.order != ArrayOrder.C) - _ <- tryo(MultiArrayUtils.copyRange(offsetInChunk, sourceChunkInCOrder, targetInCOrder)) ?~> formatCopyRangeError( - offsetInChunk, - sourceChunkInCOrder, - targetInCOrder) + sourceChunkInWkFOrder: MultiArray = MultiArrayUtils.axisOrderXYZView(sourceChunk, + fullAxisOrder, + flip = header.order == ArrayOrder.C) + offsetInChunkFOrder = computeOffsetInChunk(chunkIndex, totalOffset).reverse + _ <- tryo(MultiArrayUtils.copyRange(offsetInChunkFOrder, sourceChunkInWkFOrder, targetMultiArray)) ?~> formatCopyRangeError( + offsetInChunkFOrder, + sourceChunkInWkFOrder, + targetMultiArray) } yield () }) for { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala index 2e7d8eef467..19c8c290068 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala @@ -120,11 +120,6 @@ object MultiArrayUtils { def set(sourceIterator: IndexIterator, targetIterator: IndexIterator): Unit } - def orderFlippedView(source: MultiArray): MultiArray = { - val permutation = source.getShape.indices.reverse.toArray - source.permute(permutation) - } - def axisOrderXYZView(source: MultiArray, fullAxisOrder: FullAxisOrder, flip: Boolean): MultiArray = { // create a view in which the last axes are (c)XYZ, the rest are the additional axes val permutation = fullAxisOrder.arrayToWkPermutation From a4186edc6aa17b9d985972a33e1e8d4d86accc37 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 30 Jan 2024 15:11:58 +0100 Subject: [PATCH 07/14] remove logging --- .../datastore/datareaders/ChunkUtils.scala | 7 +------ .../datastore/datareaders/DatasetArray.scala | 17 +++++------------ 2 files 
changed, 6 insertions(+), 18 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala index 2480a6b18ce..60378c05c7b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala @@ -1,8 +1,6 @@ package com.scalableminds.webknossos.datastore.datareaders -import com.typesafe.scalalogging.LazyLogging - -object ChunkUtils extends LazyLogging { +object ChunkUtils { def computeChunkIndices(arrayShapeOpt: Option[Array[Int]], arrayChunkShape: Array[Int], selectedShape: Array[Int], @@ -40,9 +38,6 @@ object ChunkUtils extends LazyLogging { dimIndex = -1 } } - logger.info(s"selected offset: ${selectedOffset.mkString(",")}, selectedShape: ${selectedShape - .mkString(",")} arrayShapeOpt ${arrayShapeOpt.map(_.mkString(","))} arrayChunkShape ${arrayChunkShape.mkString( - ",")}. returning chunkIndices ${chunkIndices.toList.map(_.mkString(","))}") chunkIndices.toList } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index 5ff46c7c23b..dfd7454c9eb 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -137,11 +137,6 @@ class DatasetArray(vaultPath: VaultPath, // This function will internally adapt to the array's axis order so that XYZ data in fortran-order is returned. 
private def readAsFortranOrder(shape: Array[Int], offset: Array[Int])( implicit ec: ExecutionContext): Fox[MultiArray] = { - logger.info(s"full order: $fullAxisOrder") - logger.info(s"reading shape ${printAsOuter(shape)} at ${printAsOuter(offset)}") - logger.info( - s"ds shape: ${datasetShape.map(printAsInner)} (permuted to outer(${datasetShape.map(fullAxisOrder.permuteIndicesArrayToWk).map(printAsOuter)})), chunk shape: ${printAsInner( - chunkShape)} (permuted to outer(${printAsOuter(fullAxisOrder.permuteIndicesArrayToWk(chunkShape))}))") val totalOffset: Array[Int] = offset.zip(header.voxelOffset).map { case (o, v) => o - v }.padTo(offset.length, 0) val chunkIndices = ChunkUtils.computeChunkIndices(datasetShape.map(fullAxisOrder.permuteIndicesArrayToWk), fullAxisOrder.permuteIndicesArrayToWk(chunkShape), @@ -176,8 +171,8 @@ class DatasetArray(vaultPath: VaultPath, } private def formatCopyRangeError(offsetInChunk: Array[Int], sourceChunk: MultiArray, target: MultiArray): String = - s"Copying data from dataset chunk failed. Chunk shape: ${sourceChunk.getShape.mkString(",")}, target shape: ${target.getShape - .mkString(",")}, offset: ${offsetInChunk.mkString(",")}" + s"Copying data from dataset chunk failed. Chunk shape: ${printAsOuter(sourceChunk.getShape)}, target shape: ${printAsOuter( + target.getShape)}, offsetInChunk: ${printAsOuter(offsetInChunk)}. Axis order: $fullAxisOrder (outer: ${fullAxisOrder.toStringWk})" protected def getShardedChunkPathAndRange(chunkIndex: Array[Int])( implicit ec: ExecutionContext): Fox[(VaultPath, NumericRange[Long])] = ??? 
@@ -186,11 +181,9 @@ class DatasetArray(vaultPath: VaultPath, s"${dataSourceId}__${layerName}__${vaultPath}__chunk_${chunkIndex.mkString(",")}" private def getSourceChunkDataWithCache(chunkIndex: Array[Int], useSkipTypingShortcut: Boolean = false)( - implicit ec: ExecutionContext): Fox[MultiArray] = { - logger.info(s"reading chunk ${printAsInner(chunkIndex)}") + implicit ec: ExecutionContext): Fox[MultiArray] = sharedChunkContentsCache.getOrLoad(chunkContentsCacheKey(chunkIndex), _ => readSourceChunkData(chunkIndex, useSkipTypingShortcut)) - } private def readSourceChunkData(chunkIndex: Array[Int], useSkipTypingShortcut: Boolean)( implicit ec: ExecutionContext): Fox[MultiArray] = @@ -238,8 +231,8 @@ class DatasetArray(vaultPath: VaultPath, }.toArray override def toString: String = - s"${getClass.getCanonicalName} {fullAxisOrder=$fullAxisOrder shape=${header.datasetShape.mkString(",")} chunkShape=${header.chunkShape.mkString( - ",")} dtype=${header.resolvedDataType} fillValue=${header.fillValueNumber}, ${header.compressorImpl}, byteOrder=${header.byteOrder}, vault=${vaultPath.summary}}" + s"${getClass.getCanonicalName} fullAxisOrder=$fullAxisOrder shape=${header.datasetShape.map(printAsInner)} chunkShape=${printAsInner( + header.chunkShape)} dtype=${header.resolvedDataType} fillValue=${header.fillValueNumber}, ${header.compressorImpl}, byteOrder=${header.byteOrder}, vault=${vaultPath.summary}}" } From 29eccb04d8da338525a05fb8688a5581ddc5ca6b Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 30 Jan 2024 18:03:48 +0100 Subject: [PATCH 08/14] fix permutation --- .../datastore/datareaders/AxisOrder.scala | 9 ++++--- .../datastore/datareaders/ChunkTyper.scala | 2 +- .../datastore/datareaders/DatasetArray.scala | 26 +++++++++---------- .../datareaders/MultiArrayUtils.scala | 12 ++++----- 4 files changed, 26 insertions(+), 23 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala index 3f0839c4461..9933f950a7f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala @@ -74,7 +74,10 @@ case class FullAxisOrder(axes: Seq[Axis]) { permutationMutable } - def wkToArrayPermutation: Array[Int] = { + lazy val arrayFToWkFPermutation: Array[Int] = arrayToWkPermutation.reverse.map(elem => rank - 1 - elem) + lazy val arrayCToWkFPermutation: Array[Int] = arrayToWkPermutation.reverse + + lazy val wkToArrayPermutation: Array[Int] = { val permutationMutable: Array[Int] = Array.fill(arrayToWkPermutation.length)(0) arrayToWkPermutation.zipWithIndex.foreach { case (p, i) => @@ -89,6 +92,7 @@ case class FullAxisOrder(axes: Seq[Axis]) { def permuteIndicesArrayToWk(indices: Array[Int]): Array[Int] = arrayToWkPermutation.map(indices(_)) + def printPermuted(permutation: Array[Int]): String = permutation.map(axes(_)).map(_.name).mkString("") } object FullAxisOrder { @@ -112,8 +116,7 @@ object FullAxisOrder { FullAxisOrder(asArray.toVector) } - @deprecated(message = "Use only for debugging/developing, with single-char axis names, no duplicates!", since = "0") - def fromString(axisOrderLiteral: String): FullAxisOrder = + def fromStringForTests(axisOrderLiteral: String): FullAxisOrder = FullAxisOrder(axisOrderLiteral.map(char => Axis(name = char.toString))) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkTyper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkTyper.scala index 955a7b35ab4..a5ba7c347c0 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkTyper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkTyper.scala @@ -26,7 +26,7 @@ abstract class ChunkTyper { 
def wrapAndType(bytes: Array[Byte], chunkShape: Array[Int]): Box[MultiArray] def createFromFillValue(chunkShape: Array[Int]): Box[MultiArray] = - MultiArrayUtils.createFilledArray(ma2DataType, chunkShape, header.fillValueNumber) + MultiArrayUtils.createFilledArray(ma2DataType, chunkShapeOrdered(chunkShape), header.fillValueNumber) // Chunk shape in header is in C-Order (XYZ), but data may be in F-Order (ZYX), so the chunk shape // associated with the array needs to be adjusted. diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index dfd7454c9eb..f085ed3225d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -8,7 +8,6 @@ import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis -import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.Box.tryo import ucar.ma2.{Array => MultiArray} @@ -26,8 +25,7 @@ class DatasetArray(vaultPath: VaultPath, axisOrder: AxisOrder, channelIndex: Option[Int], additionalAxes: Option[Seq[AdditionalAxis]], - sharedChunkContentsCache: AlfuCache[String, MultiArray]) - extends LazyLogging { + sharedChunkContentsCache: AlfuCache[String, MultiArray]) { protected lazy val fullAxisOrder: FullAxisOrder = FullAxisOrder.fromAxisOrderAndAdditionalAxes(axisOrder, additionalAxes) @@ -111,9 +109,10 @@ class DatasetArray(vaultPath: VaultPath, asBytes <- BytesConverter.toByteArray(typedMultiArray, header.resolvedDataType, ByteOrder.LITTLE_ENDIAN) } yield asBytes - private def printAsInner(values: 
Array[Int]): String = { + private def printAsInner(values: Array[Int], flip: Boolean = false): String = { val axisNames = fullAxisOrder.axes.map(_.name) - val raw = axisNames + val axisNamesFlippedIfNeeded = if (flip) axisNames.reverse else axisNames + val raw = axisNamesFlippedIfNeeded .zip(values) .map { tuple => f"${tuple._1}=${tuple._2}" @@ -122,9 +121,10 @@ class DatasetArray(vaultPath: VaultPath, f"inner($raw)" } - private def printAsOuter(values: Array[Int]): String = { + private def printAsOuter(values: Array[Int], flip: Boolean = false): String = { val axisNames = fullAxisOrder.axesWk.map(_.name) - val raw = axisNames + val axisNamesFlippedIfNeeded = if (flip) axisNames.reverse else axisNames + val raw = axisNamesFlippedIfNeeded .zip(values) .map { tuple => f"${tuple._1}=${tuple._2}" @@ -154,9 +154,8 @@ class DatasetArray(vaultPath: VaultPath, val copiedFuture = Fox.combined(chunkIndices.map { chunkIndex: Array[Int] => for { sourceChunk: MultiArray <- getSourceChunkDataWithCache(fullAxisOrder.permuteIndicesWkToArray(chunkIndex)) - sourceChunkInWkFOrder: MultiArray = MultiArrayUtils.axisOrderXYZView(sourceChunk, - fullAxisOrder, - flip = header.order == ArrayOrder.C) + sourceChunkInWkFOrder: MultiArray = MultiArrayUtils + .axisOrderXYZViewF(sourceChunk, fullAxisOrder, sourceIsF = header.order == ArrayOrder.F) offsetInChunkFOrder = computeOffsetInChunk(chunkIndex, totalOffset).reverse _ <- tryo(MultiArrayUtils.copyRange(offsetInChunkFOrder, sourceChunkInWkFOrder, targetMultiArray)) ?~> formatCopyRangeError( offsetInChunkFOrder, @@ -171,8 +170,9 @@ class DatasetArray(vaultPath: VaultPath, } private def formatCopyRangeError(offsetInChunk: Array[Int], sourceChunk: MultiArray, target: MultiArray): String = - s"Copying data from dataset chunk failed. Chunk shape: ${printAsOuter(sourceChunk.getShape)}, target shape: ${printAsOuter( - target.getShape)}, offsetInChunk: ${printAsOuter(offsetInChunk)}. 
Axis order: $fullAxisOrder (outer: ${fullAxisOrder.toStringWk})" + s"Copying data from dataset chunk failed. Chunk shape: ${printAsOuter(sourceChunk.getShape, flip = true)}, target shape: ${printAsOuter( + target.getShape, + flip = true)}, offsetInChunk: ${printAsOuter(offsetInChunk, flip = true)}. Axis order (C-order): $fullAxisOrder (outer: ${fullAxisOrder.toStringWk})" protected def getShardedChunkPathAndRange(chunkIndex: Array[Int])( implicit ec: ExecutionContext): Fox[(VaultPath, NumericRange[Long])] = ??? @@ -231,7 +231,7 @@ class DatasetArray(vaultPath: VaultPath, }.toArray override def toString: String = - s"${getClass.getCanonicalName} fullAxisOrder=$fullAxisOrder shape=${header.datasetShape.map(printAsInner)} chunkShape=${printAsInner( + s"${getClass.getCanonicalName} fullAxisOrder=$fullAxisOrder shape=${header.datasetShape.map(s => printAsInner(s))} chunkShape=${printAsInner( header.chunkShape)} dtype=${header.resolvedDataType} fillValue=${header.fillValueNumber}, ${header.compressorImpl}, byteOrder=${header.byteOrder}, vault=${vaultPath.summary}}" } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala index 19c8c290068..f1af69b890c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala @@ -1,13 +1,14 @@ package com.scalableminds.webknossos.datastore.datareaders import ArrayDataType.ArrayDataType +import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.Box import net.liftweb.common.Box.tryo import ucar.ma2.{IndexIterator, InvalidRangeException, Range, Array => MultiArray, DataType => MADataType} import java.util -object MultiArrayUtils { +object MultiArrayUtils extends LazyLogging { def createDataBuffer(dataType: ArrayDataType, 
shape: Array[Int]): Object = { val length = shape.product @@ -120,11 +121,10 @@ object MultiArrayUtils { def set(sourceIterator: IndexIterator, targetIterator: IndexIterator): Unit } - def axisOrderXYZView(source: MultiArray, fullAxisOrder: FullAxisOrder, flip: Boolean): MultiArray = { - // create a view in which the last axes are (c)XYZ, the rest are the additional axes - val permutation = fullAxisOrder.arrayToWkPermutation - val flippedIfNeeded = if (flip) permutation.reverse else permutation - source.permute(flippedIfNeeded) + def axisOrderXYZViewF(source: MultiArray, fullAxisOrder: FullAxisOrder, sourceIsF: Boolean): MultiArray = { + // create view with F order and wk-compatible axis order + val permutation = if (sourceIsF) fullAxisOrder.arrayFToWkFPermutation else fullAxisOrder.arrayCToWkFPermutation + source.permute(permutation) } } From 3467e3b1302926f4d5f2ca457dfb45b43a2718eb Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 31 Jan 2024 15:38:41 +0100 Subject: [PATCH 09/14] cleanup, fix volume download, fix 2d, fix reading without additionalCoordinates --- .../AxisOrderPermutationTestSuite.scala | 53 +++++++++++++ .../datastore/datareaders/AxisOrder.scala | 20 ++--- .../datastore/datareaders/DatasetArray.scala | 38 +++++---- .../datastore/datareaders/n5/N5Array.scala | 20 ++--- .../precomputed/PrecomputedArray.scala | 23 +++--- .../datastore/datareaders/wkw/WKWArray.scala | 11 ++- .../datareaders/zarr/ZarrArray.scala | 25 +++--- .../datareaders/zarr3/Zarr3Array.scala | 20 ++--- .../models/datasource/AdditionalAxis.scala | 5 +- .../volume/VolumeTracingService.scala | 1 - .../volume/Zarr3BucketStreamSink.scala | 78 ++++++++++++------- 11 files changed, 199 insertions(+), 95 deletions(-) create mode 100644 test/backend/AxisOrderPermutationTestSuite.scala diff --git a/test/backend/AxisOrderPermutationTestSuite.scala b/test/backend/AxisOrderPermutationTestSuite.scala new file mode 100644 index 00000000000..05ca51f7ecd --- /dev/null +++ 
b/test/backend/AxisOrderPermutationTestSuite.scala @@ -0,0 +1,53 @@ +package backend + +import com.scalableminds.webknossos.datastore.datareaders.{Axis, FullAxisOrder} +import org.scalatestplus.play.PlaySpec + +class AxisOrderPermutationTestSuite extends PlaySpec { + + private def permute(permutation: Array[Int], str: String): String = + permutation.map(i => str(i)).mkString("") + + def orderFromStringChars(str: String) = FullAxisOrder(str.map(char => Axis(name = char.toString))) + + private def permuteAxisOrderArrayCtoWkC(str: String) = { + val axisOrder = orderFromStringChars(str) + permute(axisOrder.arrayToWkPermutation, axisOrder.toString) + } + + private def permuteAxisOrderArrayFtoWkF(str: String) = { + val axisOrder = orderFromStringChars(str) + val axisOrderFStr = axisOrder.toString.reverse + permute(axisOrder.arrayFToWkFPermutation, axisOrderFStr) + } + + private def permuteAxisOrderArrayCtoWkF(str: String) = { + val axisOrder = orderFromStringChars(str) + permute(axisOrder.arrayCToWkFPermutation, axisOrder.toString) + } + + "AxisOrderPermutation" should { + "correctly permute from C (array) to C (wk)" in { + assert(permuteAxisOrderArrayCtoWkC("xyz") == "xyz") + assert(permuteAxisOrderArrayCtoWkC("cxyz") == "cxyz") + assert(permuteAxisOrderArrayCtoWkC("xycz") == "cxyz") + assert(permuteAxisOrderArrayCtoWkC("xasdfczy") == "asdfcxyz") + } + + "correctly permute from F (array) to F (wk)" in { + assert(permuteAxisOrderArrayFtoWkF("xyz") == "zyx") + assert(permuteAxisOrderArrayFtoWkF("cxyz") == "zyxc") + assert(permuteAxisOrderArrayFtoWkF("xycz") == "zyxc") + assert(permuteAxisOrderArrayFtoWkF("xasdfczy") == "zyxcfdsa") + } + + "correctly permute from C (array) to F (wk)" in { + assert(permuteAxisOrderArrayCtoWkF("xyz") == "zyx") + assert(permuteAxisOrderArrayCtoWkF("cxyz") == "zyxc") + assert(permuteAxisOrderArrayCtoWkF("xycz") == "zyxc") + assert(permuteAxisOrderArrayCtoWkF("xasdfczy") == "zyxcfdsa") + assert(permuteAxisOrderArrayCtoWkF("tasxdfczy") == 
"zyxcfdsat") + } + } + +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala index 9933f950a7f..41f6eec0bc9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala @@ -20,6 +20,7 @@ case class AxisOrder(x: Int, y: Int, z: Option[Int], c: Option[Int] = None) { val lengthOfC = if (c.isDefined) 1 else 0 lengthOfC + 2 + lengthOfZ } + } object AxisOrder { @@ -44,6 +45,7 @@ object AxisOrder { case class Axis(name: String) +// Constructed from AxisOrder and AdditionalAxes. Always contains the full rank (plus 1 for z in 2d adapter case). case class FullAxisOrder(axes: Seq[Axis]) { override def toString: String = axes.map(_.name).mkString("") @@ -77,7 +79,7 @@ case class FullAxisOrder(axes: Seq[Axis]) { lazy val arrayFToWkFPermutation: Array[Int] = arrayToWkPermutation.reverse.map(elem => rank - 1 - elem) lazy val arrayCToWkFPermutation: Array[Int] = arrayToWkPermutation.reverse - lazy val wkToArrayPermutation: Array[Int] = { + private lazy val wkToArrayPermutation: Array[Int] = { val permutationMutable: Array[Int] = Array.fill(arrayToWkPermutation.length)(0) arrayToWkPermutation.zipWithIndex.foreach { case (p, i) => @@ -92,13 +94,14 @@ case class FullAxisOrder(axes: Seq[Axis]) { def permuteIndicesArrayToWk(indices: Array[Int]): Array[Int] = arrayToWkPermutation.map(indices(_)) - def printPermuted(permutation: Array[Int]): String = permutation.map(axes(_)).map(_.name).mkString("") } object FullAxisOrder { - def fromAxisOrderAndAdditionalAxes(axisOrder: AxisOrder, + + def fromAxisOrderAndAdditionalAxes(rank: Int, + axisOrder: AxisOrder, additionalAxes: Option[Seq[AdditionalAxis]]): FullAxisOrder = { - val asArray: Array[Axis] = Array.fill(additionalAxes.map(_.length).getOrElse(0) + 
axisOrder.length)(Axis("")) + val asArray: Array[Axis] = Array.fill(rank)(Axis("")) asArray(axisOrder.x) = Axis("x") asArray(axisOrder.y) = Axis("y") axisOrder.c.foreach { c => @@ -107,16 +110,13 @@ object FullAxisOrder { axisOrder.z.foreach { z => asArray(z) = Axis("z") } + if (!axisOrder.hasZAxis) { + asArray(asArray.length - 1) = Axis("z") // Adapter for reading 2D datasets + } for (additionalAxis <- additionalAxes.getOrElse(Seq.empty)) { asArray(additionalAxis.index) = Axis(additionalAxis.name) } - if (!axisOrder.hasZAxis) { - asArray(asArray.length) = Axis("z") // Adapter for reading 2D datasets - } FullAxisOrder(asArray.toVector) } - def fromStringForTests(axisOrderLiteral: String): FullAxisOrder = - FullAxisOrder(axisOrderLiteral.map(char => Axis(name = char.toString))) - } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index f085ed3225d..77e08ebc2bf 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -28,14 +28,14 @@ class DatasetArray(vaultPath: VaultPath, sharedChunkContentsCache: AlfuCache[String, MultiArray]) { protected lazy val fullAxisOrder: FullAxisOrder = - FullAxisOrder.fromAxisOrderAndAdditionalAxes(axisOrder, additionalAxes) + FullAxisOrder.fromAxisOrderAndAdditionalAxes(rank, axisOrder, additionalAxes) protected lazy val chunkReader: ChunkReader = new ChunkReader(header) private lazy val additionalAxesMap: Map[String, AdditionalAxis] = additionalAxes match { case Some(additionalAxis) => - additionalAxis.map(additionalAxis => (additionalAxis.name -> additionalAxis)).toMap + additionalAxis.map(additionalAxis => additionalAxis.name -> additionalAxis).toMap case None => Map() } @@ -68,8 +68,20 @@ class DatasetArray(vaultPath: VaultPath, 
shapeXYZ: Vec3Int, offsetXYZ: Vec3Int, additionalCoordinatesOpt: Option[Seq[AdditionalCoordinate]], - shouldReadUint24: Boolean = false)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { - + shouldReadUint24: Boolean = false)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + for { + (shapeArray, offsetArray) <- tryo(constructShapeAndOffsetArrays( + shapeXYZ, + offsetXYZ, + additionalCoordinatesOpt, + shouldReadUint24)) ?~> "failed to construct shape and offset array for requested coordinates" + bytes <- readBytes(shapeArray, offsetArray) + } yield bytes + + private def constructShapeAndOffsetArrays(shapeXYZ: Vec3Int, + offsetXYZ: Vec3Int, + additionalCoordinatesOpt: Option[Seq[AdditionalCoordinate]], + shouldReadUint24: Boolean = false): (Array[Int], Array[Int]) = { val shapeArray: Array[Int] = Array.fill(rank)(1) shapeArray(rank - 3) = shapeXYZ.x shapeArray(rank - 2) = shapeXYZ.y @@ -99,7 +111,7 @@ class DatasetArray(vaultPath: VaultPath, // shapeArray at positions of additional coordinates is always 1 } } - readBytes(shapeArray, offsetArray) + (shapeArray, offsetArray) } // returns byte array in fortran-order with little-endian values @@ -121,10 +133,9 @@ class DatasetArray(vaultPath: VaultPath, f"inner($raw)" } - private def printAsOuter(values: Array[Int], flip: Boolean = false): String = { - val axisNames = fullAxisOrder.axesWk.map(_.name) - val axisNamesFlippedIfNeeded = if (flip) axisNames.reverse else axisNames - val raw = axisNamesFlippedIfNeeded + private def printAsOuterF(values: Array[Int]): String = { + val axisNamesFOrder = fullAxisOrder.axesWk.map(_.name).reverse + val raw = axisNamesFOrder .zip(values) .map { tuple => f"${tuple._1}=${tuple._2}" @@ -170,12 +181,11 @@ class DatasetArray(vaultPath: VaultPath, } private def formatCopyRangeError(offsetInChunk: Array[Int], sourceChunk: MultiArray, target: MultiArray): String = - s"Copying data from dataset chunk failed. 
Chunk shape: ${printAsOuter(sourceChunk.getShape, flip = true)}, target shape: ${printAsOuter( - target.getShape, - flip = true)}, offsetInChunk: ${printAsOuter(offsetInChunk, flip = true)}. Axis order (C-order): $fullAxisOrder (outer: ${fullAxisOrder.toStringWk})" + s"Copying data from dataset chunk failed. Chunk shape (F): ${printAsOuterF(sourceChunk.getShape)}, target shape (F): ${printAsOuterF( + target.getShape)}, offsetInChunk: ${printAsOuterF(offsetInChunk)}. Axis order (C): $fullAxisOrder (outer: ${fullAxisOrder.toStringWk})" protected def getShardedChunkPathAndRange(chunkIndex: Array[Int])( - implicit ec: ExecutionContext): Fox[(VaultPath, NumericRange[Long])] = ??? + implicit ec: ExecutionContext): Fox[(VaultPath, NumericRange[Long])] = ??? // Defined in subclass private def chunkContentsCacheKey(chunkIndex: Array[Int]): String = s"${dataSourceId}__${layerName}__${vaultPath}__chunk_${chunkIndex.mkString(",")}" @@ -240,5 +250,5 @@ object DatasetArray { private val chunkSizeLimitBytes: Int = 300 * 1024 * 1024 def assertChunkSizeLimit(bytesPerChunk: Int)(implicit ec: ExecutionContext): Fox[Unit] = - bool2Fox(bytesPerChunk <= chunkSizeLimitBytes) ?~> f"Array chunk size exceeds limit of ${chunkSizeLimitBytes}, got ${bytesPerChunk}" + bool2Fox(bytesPerChunk <= chunkSizeLimitBytes) ?~> f"Array chunk size exceeds limit of $chunkSizeLimitBytes, got $bytesPerChunk" } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala index 1d26dd1bc1f..27541d75dd1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala @@ -8,6 +8,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis 
import com.typesafe.scalalogging.LazyLogging import com.scalableminds.util.tools.Fox.box2Fox +import net.liftweb.common.Box.tryo import ucar.ma2.{Array => MultiArray} import scala.concurrent.ExecutionContext @@ -26,15 +27,16 @@ object N5Array extends LazyLogging { .readBytes() ?~> s"Could not read header at ${N5Header.FILENAME_ATTRIBUTES_JSON}" header <- JsonHelper.parseAndValidateJson[N5Header](headerBytes) ?~> "Could not parse array header" _ <- DatasetArray.assertChunkSizeLimit(header.bytesPerChunk) - } yield - new N5Array(path, - dataSourceId, - layerName, - header, - axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(header.rank)), - channelIndex, - additionalAxes, - sharedChunkContentsCache) + array <- tryo( + new N5Array(path, + dataSourceId, + layerName, + header, + axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(header.rank)), + channelIndex, + additionalAxes, + sharedChunkContentsCache)) ?~> "Could not open n5 array" + } yield array } class N5Array(vaultPath: VaultPath, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala index 50541d163cc..f7cc98ef8d1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala @@ -34,17 +34,18 @@ object PrecomputedArray extends LazyLogging { scale <- rootHeader.getScale(magPath.basename) ?~> s"Header does not contain scale ${magPath.basename}" scaleHeader = PrecomputedScaleHeader(scale, rootHeader) _ <- DatasetArray.assertChunkSizeLimit(scaleHeader.bytesPerChunk) - } yield - new PrecomputedArray( - magPath, - dataSourceId, - layerName, - scaleHeader, - axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(scaleHeader.rank)), - channelIndex, - additionalAxes, - 
sharedChunkContentsCache - ) + array <- tryo( + new PrecomputedArray( + magPath, + dataSourceId, + layerName, + scaleHeader, + axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(scaleHeader.rank)), + channelIndex, + additionalAxes, + sharedChunkContentsCache + )) ?~> "Could not open neuroglancerPrecomputed array" + } yield array } class PrecomputedArray(vaultPath: VaultPath, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala index 8f7f9c2a62b..b2d30cba9c0 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala @@ -15,6 +15,7 @@ import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, ChunkUtils import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataSourceId} import net.liftweb.common.Box +import net.liftweb.common.Box.tryo import ucar.ma2.{Array => MultiArray} import java.io.ByteArrayInputStream @@ -31,7 +32,15 @@ object WKWArray { .readBytes() ?~> s"Could not read header at ${WKWDataFormat.FILENAME_HEADER_WKW}" dataInputStream = new LittleEndianDataInputStream(new ByteArrayInputStream(headerBytes)) header <- WKWHeader(dataInputStream, readJumpTable = false).toFox - } yield new WKWArray(path, dataSourceId, layerName, header, AxisOrder.cxyz, None, None, sharedChunkContentsCache) + array <- tryo(new WKWArray(path, + dataSourceId, + layerName, + header, + AxisOrder.cxyz, + None, + None, + sharedChunkContentsCache)) ?~> "Could not open wkw array" + } yield array } class WKWArray(vaultPath: VaultPath, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala index 5d3f6cf3076..dbc1b5af04d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala @@ -9,6 +9,7 @@ import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.typesafe.scalalogging.LazyLogging +import net.liftweb.common.Box.tryo import scala.concurrent.ExecutionContext @@ -25,17 +26,19 @@ object ZarrArray extends LazyLogging { .readBytes() ?~> s"Could not read header at ${ZarrHeader.FILENAME_DOT_ZARRAY}" header <- JsonHelper.parseAndValidateJson[ZarrHeader](headerBytes) ?~> "Could not parse array header" _ <- DatasetArray.assertChunkSizeLimit(header.bytesPerChunk) - } yield - new ZarrArray( - path, - dataSourceId, - layerName, - header, - axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(header.rank)), - channelIndex, - additionalAxes, - sharedChunkContentsCache - ) + array <- tryo( + new ZarrArray( + path, + dataSourceId, + layerName, + header, + axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(header.rank)), + channelIndex, + additionalAxes, + sharedChunkContentsCache + )) ?~> "Could not open zarr2 array" + } yield array + } class ZarrArray(vaultPath: VaultPath, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala index b4390e09f1d..0730302b57b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala @@ -7,6 +7,7 @@ import 
com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, ChunkReade import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataSourceId} import com.typesafe.scalalogging.LazyLogging +import net.liftweb.common.Box.tryo import ucar.ma2.{Array => MultiArray} import scala.collection.immutable.NumericRange @@ -25,15 +26,16 @@ object Zarr3Array extends LazyLogging { headerBytes <- (path / Zarr3ArrayHeader.FILENAME_ZARR_JSON) .readBytes() ?~> s"Could not read header at ${Zarr3ArrayHeader.FILENAME_ZARR_JSON}" header <- JsonHelper.parseAndValidateJson[Zarr3ArrayHeader](headerBytes) ?~> "Could not parse array header" - } yield - new Zarr3Array(path, - dataSourceId, - layerName, - header, - axisOrderOpt.getOrElse(AxisOrder.asCxyzFromRank(header.rank)), - channelIndex, - additionalAxes, - sharedChunkContentsCache) + array <- tryo( + new Zarr3Array(path, + dataSourceId, + layerName, + header, + axisOrderOpt.getOrElse(AxisOrder.asCxyzFromRank(header.rank)), + channelIndex, + additionalAxes, + sharedChunkContentsCache)) ?~> "Could not open zarr3 array" + } yield array } class Zarr3Array(vaultPath: VaultPath, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/AdditionalAxis.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/AdditionalAxis.scala index c69a914eb14..05402b26821 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/AdditionalAxis.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/AdditionalAxis.scala @@ -6,8 +6,9 @@ import play.api.libs.json.{Format, Json} // bounds: lower bound inclusive, upper bound exclusive case class AdditionalAxis(name: String, bounds: Array[Int], index: Int) { - def lowerBound: Int = bounds(0) - def upperBound: Int = bounds(1) + lazy val lowerBound: Int = bounds(0) + lazy val 
upperBound: Int = bounds(1) + lazy val highestValue: Int = upperBound - 1 } object AdditionalAxis { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 840be9dd7b5..f6905b35df8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -378,7 +378,6 @@ class VolumeTracingService @Inject()( new Zarr3BucketStreamSink(dataLayer, tracing.fallbackLayer.nonEmpty)( dataLayer.bucketProvider.bucketStream(Some(tracing.version)), tracing.resolutions.map(mag => vec3IntFromProto(mag)), - tracing.additionalAxes, voxelSize) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala index 8806f506505..e3eb6b4b991 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala @@ -12,9 +12,13 @@ import com.scalableminds.webknossos.datastore.datareaders.{ IntCompressionSetting, StringCompressionSetting } -import com.scalableminds.webknossos.datastore.geometry.AdditionalAxisProto import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, GenericDataSource} +import com.scalableminds.webknossos.datastore.models.datasource.{ + AdditionalAxis, + DataLayer, + DataSourceId, + GenericDataSource +} import 
com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, BucketPosition} import play.api.libs.json.Json @@ -29,24 +33,25 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLay private lazy val defaultLayerName = "volumeAnnotationData" private lazy val dimensionSeparator = "." - def apply(bucketStream: Iterator[(BucketPosition, Array[Byte])], - mags: Seq[Vec3Int], - additionalAxes: Seq[AdditionalAxisProto], - voxelSize: Option[Vec3Double])(implicit ec: ExecutionContext): Iterator[NamedStream] = { - val rank = additionalAxes.length + 4 + private lazy val rank = layer.additionalAxes.getOrElse(Seq.empty).length + 4 + private lazy val additionalAxesSorted = reorderAdditionalAxes(layer.additionalAxes.getOrElse(Seq.empty)) + + def apply(bucketStream: Iterator[(BucketPosition, Array[Byte])], mags: Seq[Vec3Int], voxelSize: Option[Vec3Double])( + implicit ec: ExecutionContext): Iterator[NamedStream] = { + val header = Zarr3ArrayHeader( zarr_format = 3, node_type = "array", // channel, additional axes, XYZ - shape = Array(1) ++ additionalAxes.map(_.bounds.y).toArray ++ layer.boundingBox.bottomRight.toArray, + shape = Array(1) ++ additionalAxesSorted.map(_.highestValue).toArray ++ layer.boundingBox.bottomRight.toArray, data_type = Left(layer.elementClass.toString), chunk_grid = Left( ChunkGridSpecification( "regular", ChunkGridConfiguration( - chunk_shape = Array.fill(additionalAxes.length + 1)(1) ++ Array(DataLayer.bucketLength, - DataLayer.bucketLength, - DataLayer.bucketLength)) + chunk_shape = Array.fill(1 + additionalAxesSorted.length)(1) ++ Array(DataLayer.bucketLength, + DataLayer.bucketLength, + DataLayer.bucketLength)) )), chunk_key_encoding = ChunkKeyEncoding("default", @@ -65,7 +70,7 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLay ) ), storage_transformers = None, - dimension_names = Some(Array("c") ++ additionalAxes.map(_.name).toArray ++ Seq("x", "y", "z")) + dimension_names = 
Some(Array("c") ++ additionalAxesSorted.map(_.name).toArray ++ Seq("x", "y", "z")) ) bucketStream.flatMap { case (bucket, data) => @@ -74,7 +79,7 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLay // If the tracing has no fallback segmentation, all-zero buckets can be omitted entirely None } else { - val filePath = zarrChunkFilePath(defaultLayerName, bucket) + val filePath = zarrChunkFilePath(defaultLayerName, bucket, additionalAxesSorted) Some( NamedFunctionStream( filePath, @@ -88,14 +93,11 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLay } ++ Seq( NamedFunctionStream.fromString( GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON, - Json.prettyPrint(Json.toJson(createVolumeDataSource(layer, voxelSize))) + Json.prettyPrint(Json.toJson(createVolumeDataSource(voxelSize))) )) } - private def createVolumeDataSource(layer: VolumeTracingLayer, - voxelSize: Option[Vec3Double]): GenericDataSource[DataLayer] = { - val additionalAxes = layer.additionalAxes.flatMap(a => if (a.isEmpty) None else Some(a)) - val rank = additionalAxes.map(_.length).getOrElse(0) + 4 + private def createVolumeDataSource(voxelSize: Option[Vec3Double]): GenericDataSource[DataLayer] = { val magLocators = layer.tracing.resolutions.map { mag => MagLocator(mag = vec3IntToProto(mag), axisOrder = Some(AxisOrder(c = Some(0), x = rank - 3, y = rank - 2, z = Some(rank - 1)))) @@ -103,26 +105,48 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLay GenericDataSource( id = DataSourceId("", ""), dataLayers = List( - Zarr3SegmentationLayer(defaultLayerName, - layer.boundingBox, - layer.elementClass, - magLocators.toList, - additionalAxes = additionalAxes)), // TODO their indexes are no longer right + Zarr3SegmentationLayer( + defaultLayerName, + layer.boundingBox, + layer.elementClass, + magLocators.toList, + additionalAxes = + if (additionalAxesSorted.isEmpty) None + else Some(additionalAxesSorted) + )), scale = 
voxelSize.getOrElse(Vec3Double.ones) // Download should still be available if the dataset no longer exists. In that case, the voxel size is unknown ) } - private def zarrChunkFilePath(layerName: String, bucketPosition: BucketPosition): String = { + private def reorderAdditionalAxes(additionalAxes: Seq[AdditionalAxis]): Seq[AdditionalAxis] = { + val additionalAxesStartIndex = 1 // channel comes first + val sorted = additionalAxes.sortBy(_.index) + sorted.zipWithIndex.map { + case (axis, index) => axis.copy(index = index + additionalAxesStartIndex) + } + } + + private def reorderAdditionalCoordinates(additionalCoordinates: Seq[AdditionalCoordinate], + additionalAxesSorted: Seq[AdditionalAxis]): Seq[AdditionalCoordinate] = + additionalCoordinates.sortBy(c => additionalAxesSorted.indexWhere(a => a.name == c.name)) + + private def zarrChunkFilePath(layerName: String, + bucketPosition: BucketPosition, + additionalAxesSorted: Seq[AdditionalAxis]): String = { // In volume annotations, store buckets/chunks as additionalCoordinates, then z,y,x - val additionalCoordinatesPart = additionalCoordinatesFilePath(bucketPosition.additionalCoordinates) + val additionalCoordinatesPart = + additionalCoordinatesFilePath(bucketPosition.additionalCoordinates, additionalAxesSorted) val channelPart = 0 s"$layerName/${bucketPosition.mag.toMagLiteral(allowScalar = true)}/c$dimensionSeparator$channelPart$dimensionSeparator$additionalCoordinatesPart${bucketPosition.bucketX}$dimensionSeparator${bucketPosition.bucketY}$dimensionSeparator${bucketPosition.bucketZ}" } - private def additionalCoordinatesFilePath(additionalCoordinatesOpt: Option[Seq[AdditionalCoordinate]]) = + private def additionalCoordinatesFilePath(additionalCoordinatesOpt: Option[Seq[AdditionalCoordinate]], + additionalAxesSorted: Seq[AdditionalAxis]) = additionalCoordinatesOpt match { case Some(additionalCoordinates) if additionalCoordinates.nonEmpty => - additionalCoordinates.map(_.value).mkString(dimensionSeparator) + 
dimensionSeparator + reorderAdditionalCoordinates(additionalCoordinates, additionalAxesSorted) + .map(_.value) + .mkString(dimensionSeparator) + dimensionSeparator case _ => "" } From 53f31840983ada8d9cc67a5bf20a98c140734c1b Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 31 Jan 2024 15:47:37 +0100 Subject: [PATCH 10/14] changelog --- CHANGELOG.unreleased.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 945b149089f..9cdd9640d5d 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -11,12 +11,14 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released [Commits](https://github.com/scalableminds/webknossos/compare/24.02.0...HEAD) ### Added +- Webknossos can now open ND Zarr datasets with arbitrary axis orders (not limited to `**xyz` anymore). [#7592](https://github.com/scalableminds/webknossos/pull/7592) ### Changed - Datasets stored in WKW format are no longer loaded with memory mapping, reducing memory demands. [#7528](https://github.com/scalableminds/webknossos/pull/7528) ### Fixed - Fixed rare SIGBUS crashes of the datastore module that were caused by memory mapping on unstable file systems. [#7528](https://github.com/scalableminds/webknossos/pull/7528) +- Fixed a bug in ND volume annotation downloads where the additionalAxes metadata had wrong indices. 
[#7592](https://github.com/scalableminds/webknossos/pull/7592) ### Removed From 5bff6f63a9e5c5a0060a6106979fd4fe1f12b146 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 31 Jan 2024 16:01:54 +0100 Subject: [PATCH 11/14] remove unused default arg --- .../datastore/datareaders/DatasetArray.scala | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index 77e08ebc2bf..e3dc88f0f73 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -64,11 +64,10 @@ class DatasetArray(vaultPath: VaultPath, chunkShape // irregular shaped chunk indexes are currently not supported for 2d datasets } - def readBytesWithAdditionalCoordinates( - shapeXYZ: Vec3Int, - offsetXYZ: Vec3Int, - additionalCoordinatesOpt: Option[Seq[AdditionalCoordinate]], - shouldReadUint24: Boolean = false)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + def readBytesWithAdditionalCoordinates(shapeXYZ: Vec3Int, + offsetXYZ: Vec3Int, + additionalCoordinatesOpt: Option[Seq[AdditionalCoordinate]], + shouldReadUint24: Boolean)(implicit ec: ExecutionContext): Fox[Array[Byte]] = for { (shapeArray, offsetArray) <- tryo(constructShapeAndOffsetArrays( shapeXYZ, @@ -81,7 +80,7 @@ class DatasetArray(vaultPath: VaultPath, private def constructShapeAndOffsetArrays(shapeXYZ: Vec3Int, offsetXYZ: Vec3Int, additionalCoordinatesOpt: Option[Seq[AdditionalCoordinate]], - shouldReadUint24: Boolean = false): (Array[Int], Array[Int]) = { + shouldReadUint24: Boolean): (Array[Int], Array[Int]) = { val shapeArray: Array[Int] = Array.fill(rank)(1) shapeArray(rank - 3) = shapeXYZ.x shapeArray(rank - 2) = shapeXYZ.y From ba939e5782206e158a49844dfb0e6956cf69da95 Mon Sep 
17 00:00:00 2001 From: Florian M Date: Thu, 1 Feb 2024 15:21:38 +0100 Subject: [PATCH 12/14] better error message in image creator --- .../webknossos/datastore/image/ImageCreator.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/image/ImageCreator.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/image/ImageCreator.scala index fc42899fb58..5de557cc79f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/image/ImageCreator.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/image/ImageCreator.scala @@ -137,8 +137,7 @@ object ImageCreator extends LazyLogging { (0xFF << 24) | (colorRed(grayNormalized) << 16) | (colorGreen(grayNormalized) << 8) | (colorBlue( grayNormalized) << 0) case _ => - throw new Exception( - "Can't handle " + bytesPerElement + " bytes per element in Image creator for a color layer.") + throw new Exception(s"Unsupported ElementClass for color layer thumbnail: $elementClass") } } } From 2663969ad4d04f3f28e798147eaa3dc9f0b5f45a Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 1 Feb 2024 15:29:00 +0100 Subject: [PATCH 13/14] no need to permute indices in chunkIndexToShardIndex --- .../webknossos/datastore/datareaders/zarr3/Zarr3Array.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala index 0730302b57b..874dd1d38fd 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala @@ -160,8 +160,8 @@ class Zarr3Array(vaultPath: VaultPath, private def chunkIndexToShardIndex(chunkIndex: Array[Int]) = 
ChunkUtils.computeChunkIndices( - header.datasetShape.map(fullAxisOrder.permuteIndicesArrayToWk), - fullAxisOrder.permuteIndicesArrayToWk(header.outerChunkShape), + header.datasetShape, + header.outerChunkShape, header.chunkShape, chunkIndex.zip(header.chunkShape).map { case (i, s) => i * s } ) From de7173f08a0d2f2f8ed896877d4735d206b9ee66 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 5 Feb 2024 13:43:06 +0100 Subject: [PATCH 14/14] pr feedback --- .../webknossos/datastore/datareaders/AxisOrder.scala | 4 ++-- .../webknossos/datastore/datareaders/DatasetArray.scala | 6 ++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala index 41f6eec0bc9..558dc657181 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/AxisOrder.scala @@ -16,7 +16,7 @@ case class AxisOrder(x: Int, y: Int, z: Option[Int], c: Option[Int] = None) { } def length: Int = { - val lengthOfZ = 1 // if z is None, we append it an as adapter + val lengthOfZ = 1 // if z is None, we append it as an adapter val lengthOfC = if (c.isDefined) 1 else 0 lengthOfC + 2 + lengthOfZ } @@ -57,7 +57,7 @@ case class FullAxisOrder(axes: Seq[Axis]) { lazy val rank: Int = axes.length lazy val arrayToWkPermutation: Array[Int] = { - // wk is always the additionalAxes + (c)zxy + // wk is always the additionalAxes + (c)xyz val permutationMutable: Array[Int] = Array.fill(axes.length)(0) var additionalAxisIndex = 0 diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index e3dc88f0f73..b04fa57ee80 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -143,8 +143,10 @@ class DatasetArray(vaultPath: VaultPath, f"outer($raw)" } - // Read from array. Note that shape and offset should be passed in XYZ order, left-padded with 0 and 1 respectively. - // This function will internally adapt to the array's axis order so that XYZ data in fortran-order is returned. + // Read from array. Note that shape and offset should be passed in “wk” order (…CXYZ) + // The local variables like chunkIndices are also in this order unless explicitly named. + // Loading data adapts to the array's axis order so that …CXYZ data in fortran-order is + // returned, regardless of the array’s internal storage. private def readAsFortranOrder(shape: Array[Int], offset: Array[Int])( implicit ec: ExecutionContext): Fox[MultiArray] = { val totalOffset: Array[Int] = offset.zip(header.voxelOffset).map { case (o, v) => o - v }.padTo(offset.length, 0)