Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

sketch: read blocks on demand #227

Draft
wants to merge 14 commits into
base: master
Choose a base branch
from
Draft
4 changes: 2 additions & 2 deletions examples/8_read_raw_blocks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ fn main() {
let reader = reader

// do not worry about multi-resolution levels or deep data
.filter_chunks(true, |meta_data, tile, block| {
.filter_chunks(|meta_data, tile, block| {
let header = &meta_data.headers[block.layer];
!header.deep && tile.is_largest_resolution_level()
}).unwrap()
Expand All @@ -94,7 +94,7 @@ fn main() {
});

// read all pixel blocks from the image, decompressing in parallel
reader.decompress_parallel(true, |meta_data, block|{
reader.decompress_parallel(|meta_data, block|{
let header = &meta_data.headers[block.index.layer];

// collect all pixel values from the pixel block
Expand Down
120 changes: 120 additions & 0 deletions examples/9_read_blocks_on_demand.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@

extern crate exr;

use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use exr::block::chunk::Chunk;
use exr::block::UncompressedBlock;
use exr::image::read::specific_channels::{read_specific_channels, RecursivePixelReader};
use exr::prelude::{IntegerBounds, ReadSpecificChannel, Vec2};

/// load only some specific pixel sections from the file, just when they are needed.
/// load blocks of pixels into a sparse texture (illustrated with a hashmap in this example).
/// the process is as follows:
///
/// 1. prepare some state (open the file, read meta data, define the channels we want to read)
/// 2. when needed, load more pixel blocks from the file
/// a. load compressed chunks for a specific pixel section
/// b. decompress chunks and extract rgba pixels from the packed channel data in the block
/// c. write the loaded rgba pixel blocks into the sparse texture
fn main() {

    // this is where we will store our loaded data.
    // for this example, we use a hashmap instead of a real sparse texture.
    // it stores blocks of rgba pixels, indexed by the position of the block (i32, i32) and its size
    let mut my_sparse_texture: HashMap<(Pos, Size), Vec<[f32; 4]>> = Default::default();
    type Pos = (i32, i32);
    type Size = (usize, usize);


    let file = BufReader::new(
        File::open("3GB.exr")
            .expect("run example `7_write_raw_blocks` to generate this image file")
    );

    // initializes a lazy decoder (reads meta data immediately)
    let mut chunk_reader = exr::block::read(file, true).unwrap()
        .on_demand_chunks().unwrap();

    let layer_index = 0; // only load pixels from the first "header" (assumes first layer has rgb channels)
    let mip_level = (0, 0); // only load largest mip map

    // clone the meta data into an owned value, so that `chunk_reader`
    // can still be borrowed mutably inside the closure below
    let exr_info = chunk_reader.meta_data().clone();
    let layer_info = &exr_info.headers[layer_index];
    let channel_info = &layer_info.channels;
    println!("loading header #0 from {:#?}", exr_info);

    // this object can decode packed exr blocks to simple rgb (can be shared or cloned across threads)
    let rgb_from_block_extractor = read_specific_channels()
        .required("R").required("G").required("B")
        .optional("A", 1.0)
        .create_recursive_reader(channel_info).unwrap(); // this will fail if the image does not contain rgb channels


    // ...
    // later in your app, maybe when the view changed:
    when_new_pixel_section_must_be_loaded(|pixel_section| {

        // todo: only load blocks that are not loaded yet. maybe an additional filter? or replace this with a more modular filtering architecture?
        let compressed_chunks = chunk_reader
            .load_all_chunks_for_display_space_section(layer_index, mip_level, pixel_section)

            // in this example, we use .flatten(), this simply discards all errors and only continues with the successfully loaded chunks
            // in this example, we collect here due to borrowing meta data
            .flatten().collect::<Vec<Chunk>>();

        // this could be done in parallel, e.g. by using rayon par_iter.
        // `.ok()` discards decompression errors, keeping only successfully decompressed blocks
        let packed_pixel_blocks = compressed_chunks.into_iter()
            .filter_map(|chunk| UncompressedBlock::decompress_chunk(chunk, &exr_info, true).ok());

        // the exr blocks may contain arbitrary channels, but we are only interested in rgba.
        // so we convert each exr block to an rgba block (vec of [f32; 4])
        let rgba_blocks = packed_pixel_blocks.map(|block| {
            assert_eq!(block.index.layer, layer_index);

            let size = block.index.pixel_size;
            let position = block.index.pixel_position.to_i32() + layer_info.own_attributes.layer_position;
            let mut rgba_buffer = vec![[0.0; 4]; size.area()]; // rgba = 4 floats

            // decode individual pixels into our f32 buffer
            // automatically converts f16 samples to f32 if required
            // ignores all other channel data
            rgb_from_block_extractor.read_pixels_from_block(channel_info, block, |position, (r,g,b,a)| {
                rgba_buffer[position.flat_index_for_size(size)] = [r,g,b,a];
            });

            (position, size, rgba_buffer)
        });

        for (position, size, block) in rgba_blocks {
            my_sparse_texture.insert((position.into(), size.into()), block);
        }
    });

    // we're done! print something
    println!("\n\nsparse texture now contains {} blocks", my_sparse_texture.len());

    // write each loaded block to its own small exr file
    for (index, ((_pos, (width, height)), block)) in my_sparse_texture.into_iter().enumerate() {
        exr::prelude::write_rgba_file(
            format!("block #{}.exr", index), width, height,
            |x,y| {
                let [r,g,b,a] = block[Vec2(x,y).flat_index_for_size((width, height))];
                (r,g,b,a)
            }
        ).unwrap();
    }
}

/// request to load a specific sub-rect into view
/// (loads a single view once, as this is a stub implementation)
/// Request to load a specific sub-rect into view
/// (loads a single hard-coded view once, as this is a stub implementation).
/// The explicit `'a` lifetime of the original was needless — `impl FnMut` works without it.
fn when_new_pixel_section_must_be_loaded(mut load_for_view: impl FnMut(IntegerBounds)) {
    let image_sub_section = IntegerBounds::new(
        (831, 739), // position
        (32, 91)    // size
    );

    load_for_view(image_sub_section);
}
111 changes: 111 additions & 0 deletions examples/9_read_blocks_on_demand_dynamic.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@

extern crate exr;

use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use exr::block::chunk::Chunk;
use exr::block::UncompressedBlock;
use exr::image::{AnyChannel, AnyChannels, FlatSamples, Image};
use exr::prelude::{IntegerBounds, WritableImage};

/// load only some specific pixel sections from the file, just when they are needed.
/// load blocks of pixels into a sparse texture (illustrated with a hashmap in this example).
/// the process is as follows:
///
/// 1. prepare some state (open the file, read meta data)
/// 2. when needed, load more pixel blocks from the file
/// a. load compressed chunks for a specific pixel section
/// b. decompress chunks and extract pixels from the packed channel data in the block
/// c. write the loaded pixel blocks into the sparse texture
fn main() {

    // this is where we will store our loaded data.
    // for this example, we use a hashmap instead of a real sparse texture.
    // it stores a vector of channels, each containing either f32, f16, or u32 samples
    let mut my_sparse_texture: HashMap<(Pos, Size), Vec<FlatSamples>> = Default::default();
    type Pos = (i32, i32);
    type Size = (usize, usize);


    let file = BufReader::new(
        File::open("3GB.exr")
            .expect("run example `7_write_raw_blocks` to generate this image file")
    );

    // initializes a lazy decoder (reads meta data immediately)
    let mut chunk_reader = exr::block::read(file, true).unwrap()
        .on_demand_chunks().unwrap();

    let layer_index = 0; // only load pixels from the first "header" (assumes first layer has rgb channels)
    let mip_level = (0, 0); // only load largest mip map

    // clone the meta data into an owned value, so that `chunk_reader`
    // can still be borrowed mutably inside the closure below
    let exr_info = chunk_reader.meta_data().clone();
    let layer_info = &exr_info.headers[layer_index];
    let channel_info = &layer_info.channels.list;
    println!("loading header #0 from {:#?}", exr_info);

    // ...
    // later in your app, maybe when the view changed:
    when_new_pixel_section_must_be_loaded(|pixel_section| {

        // todo: only load blocks that are not loaded yet. maybe an additional filter? or replace this with a more modular filtering architecture?
        let compressed_chunks = chunk_reader
            .load_all_chunks_for_display_space_section(layer_index, mip_level, pixel_section)

            // in this example, we use .flatten(), this simply discards all errors and only continues with the successfully loaded chunks
            // in this example, we collect here due to borrowing meta data
            .flatten().collect::<Vec<Chunk>>();

        // this could be done in parallel, e.g. by using rayon par_iter.
        // `.ok()` discards decompression errors, keeping only successfully decompressed blocks
        let packed_pixel_blocks = compressed_chunks.into_iter()
            .filter_map(|chunk| UncompressedBlock::decompress_chunk(chunk, &exr_info, true).ok());

        // exr blocks store line by line, each line stores all the channels.
        // what we might want instead is to store channel by channel, each channel containing all the lines for this block.
        let unpacked_blocks = packed_pixel_blocks.map(|block| {
            // obtain a vector of channels, where each channel contains the whole block
            let channels = block.unpack_channels(layer_info);

            let size = block.index.pixel_size;
            let position = block.index.pixel_position.to_i32() + layer_info.own_attributes.layer_position;

            (position, size, channels)
        });

        for (position, size, block) in unpacked_blocks {
            my_sparse_texture.insert((position.into(), size.into()), block);
        }
    });


    println!("\n\nsparse texture now contains {} blocks", my_sparse_texture.len());

    // write each loaded block to its own small multi-channel exr file
    for (index, ((_pos, (width, height)), channel_data)) in my_sparse_texture.into_iter().enumerate() {
        let path = format!("block #{}.exr", index);
        let channel_names = channel_info.iter().map(|c| c.name.clone());

        let image = Image::from_channels((width, height), AnyChannels::sort(
            channel_names.zip(channel_data)
                .map(|(chan, channel_data)| AnyChannel::new(chan, channel_data))
                .collect()
        ));

        image.write().to_file(path).unwrap();
    }

    println!("Written the blocks as exr files.");
}

/// request to load a specific sub-rect into view
/// (loads a single view once, as this is a stub implementation)
/// Request to load a specific sub-rect into view
/// (loads a single hard-coded view once, as this is a stub implementation).
/// The explicit `'a` lifetime of the original was needless — `impl FnMut` works without it.
fn when_new_pixel_section_must_be_loaded(mut load_for_view: impl FnMut(IntegerBounds)) {
    let image_sub_section = IntegerBounds::new(
        (831, 739), // position
        (32, 91)    // size
    );

    load_for_view(image_sub_section);
}
6 changes: 3 additions & 3 deletions src/block/chunk.rs
Original file line number Diff line number Diff line change
Expand Up @@ -365,11 +365,11 @@ impl Chunk {
compressed_block: match header.blocks {
// flat data
BlockDescription::ScanLines if !header.deep => CompressedBlock::ScanLine(CompressedScanLineBlock::read(read, max_block_byte_size)?),
BlockDescription::Tiles(_) if !header.deep => CompressedBlock::Tile(CompressedTileBlock::read(read, max_block_byte_size)?),
BlockDescription::Tiles(_) if !header.deep => CompressedBlock::Tile(CompressedTileBlock::read(read, max_block_byte_size)?),

// deep data
BlockDescription::ScanLines => CompressedBlock::DeepScanLine(CompressedDeepScanLineBlock::read(read, max_block_byte_size)?),
BlockDescription::Tiles(_) => CompressedBlock::DeepTile(CompressedDeepTileBlock::read(read, max_block_byte_size)?),
BlockDescription::ScanLines => CompressedBlock::DeepScanLine(CompressedDeepScanLineBlock::read(read, max_block_byte_size)?),
BlockDescription::Tiles(_) => CompressedBlock::DeepTile(CompressedDeepTileBlock::read(read, max_block_byte_size)?),
},
};

Expand Down
4 changes: 2 additions & 2 deletions src/block/lines.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ pub struct LineIndex {
/// Index of the mip or rip level in the image.
pub level: Vec2<usize>,

/// Position of the most left pixel of the row.
/// Position of the most left pixel of the row, in data window space.
pub position: Vec2<usize>,

/// The width of the line; the number of samples in this row,
Expand Down Expand Up @@ -191,7 +191,7 @@ impl LineRef<'_> {
/// Lazily decode the raw bytes of this line into typed samples.
/// Returns an iterator yielding one `Result<T>` per sample in the line;
/// each call to `next` reads the next `T::BYTE_SIZE` bytes from the line's byte slice.
/// The debug assertion checks that the caller-chosen sample type `T`
/// matches the byte length of this line exactly.
pub fn read_samples<T: crate::io::Data>(&self) -> impl Iterator<Item = Result<T>> + '_ {
debug_assert_eq!(self.value.len(), self.location.sample_count * T::BYTE_SIZE, "sample type size does not match line byte size");

// copy the slice handle (not the bytes); `T::read` advances this
// local cursor through the line as the iterator is consumed
let mut read = self.value; // FIXME deep data
(0..self.location.sample_count).map(move |_| T::read(&mut read))
}
}
36 changes: 36 additions & 0 deletions src/block/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ use crate::compression::ByteVec;
use crate::block::chunk::{CompressedBlock, CompressedTileBlock, CompressedScanLineBlock, Chunk, TileCoordinates};
use crate::meta::header::Header;
use crate::block::lines::{LineIndex, LineRef, LineSlice, LineRefMut};
use crate::image::FlatSamples;
use crate::meta::attribute::ChannelList;


Expand Down Expand Up @@ -254,4 +255,39 @@ impl UncompressedBlock {
data: Self::collect_block_data_from_lines(channels, block_index, extract_line)
}
}

/// Unpack the channel data from the raw block bytes.
/// Creates a vector with one entry for each channel in the header.
/// Each entry contains the samples of that channel for this whole block,
/// typed to either `f32`, `f16`, or `u32` according to the channel description.
/// The samples are flattened, in row-major order, according to `Vec2::flat_index_for_size(block_size)`.
///
/// # Panics
/// Panics if a line of this block lies outside the block bounds (an indexing bug).
pub fn unpack_channels(&self, header: &Header) -> Vec<FlatSamples> {
    let channel_info = &header.channels;
    let block_size = self.index.pixel_size;

    // the whole block, but each channel is one entry in this vec
    // (FlatSamples::new allocates a zeroed buffer of the right sample type)
    let mut channels: Vec<FlatSamples> = channel_info.list.iter()
        .map(|chan| FlatSamples::new(chan, block_size))
        .collect();

    for line in self.lines(channel_info) {
        let all_lines_for_this_channel = &mut channels[line.location.channel];

        // TODO sampling (subsampled channels are not handled yet)
        // convert the line position from data window space to block-local space
        let position_in_block = line.location.position - self.index.pixel_position;
        let start = position_in_block.flat_index_for_size(block_size);

        // use the line's own sample count rather than the block width, so the
        // destination slice length always matches the number of samples in the
        // line (identical for non-subsampled channels, but robust if sampling is added)
        let end = start + line.location.sample_count;

        // read either f16, f32, or u32 samples based on the channel's type
        match all_lines_for_this_channel {
            FlatSamples::F16(samples) => line.read_samples_into_slice(&mut samples[start..end]),
            FlatSamples::F32(samples) => line.read_samples_into_slice(&mut samples[start..end]),
            FlatSamples::U32(samples) => line.read_samples_into_slice(&mut samples[start..end]),
        }.expect("line indexing bug");
    }

    channels
}
}
Loading
Loading