diff --git a/examples/5_read_legacy_layers.rs b/examples/5_read_legacy_layers.rs new file mode 100644 index 00000000..656c15b4 --- /dev/null +++ b/examples/5_read_legacy_layers.rs @@ -0,0 +1,51 @@ + +#[macro_use] +extern crate smallvec; +extern crate rand; +extern crate half; + + +// exr imports +extern crate exr; + +/// Read an image with channel groups from a file. +/// Some legacy software may group layers that contain a `.` in the layer name. +/// +/// Note: This is an OpenEXR legacy strategy. OpenEXR supports layers natively since 2013. +/// Use the natively supported exrs `Layer` types instead, if possible. +/// +fn main() { + use exr::prelude::*; + + let image = read().no_deep_data() + .largest_resolution_level() + + .rgba_channels( + |resolution, _| { + vec![vec![(f16::ZERO, f16::ZERO, f16::ZERO, f16::ZERO); resolution.width()]; resolution.height()] + }, + + // all samples will be converted to f32 (you can also use the enum `Sample` instead of `f32` here to retain the original data type from the file) + |vec, position, (r,g,b,a): (f16, f16, f16, f16)| { + vec[position.y()][position.x()] = (r,g,b,a) + } + ) + + .grouped_channels() + .first_valid_layer() + .all_attributes() + .on_progress(|progress| println!("progress: {:.1}", progress*100.0)) + .from_file("tests/images/valid/openexr/MultiView/Fog.exr") + .unwrap(); + + // output the top left color of each layer + for layer in &image.layer_data { + let (r,g,b,a) = layer.channel_data.pixels.first().unwrap().first().unwrap(); + + println!( + "top left color of layer `{}`: (r,g,b,a) = {:?}", + layer.attributes.layer_name.clone().unwrap_or_default(), + (r.to_f32(), g.to_f32(), b.to_f32(), a.to_f32()) + ) + } +} \ No newline at end of file diff --git a/examples/5_write_legacy_layers.rs b/examples/5_write_legacy_layers.rs index ec6bf297..77ef94b3 100644 --- a/examples/5_write_legacy_layers.rs +++ b/examples/5_write_legacy_layers.rs @@ -8,8 +8,6 @@ extern crate half; // exr imports extern crate exr; -// 
TODO create a dedicated reader and writer for this scenario - /// Generate an image with channel groups and write it to a file. /// Some legacy software may group layers that contain a `.` in the layer name. /// @@ -18,8 +16,6 @@ extern crate exr; /// fn main() { use exr::prelude::*; - // TODO simplify handling these types of layers using read() and write() - let size = Vec2(512, 512); let create_channel = |name: &str| -> AnyChannel { @@ -32,35 +28,30 @@ fn main() { }; - // The channels have the following structure: - // - // - Object - // - Red - // - Green - // - Blue - // - Alpha - - // - Background - // - Red - // - Green - // - Blue - - let foreground_r = create_channel("Object.R"); - let foreground_g = create_channel("Object.G"); - let foreground_b = create_channel("Object.B"); - let foreground_a = create_channel("Object.A"); - - let background_r = create_channel("Background.R"); - let background_g = create_channel("Background.G"); - let background_b = create_channel("Background.B"); - let layer = Layer::new( size, LayerAttributes::named("test-image"), Encoding::FAST_LOSSLESS, - AnyChannels::sort(smallvec![ // the order does not actually matter - foreground_r, foreground_g, foreground_b, foreground_a, - background_r, background_g, background_b + + ChannelGroups::from_list([ + ( + // the foreground layer will be rgba + "Foreground", + AnyChannels::sort(smallvec![ + create_channel("R"), create_channel("G"), + create_channel("B"), create_channel("A"), + ]) + ), + + ( + // the background layer will be rgb + "Background", + AnyChannels::sort(smallvec![ + create_channel("R"), + create_channel("G"), + create_channel("B") + ]) + ), ]), ); diff --git a/examples/7_custom_write.rs b/examples/7_custom_write.rs index 55a244f3..e8422ee2 100644 --- a/examples/7_custom_write.rs +++ b/examples/7_custom_write.rs @@ -70,24 +70,24 @@ fn main() { |meta_data, chunk_writer|{ - let blocks = meta_data.collect_ordered_blocks(|block_index|{ - let channel_description = 
&meta_data.headers[block_index.layer].channels; + let blocks = meta_data.collect_ordered_blocks(|header, block_index|{ + let channel_description = &header.channels; // fill the image file contents with one of the precomputed random values, // picking a different one per channel UncompressedBlock::from_lines(channel_description, block_index, |line_mut|{ // TODO iterate mut instead?? - let chan = line_mut.location.channel; + let channel_index = line_mut.location.channel; - if chan == 3 { // write time as depth (could also check for _meta.channels[chan].name == "Z") + if channel_description.list[channel_index].name.eq("Z") { // write time as depth line_mut.write_samples(|_| start_time.elapsed().as_secs_f32()) .expect("write to line bug"); } else { // write rgba color line_mut - .write_samples(|sample_index| random_values[(sample_index + chan) % random_values.len()]) + .write_samples(|sample_index| random_values[(sample_index + channel_index) % random_values.len()]) .expect("write to line bug"); } }) diff --git a/src/block/lines.rs b/src/block/lines.rs index 1cdf8eeb..4800f1fd 100644 --- a/src/block/lines.rs +++ b/src/block/lines.rs @@ -152,7 +152,7 @@ impl<'s> LineRefMut<'s> { debug_assert_eq!(slice.len(), self.location.sample_count, "slice size does not match the line width"); debug_assert_eq!(self.value.len(), self.location.sample_count * T::BYTE_SIZE, "sample type size does not match line byte size"); - T::write_slice(&mut Cursor::new(self.value), slice) + T::write_slice(&mut Cursor::new(self.value), slice) // TODO this cant fail, return no result? } /// Iterate over all samples in this line, from left to right. @@ -168,7 +168,7 @@ impl<'s> LineRefMut<'s> { let mut write = Cursor::new(self.value); for index in 0..self.location.sample_count { - T::write(get_sample(index), &mut write)?; + T::write(get_sample(index), &mut write)?; // TODO this cannot fail...? do not return a result? 
} Ok(()) diff --git a/src/block/mod.rs b/src/block/mod.rs index 1d20aa89..95b2bce1 100644 --- a/src/block/mod.rs +++ b/src/block/mod.rs @@ -25,6 +25,7 @@ use crate::block::chunk::{CompressedBlock, CompressedTileBlock, CompressedScanLi use crate::meta::header::Header; use crate::block::lines::{LineIndex, LineRef, LineSlice, LineRefMut}; use crate::meta::attribute::ChannelList; +use std::hash::Hash; /// Specifies where a block of pixel data should be placed in the actual image. @@ -40,7 +41,7 @@ pub struct BlockIndex { pub pixel_position: Vec2, /// Number of pixels in this block, extending to the right and downwards. - /// Stays the same across all resolution levels. + /// Stays the same across all resolution levels. Border tiles are smaller than average. pub pixel_size: Vec2, /// Index of the mip or rip level in the image. @@ -92,7 +93,7 @@ pub fn write( /// The blocks written to the file must be exactly in this order, /// except for when the `LineOrder` is unspecified. /// The index represents the block index, in increasing line order, within the header. 
-pub fn enumerate_ordered_header_block_indices(headers: &[Header]) -> impl '_ + Iterator { +pub fn enumerate_ordered_header_block_indices(headers: &[Header]) -> impl '_ + Iterator { headers.iter().enumerate().flat_map(|(layer_index, header)|{ header.enumerate_ordered_blocks().map(move |(index_in_header, tile)|{ let data_indices = header.get_absolute_block_pixel_coordinates(tile.location).expect("tile coordinate bug"); @@ -104,11 +105,17 @@ pub fn enumerate_ordered_header_block_indices(headers: &[Header]) -> impl '_ + I pixel_size: data_indices.size, }; - (index_in_header, block) + (header, index_in_header, block) }) }) } +impl BlockIndex { + /// The number of bytes required for the referenced uncompressed block + pub fn byte_size(&self, channels: &ChannelList) -> usize { + self.pixel_size.area() * channels.bytes_per_pixel + } +} impl UncompressedBlock { @@ -153,7 +160,7 @@ impl UncompressedBlock { let header: &Header = headers.get(index.layer) .expect("block layer index bug"); - let expected_byte_size = header.channels.bytes_per_pixel * self.index.pixel_size.area(); // TODO sampling?? + let expected_byte_size = self.index.byte_size(&header.channels); // header.channels.bytes_per_pixel * self.index.pixel_size.area(); // TODO sampling?? if expected_byte_size != data.len() { panic!("get_line byte size should be {} but was {}", expected_byte_size, data.len()); } @@ -224,6 +231,16 @@ impl UncompressedBlock { Ok(()) }*/ + /// Create an uncompressed block by filling bytes. + pub fn fill_block_data( + channels: &ChannelList, block_index: BlockIndex, + mut fill_bytes: impl FnMut(&mut[u8]) + ) -> Vec { + let mut block_bytes = vec![0_u8; block_index.byte_size(channels)]; + fill_bytes(block_bytes.as_mut_slice()); + block_bytes + } + // TODO from iterator?? /// Create an uncompressed block byte vector by requesting one line of samples after another. 
pub fn collect_block_data_from_lines( @@ -231,17 +248,14 @@ impl UncompressedBlock { mut extract_line: impl FnMut(LineRefMut<'_>) ) -> Vec { - let byte_count = block_index.pixel_size.area() * channels.bytes_per_pixel; - let mut block_bytes = vec![0_u8; byte_count]; - - for (byte_range, line_index) in LineIndex::lines_in_block(block_index, channels) { - extract_line(LineRefMut { // TODO subsampling - value: &mut block_bytes[byte_range], - location: line_index, - }); - } - - block_bytes + Self::fill_block_data(channels, block_index, |block_bytes|{ + for (byte_range, line_index) in LineIndex::lines_in_block(block_index, channels) { + extract_line(LineRefMut { // TODO subsampling + value: &mut block_bytes[byte_range], + location: line_index, + }); + } + }) } /// Create an uncompressed block by requesting one line of samples after another. diff --git a/src/image/channel_groups.rs b/src/image/channel_groups.rs index 7d743758..49afbe46 100644 --- a/src/image/channel_groups.rs +++ b/src/image/channel_groups.rs @@ -1,84 +1,130 @@ use std::collections::HashMap; use crate::image::write::channels::{WritableChannels, ChannelsWriter}; -use crate::meta::attribute::{LevelMode, ChannelList, Text, TextSlice, ChannelInfo}; +use crate::meta::attribute::{LevelMode, ChannelList, Text}; use crate::meta::header::Header; use crate::image::read::layers::{ReadChannels, ChannelsReader}; use crate::block::{BlockIndex, UncompressedBlock}; -use crate::block::lines::{collect_uncompressed_block_from_lines, LineIndex}; -use std::io::{Cursor, Read}; use crate::error::{Result, UnitResult}; use crate::block::chunk::TileCoordinates; -use crate::prelude::SmallVec; - - +use crate::prelude::{SmallVec, ChannelDescription}; +use crate::math::RoundingMode; +use crate::image::read::image::ChannelMask; +use std::iter::FromIterator; +use crate::image::AnyChannels; + +pub trait ReadGroupedChannels: Sized { + fn grouped_channels(self) -> ReadChannelGroups { + ReadChannelGroups { read_channels: self } + } +} +impl 
ReadGroupedChannels for T + where T: ReadChannels {} -pub struct ChannelGroups { - channel_group: Option, - children: HashMap +#[derive(Default, Eq, PartialEq, Debug)] +pub struct ChannelGroups { + pub channels: Option, + pub children: HashMap } -impl ChannelGroups { - +impl ChannelGroups { - // pub fn visit_groups_mut(&mut self, visitor: impl Fn(&mut Channels)) { + // pub fn insert(&mut self, parent_group_name: Text, channels: Channels){ + // // } + // TODO other construction methods + pub fn from_list>(named_groups: impl IntoIterator) -> Self { + Self { channels: None, children: HashMap::from_iter(named_groups) } + } - - pub fn groups(&self) -> SmallVec<[&ChannelGroup; 12]> { - let children = self.children.iter().flat_map(|group| group.groups()); - self.channel_group.iter().chain(children).collect() + // TODO depth first or not? + pub fn all_channel_groups(&self) -> impl Iterator { + // TODO https://fasterthanli.me/articles/recursive-iterators-rust + self.children.iter() + .flat_map(|(_, child)| child.all_channel_groups()) + .chain(self.channels.iter()) + .collect::>().into_iter() } - pub fn lookup_group(&self, group_name: &TextSlice) -> Option<&ChannelGroup> { - let dot_index = group_name.iter().position('.'); - if let Some(dot_index) = dot_index { - let group_name = &group_name[.. dot_index]; - let child_name = &group_name[dot_index + 1 ..]; - self.children.get(group_name) - .and_then(|child| child.lookup(child_name)) - } - else { - self.channel_group.lookup(name) - } + // TODO depth first or not? + pub fn all_channel_groups_mut(&mut self) -> impl Iterator { + // TODO https://fasterthanli.me/articles/recursive-iterators-rust + self.children.iter_mut() + .flat_map(|(_, child)| child.all_channel_groups_mut()) + .chain(self.channels.iter_mut()) + .collect::>().into_iter() } + /*TODO pub fn lookup_channel_group(&self, group_name: &TextSlice) -> Option<&Channels> { + let dot_index = group_name.iter().position(|&character| character == '.' 
as u8); - /*pub fn insert_group(&mut self, full_name: &TextSlice, value: ChannelGroup) { - let dot_index = full_name.iter().position('.'); if let Some(dot_index) = dot_index { let group_name = &group_name[.. dot_index]; - let name_rest = &group_name[dot_index + 1 ..]; - - self.children.entry(Text::from_slice_unchecked(group_name)) - .or_insert(|| ); - - // self.children.insert(Text::from_slice_unchecked(group_name), value) - // .and_then(|child| child.lookup(name_rest)); + let child_name = &group_name[dot_index + 1 ..]; + self.child_groups.get(group_name) + .and_then(|child| child.lookup_channel_group(child_name)) } - else { - self.channel_group.lookup(name); + else { // arrived at last identifier + self.own_channels.as_ref() } }*/ - pub fn map(self, mapper: impl FnMut(ChannelGroup) -> T) -> ChannelGroups { + + + fn map(self, mut mapper: impl FnMut(Channels) -> T) -> ChannelGroups { ChannelGroups { - children: self.channel_group.iter().map(&mapper).collect(), - channel_group: self.channel_group.map(mapper), + channels: self.channels.map(&mut mapper), + children: self.children.into_iter() + .map(|(name, child)| (name, child.map(&mut mapper))) + .collect(), } } + + fn try_map(self, mut mapper: impl FnMut(Channels) -> Result) -> Result> { + let channels = match self.channels { + Some(channels) => Some(mapper(channels)?), + None => None, + }; + + let new_child_groups = HashMap::with_capacity(self.children.len()); + let child_groups = self.children.into_iter() + .map(|(name, child)| Ok((name, child.try_map(&mut mapper)?))) + .try_fold( + new_child_groups, + |mut map: HashMap>, item: Result<(Text, ChannelGroups)>| { + // TODO this is complicated! 
+ item.map(move |(k,v)| { + map.insert(k,v); + map + }) + } + )?; + + Ok(ChannelGroups { channels, children: child_groups, }) + } } +type SmallIndicesVec = SmallVec<[usize; 12]>; -pub fn parse_channel_list_groups(channels: impl Iterator) - -> ChannelGroups> -{ - fn insert_into_groups(groups: &mut ChannelGroups>, name: Text, value: T) { - let dot_index = name.as_slice().iter().position('.'); +impl ChannelGroups { + + // returns indices that reference the argument items + pub fn parse_list_to_indices(channels: impl Iterator) -> Self { + channels.enumerate().fold( + ChannelGroups::default(), + |mut groups, (index, name)|{ + groups.insert_channel_index(name, index); + groups + } + ) + } + + fn insert_channel_index(&mut self, name: Text, item_index: usize) { + let dot_index = name.as_slice().iter().position(|&character| character == '.' as u8); if let Some(dot_index) = dot_index { // insert into child group @@ -86,67 +132,71 @@ pub fn parse_channel_list_groups(channels: impl Iterator) let group_name = Text::from_slice_unchecked(&name.as_slice()[.. 
dot_index]); let child_channel = Text::from_slice_unchecked(&name.as_slice()[dot_index + 1 ..]); - let child_group = groups.children.entry(group_name) - .or_insert(ChannelGroups { channel_group: None, children: Default::default() }); + let child_group = self.children.entry(group_name) + .or_insert_with(ChannelGroups::default); - insert_into_groups(child_group, child_channel, value); + child_group.insert_channel_index(child_channel, item_index); } else { // insert directly into group - - if groups.channel_group.is_none() { - groups.channel_group = Some(SmallVec::new()); - } - - groups.channel_group.unwrap().push(value); + let groups = self.channels.get_or_insert_with(SmallIndicesVec::new); + groups.push(item_index); } } - - let mut result = ChannelGroups { channel_group: None, children: HashMap::default() }; - for (name, value) in channels { insert_into_groups(&mut result, name, value); } - result } -impl<'slf, ChannelGroup> WritableChannels<'slf> for ChannelGroups - where ChannelGroup: WritableChannels<'slf> -{ - fn infer_channel_list(&self) -> ChannelList { - // TODO what about empty groups with NO channels?? +impl<'slf, Channels> ChannelGroups where Channels: WritableChannels<'slf> { + // TODO reduce tuples and make simpler + pub fn absolute_names_unsorted( + &self, + to_channels: impl Fn(&Channels) -> SmallVec<[Channel;5]>, + channel_name: impl Fn(&mut Channel) -> &mut Text, + ) -> SmallVec<[Channel;5]> { + let child_channels = self.children.iter().flat_map(|(child_group_name, child_group)| { + let mut children = child_group.absolute_names_unsorted(&to_channels, &channel_name); + + for channel in &mut children { + channel_name(channel).push_front( + child_group_name.as_slice().iter().cloned().chain("." 
as u8) + ); + } - let child_channels = self.children.iter().flat_map(|(group_name, child)| { - let mut child_channels = child.infer_channel_list().list; - for channel in &mut child_channels { channel.name.push_front(group_name) }; - child_channels + children }); - let mut own_channels = self.channel_group - .map(|chans| chans.infer_channel_list().list) - .unwrap_or_default(); + let own_channels = self.channels.iter() + // TODO check empty and throw? + .flat_map(|own| to_channels(own)); - own_channels.extend(child_channels); - own_channels.sort_unstable(); // TODO only once at end - ChannelList::new(own_channels) // might be empty, but will be checked in MetaData::validate() + child_channels.chain(own_channels) + .collect() } +} - fn level_mode(&self) -> LevelMode { - fn find_mode_or_none(channels: &Self) -> Option { - channels.channel_group.map(WritableChannels::level_mode).or_else(|| { - channels.children.iter().map(find_mode_or_none).next() - }) - } +impl<'slf, ChannelGroup> WritableChannels<'slf> for ChannelGroups + where ChannelGroup: WritableChannelGroup<'slf> +{ + fn infer_channel_list(&self) -> ChannelList { + let mut all_channels: SmallVec<[ChannelDescription; 5]> = self + .absolute_names_unsorted( + |chans| chans.infer_channel_list().list.clone(), + |channel| &mut channel.name + ) + .collect(); - let mode = find_mode_or_none(self) - .expect("empty channel groups (check failed)"); // TODO only happens for empty channels, right? panic maybe? + all_channels.sort_by_key(|chan| chan.name.clone()); // TODO borrow? // TODO check empty and throw? 
+ ChannelList::new(all_channels) // might be empty, but will be checked in MetaData::validate() + } - if let Some(chans) = self.channel_group.as_ref() { - debug_assert_eq!(chans.level_mode(), mode, "level mode must be equal for all legacy channel groups") - } + /// Generate the file meta data of whether and how resolution levels should be stored in the file + fn infer_level_modes(&self) -> (LevelMode, RoundingMode) { + let mode = self.all_channel_groups().map(WritableChannels::infer_level_modes) + .next().expect("empty channel groups (check failed)"); // TODO only happens for empty channels, right? panic maybe? debug_assert!( - self.children.values() - .flat_map(find_mode_or_none) + self.all_channel_groups().map(WritableChannels::infer_level_modes) .all(|child_mode| child_mode == mode), "level mode must be equal for all legacy channel groups" @@ -155,43 +205,42 @@ impl<'slf, ChannelGroup> WritableChannels<'slf> for ChannelGroups mode } - type Writer = GroupChannelsWriter<'slf, ChannelGroup>; + type Writer = GroupChannelsWriter; fn create_writer(&'slf self, header: &Header) -> Self::Writer { - let channels = header.channels.list.iter() - .map(|channel_info|{ - // hashmap order is not guaranteed? 
so look up each channel group manually instead of generating new - let channels = self.lookup_group(channel_info.name.as_slice()) - .expect("channels not found bug"); - - channels.create_writer(header) // channel_info.name.clone() - }) - .collect(); - - GroupChannelsWriter { channels_list: channels } + GroupChannelsWriter { + all_channel_groups: self.all_channel_groups() + .map(|channel_group: &ChannelGroup| panic!("this uses relative names but expects absolute names, and all will write first byte")/*channel_group.create_channel_group_writer(header)*/) + .collect() + } } } -struct GroupChannelsWriter<'c, ChannelGroupWriter> { - channels_list: Vec<&'c ChannelGroupWriter>, + +pub trait WritableChannelGroup<'slf>: WritableChannels { + fn create_channel_group_writer(&'slf self, header: &Header, channel_indices: &[usize]) + -> ::Writer; } -impl<'c, Channels> ChannelsWriter for GroupChannelsWriter<'c, Channels> where Channels: ChannelsWriter { - fn extract_uncompressed_block(&self, header: &Header, block: BlockIndex) -> Vec { - let mut blocks_per_channel: Vec>> = self - .channels_list.iter() - .map(|channels| Cursor::new(channels.extract_uncompressed_block(header, block))) - .collect(); +impl<'slf> WritableChannelGroup<'slf> for AnyChannels + where AnyChannels: WritableChannels<'slf> +{ + fn create_channel_group_writer(&'slf self, header: &Header, channel_indices: &[usize]) -> Self::Writer { + self.create_writer(header) + } +} - UncompressedBlock::uncompressed_block_from_lines(header, block, |line|{ - let channel_reader = &mut blocks_per_channel[line.location.channel]; // TODO subsampling - // read from specific channel into total byte block - // this assumes that the lines in the callback are iterated in strictly increasing order - // because each channel reader is consumed - channel_reader.read_exact(line.value) - .expect("collecting grouped channel byte block failed"); - }) + +pub struct GroupChannelsWriter { + all_channel_groups: Vec, +} + +impl ChannelsWriter for 
GroupChannelsWriter where Channels: ChannelsWriter { + fn extract_uncompressed_block(&self, header: &Header, block_index: BlockIndex, output_block_data: &mut [u8]) { + for channels_group in &self.all_channel_groups { + channels_group.extract_uncompressed_block(header, block_index, output_block_data); + } } } @@ -201,8 +250,11 @@ struct ReadChannelGroups { } struct ChannelGroupsReader { - channels: ChannelGroups, - indexed_channels: Vec, + channels: ChannelGroups, + + // TODO optimize by iterating a vec instead of the nested groups: + //channels: Groups, + //indexed_channels: Vec, } impl<'s, ReadChannelGroup> ReadChannels<'s> for ReadChannelGroups @@ -210,58 +262,62 @@ impl<'s, ReadChannelGroup> ReadChannels<'s> for ReadChannelGroups; - fn create_channels_reader(&'s self, header: &Header) -> Result { - let swap = |(a,b)| (b,a); - let channel_groups = parse_channel_list_groups( - header.channels.list.iter().enumerate().map(swap) - ); + fn create_channels_reader(&'s self, header: &Header, selected_channels_indices: &ChannelMask) -> Result { + let selected_channels_indices = selected_channels_indices + .selected_channel_indices().collect::>(); - let mut indexed_channels = Vec::new(); - let channel_groups = channel_groups.map(|channels| { + // indices refer to `selected_channels_indices` + let channel_groups = ChannelGroups::parse_list_to_indices( + selected_channels_indices.iter() + .map(|&index| &header.channels.list[index]) + .map(|selected_channel| selected_channel.name.clone()) + ); - let mut channels_header = header.clone(); // TODO no clone? - channels_header.channels = ChannelList::new(channels.iter().map(|(name, index)|{ - let mut channel_info = header.channels.list[index].clone(); - channel_info.name = name; - channel_info - }).collect()); // FIXME does not comply to `header.chunk_count` and that stuff?? change ReadChannels fn signature? 
+ Ok(ChannelGroupsReader { + // own_channels_indices refer to `selected_channels_indices` + channels: channel_groups.try_map(|group_own_channel_indices|{ - indexed_channels.push(self.read_channels.create_channels_reader(&channels_header)); + let group_selected_channel_indices = group_own_channel_indices.iter() + .map(|&index| selected_channels_indices[index]); - // FIXME this is not the original order indexed_channels.len() - 1 - indexed_channels[] - }); + let group_selected_channel_indices = ChannelMask::only(group_selected_channel_indices); - Ok(ChannelGroupsReader { - channels: channel_groups, - indexed_channels, + let reader = self.read_channels.create_channels_reader(header, &group_selected_channel_indices); + reader + })? }) - - /*Ok(ChannelGroupsReader { - channels: header.channels.list.iter().map(|channel| { - let mut channels_header = header.clone(); - - let reader = self.read_channels.create_channels_reader(&channels_header); - (channels_header, reader) - }).collect(), - })*/ } } impl ChannelsReader for ChannelGroupsReader where ChannelGroupReader: ChannelsReader { type Channels = ChannelGroups; - fn filter_block(&self, tile: (usize, &TileCoordinates)) -> bool { - self.indexed_channels.iter().any(|channel| channel.filter_block(tile)) + fn is_block_desired(&self, tile: TileCoordinates) -> bool { + // TODO linear memory iterator + self.channels.all_channel_groups().any(|channel_group| channel_group.is_block_desired(tile)) } - fn read_block(&mut self, header: &Header, block: UncompressedBlock) -> UnitResult { - block.for_lines(|line|{ + // for every incoming block, all the children read the lines they want into their temporary storage + fn read_block(&mut self, header: &Header, block: &UncompressedBlock) -> UnitResult { + for channel in self.channels.all_channel_groups_mut() { // TODO linear memory iterator + channel.read_block(header, block)?; + } - }) + Ok(()) } fn into_channels(self) -> Self::Channels { + self.channels.map(|channel_group_reader| 
channel_group_reader.into_channels()) + } +} + + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn parse(){ } } \ No newline at end of file diff --git a/src/image/crop.rs b/src/image/crop.rs index 63aadbf3..ba0362b1 100644 --- a/src/image/crop.rs +++ b/src/image/crop.rs @@ -188,13 +188,13 @@ pub struct CroppedWriter { } impl<'c, Channels> ChannelsWriter for CroppedWriter where Channels: ChannelsWriter { - fn extract_uncompressed_block(&self, header: &Header, block: BlockIndex) -> Vec { + fn extract_uncompressed_block(&self, header: &Header, block: BlockIndex, output_block_data: &mut [u8]) { let block = BlockIndex { pixel_position: block.pixel_position + self.offset, .. block }; - self.channels.extract_uncompressed_block(header, block) + self.channels.extract_uncompressed_block(header, block, output_block_data); } } diff --git a/src/image/mod.rs b/src/image/mod.rs index a4e3cf3d..047303ae 100644 --- a/src/image/mod.rs +++ b/src/image/mod.rs @@ -28,7 +28,7 @@ pub mod write; pub mod crop; pub mod pixel_vec; pub mod recursive; -// pub mod channel_groups; +pub mod channel_groups; use crate::meta::header::{ImageAttributes, LayerAttributes}; diff --git a/src/image/read/any_channels.rs b/src/image/read/any_channels.rs index 054a7c35..41ba3358 100644 --- a/src/image/read/any_channels.rs +++ b/src/image/read/any_channels.rs @@ -9,6 +9,7 @@ use crate::math::Vec2; use crate::meta::attribute::{Text, ChannelDescription}; use crate::image::read::layers::{ReadChannels, ChannelsReader}; use crate::block::chunk::TileCoordinates; +use crate::prelude::read::image::ChannelMask; /// A template that creates an [AnyChannelsReader] for each layer in the image. /// This loads all channels for each layer. 
@@ -35,8 +36,9 @@ pub trait ReadSamples { #[derive(Debug, Clone, Eq, PartialEq)] pub struct AnyChannelsReader { - /// Stores a separate sample reader per channel in the layer - sample_channels_reader: SmallVec<[AnyChannelReader; 4]>, + /// Stores a separate sample reader per channel in the layer. + /// Only selected channels contain some value, ignored channels contain none. + sample_channels_reader: SmallVec<[Option>; 4]>, } /// Processes pixel blocks from a file and accumulates them into a single arbitrary channel. @@ -64,7 +66,7 @@ pub trait SamplesReader { type Samples; /// Specify whether a single block of pixels should be loaded from the file - fn filter_block(&self, tile: TileCoordinates) -> bool; + fn is_block_desired(&self, tile: TileCoordinates) -> bool; /// Load a single pixel line, which has not been filtered, into the reader, accumulating the sample data fn read_line(&mut self, line: LineRef<'_>) -> UnitResult; @@ -77,13 +79,20 @@ pub trait SamplesReader { impl<'s, S: 's + ReadSamples> ReadChannels<'s> for ReadAnyChannels { type Reader = AnyChannelsReader; - fn create_channels_reader(&self, header: &Header) -> Result { - let samples: Result<_> = header.channels.list.iter() - .map(|channel: &ChannelDescription| Ok(AnyChannelReader { - samples: self.read_samples.create_sample_reader(header, channel)?, - name: channel.name.clone(), - sampling_rate: channel.sampling, - quantize_linearly: channel.quantize_linearly + fn create_channels_reader(&self, header: &Header, selected_channels_indices: &ChannelMask) -> Result { + let samples: Result<_> = header.channels.list.iter().enumerate() + .map(|(channel_index, channel)| Ok({ + if selected_channels_indices.is_selected(channel_index){ + Some(AnyChannelReader { + samples: self.read_samples.create_sample_reader(header, channel)?, + name: channel.name.clone(), + sampling_rate: channel.sampling, + quantize_linearly: channel.quantize_linearly + }) + } + else { + None + } })) .collect(); @@ -94,19 +103,15 @@ impl<'s, 
S: 's + ReadSamples> ReadChannels<'s> for ReadAnyChannels { impl ChannelsReader for AnyChannelsReader { type Channels = AnyChannels; - fn filter_block(&self, tile: TileCoordinates) -> bool { - self.sample_channels_reader.iter().any(|channel| channel.samples.filter_block(tile)) + fn is_block_desired(&self, tile: TileCoordinates) -> bool { + self.sample_channels_reader.iter().flatten().any(|channel| channel.samples.is_block_desired(tile)) } - fn read_block(&mut self, header: &Header, decompressed: UncompressedBlock) -> UnitResult { - /*for (bytes, line) in LineIndex::lines_in_block(decompressed.index, header) { - let channel = self.sample_channels_reader.get_mut(line.channel).unwrap(); - channel.samples.read_line(LineSlice { location: line, value: &decompressed.data[bytes] })?; - } - - Ok(())*/ + fn read_block(&mut self, header: &Header, decompressed: &UncompressedBlock) -> UnitResult { for line in decompressed.lines(&header.channels) { - self.sample_channels_reader[line.location.channel].samples.read_line(line)?; + if let Some(channel) = &mut self.sample_channels_reader[line.location.channel] { + channel.samples.read_line(line)?; + } } Ok(()) @@ -114,7 +119,7 @@ impl ChannelsReader for AnyChannelsReader { fn into_channels(self) -> Self::Channels { AnyChannels { // not using `new()` as the channels are already sorted - list: self.sample_channels_reader.into_iter() + list: self.sample_channels_reader.into_iter().flatten() .map(|channel| AnyChannel { sample_data: channel.samples.into_samples(), diff --git a/src/image/read/image.rs b/src/image/read/image.rs index fce2f527..b40402da 100644 --- a/src/image/read/image.rs +++ b/src/image/read/image.rs @@ -11,6 +11,8 @@ use std::io::{Read, BufReader}; use std::io::Seek; use crate::meta::MetaData; use crate::block::reader::ChunksReader; +use std::collections::BTreeSet; +use std::iter::FromIterator; /// Specify whether to read the image in parallel, /// whether to use pedantic error handling, @@ -121,12 +123,12 @@ impl 
ReadImage where F: FnMut(f64) // TODO propagate send requirement further upwards if parallel { block_reader.decompress_parallel(pedantic, |meta_data, block|{ - image_collector.read_block(&meta_data.headers, block) + image_collector.read_block(&meta_data.headers, &block) })?; } else { block_reader.decompress_sequential(pedantic, |meta_data, block|{ - image_collector.read_block(&meta_data.headers, block) + image_collector.read_block(&meta_data.headers, &block) })?; } @@ -158,7 +160,7 @@ impl ImageWithAttributesReader where L: LayersReader { } /// Load a single pixel block, which has not been filtered, into the reader, accumulating the image - fn read_block(&mut self, headers: &[Header], block: UncompressedBlock) -> UnitResult { + fn read_block(&mut self, headers: &[Header], block: &UncompressedBlock) -> UnitResult { self.layers_reader.read_block(headers, block) } @@ -171,6 +173,33 @@ impl ImageWithAttributesReader where L: LayersReader { } } +/// Stores which channels to ignore when loading an image. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct ChannelMask { + valid_indices: BTreeSet, // TODO optimize for case where everything is selected? +} + +impl ChannelMask { + /// Include all channels in the specified range. + pub fn all(channel_count: usize) -> Self { + Self { valid_indices: BTreeSet::from_iter(0..channel_count) } + } + + /// Include only the specified channels. + pub fn only(channels: impl IntoIterator) -> Self { + Self { valid_indices: BTreeSet::from_iter(channels.into_iter()) } + } + + /// Is a specific channel selected? + pub fn is_selected(&self, channel_index: usize) -> bool { + self.valid_indices.contains(&channel_index) + } + + /// Iterate all selected channels. + pub fn selected_channel_indices(&self) -> impl '_ + Iterator { + self.valid_indices.iter().cloned() + } +} /// A template that creates a `LayerReader` for each layer in the file. 
pub trait ReadLayers<'s> { @@ -201,7 +230,7 @@ pub trait LayersReader { fn filter_block(&self, meta: &MetaData, tile: TileCoordinates, block: BlockIndex) -> bool; /// Load a single pixel block, which has not been filtered, into the reader, accumulating the layer - fn read_block(&mut self, headers: &[Header], block: UncompressedBlock) -> UnitResult; + fn read_block(&mut self, headers: &[Header], block: &UncompressedBlock) -> UnitResult; /// Deliver the final accumulated layers for the image fn into_layers(self) -> Self::Layers; diff --git a/src/image/read/layers.rs b/src/image/read/layers.rs index 75159c2e..d6f0150d 100644 --- a/src/image/read/layers.rs +++ b/src/image/read/layers.rs @@ -8,6 +8,7 @@ use crate::math::Vec2; use crate::image::read::image::{ReadLayers, LayersReader}; use crate::block::chunk::TileCoordinates; use crate::meta::MetaData; +use crate::prelude::read::image::ChannelMask; /// Specify to read all channels, aborting if any one is invalid. /// [`ReadRgbaChannels`] or [`ReadAnyChannels`]. @@ -34,12 +35,14 @@ pub trait ReadChannels<'s> { type Reader: ChannelsReader; /// Create a single reader for all channels of a specific layer - fn create_channels_reader(&'s self, header: &Header) -> Result; + fn create_channels_reader(&'s self, header: &Header, selected_channels_indices: &ChannelMask) -> Result; /// Read only the first layer which meets the previously specified requirements /// For example, skips layers with deep data, if specified earlier. /// Aborts if the image contains no layers. + /// If certain channels are required by the caller but missing in the file, + /// the layer is also considered invalid. // TODO test if this filters non-deep layers while ignoring deep data layers! 
fn first_valid_layer(self) -> ReadFirstValidLayer where Self:Sized { ReadFirstValidLayer { read_channels: self } } @@ -89,10 +92,11 @@ pub trait ChannelsReader { type Channels; /// Specify whether a single block of pixels should be loaded from the file - fn filter_block(&self, tile: TileCoordinates) -> bool; + fn is_block_desired(&self, tile: TileCoordinates) -> bool; - /// Load a single pixel block, which has not been filtered, into the reader, accumulating the channel data - fn read_block(&mut self, header: &Header, block: UncompressedBlock) -> UnitResult; + /// Load a single block of pixels that passed the filters into the reader, + /// slowly accumulating the pixel data with each call + fn read_block(&mut self, header: &Header, block: &UncompressedBlock) -> UnitResult; /// Deliver the final accumulated channel collection for the image fn into_channels(self) -> Self::Channels; @@ -123,7 +127,7 @@ impl<'s, C> ReadLayers<'s> for ReadAllLayers where C: ReadChannels<'s> { fn create_layers_reader(&'s self, headers: &[Header]) -> Result { let readers: Result<_> = headers.iter() - .map(|header| LayerReader::new(header, self.read_channels.create_channels_reader(header)?)) + .map(|header| LayerReader::new(header, self.read_channels.create_channels_reader(header, &ChannelMask::all(header.channels.list.len()))?)) .collect(); Ok(AllLayersReader { @@ -137,10 +141,10 @@ impl LayersReader for AllLayersReader where C: ChannelsReader { fn filter_block(&self, _: &MetaData, tile: TileCoordinates, block: BlockIndex) -> bool { let layer = self.layer_readers.get(block.layer).expect("invalid layer index argument"); - layer.channels_reader.filter_block(tile) + layer.channels_reader.is_block_desired(tile) } - fn read_block(&mut self, headers: &[Header], block: UncompressedBlock) -> UnitResult { + fn read_block(&mut self, headers: &[Header], block: &UncompressedBlock) -> UnitResult { self.layer_readers .get_mut(block.index.layer).expect("invalid layer index argument") 
.channels_reader.read_block(headers.get(block.index.layer).expect("invalid header index in block"), block) @@ -167,7 +171,7 @@ impl<'s, C> ReadLayers<'s> for ReadFirstValidLayer where C: ReadChannels<'s> fn create_layers_reader(&'s self, headers: &[Header]) -> Result { headers.iter().enumerate() .flat_map(|(index, header)| - self.read_channels.create_channels_reader(header) + self.read_channels.create_channels_reader(header, &ChannelMask::all(header.channels.list.len())) .and_then(|reader| Ok(FirstValidLayerReader { layer_reader: LayerReader::new(header, reader)?, layer_index: index @@ -184,10 +188,10 @@ impl LayersReader for FirstValidLayerReader where C: ChannelsReader { type Layers = Layer; fn filter_block(&self, _: &MetaData, tile: TileCoordinates, block: BlockIndex) -> bool { - block.layer == self.layer_index && self.layer_reader.channels_reader.filter_block(tile) + block.layer == self.layer_index && self.layer_reader.channels_reader.is_block_desired(tile) } - fn read_block(&mut self, headers: &[Header], block: UncompressedBlock) -> UnitResult { + fn read_block(&mut self, headers: &[Header], block: &UncompressedBlock) -> UnitResult { debug_assert_eq!(block.index.layer, self.layer_index, "block should have been filtered out"); self.layer_reader.channels_reader.read_block(&headers[self.layer_index], block) } diff --git a/src/image/read/levels.rs b/src/image/read/levels.rs index 5705903c..91a2ed19 100644 --- a/src/image/read/levels.rs +++ b/src/image/read/levels.rs @@ -192,7 +192,7 @@ impl ReadSamples for ReadAllLevels { impl SamplesReader for AllLevelsReader { type Samples = Levels; - fn filter_block(&self, _: TileCoordinates) -> bool { + fn is_block_desired(&self, _: TileCoordinates) -> bool { true } diff --git a/src/image/read/samples.rs b/src/image/read/samples.rs index e03c3ccb..2049b772 100644 --- a/src/image/read/samples.rs +++ b/src/image/read/samples.rs @@ -75,7 +75,7 @@ impl ReadSamplesLevel for ReadFlatSamples { impl SamplesReader for FlatSamplesReader 
{ type Samples = FlatSamples; - fn filter_block(&self, tile: TileCoordinates) -> bool { + fn is_block_desired(&self, tile: TileCoordinates) -> bool { tile.level_index == self.level } diff --git a/src/image/read/specific_channels.rs b/src/image/read/specific_channels.rs index cc7f1abc..f52d3548 100644 --- a/src/image/read/specific_channels.rs +++ b/src/image/read/specific_channels.rs @@ -12,6 +12,7 @@ use crate::image::read::layers::{ChannelsReader, ReadChannels}; use crate::block::chunk::TileCoordinates; use std::marker::PhantomData; +use crate::prelude::read::image::ChannelMask; /// Can be attached one more channel reader. @@ -24,7 +25,7 @@ pub trait ReadSpecificChannel: Sized + CheckDuplicates { type RecursivePixelReader: RecursivePixelReader; /// Create a separate internal reader for the pixels of the specific channel combination. - fn create_recursive_reader(&self, channels: &ChannelList) -> Result; + fn create_recursive_reader(&self, channels: &ChannelList, selected_channel_indices: &ChannelMask) -> Result; /// Plan to read an additional channel from the image, with the specified name. /// If the channel cannot be found in the image when the image is read, the image will not be loaded. 
@@ -145,10 +146,10 @@ ReadChannels<'s> for CollectPixels; - fn create_channels_reader(&'s self, header: &Header) -> Result { + fn create_channels_reader(&'s self, header: &Header, selected_channel_indices: &ChannelMask) -> Result { if header.deep { return Err(Error::invalid("`SpecificChannels` does not support deep data yet")) } - let pixel_reader = self.read_channels.create_recursive_reader(&header.channels)?; + let pixel_reader = self.read_channels.create_recursive_reader(&header.channels, selected_channel_indices)?; let channel_descriptions = pixel_reader.get_descriptions().into_non_recursive();// TODO not call this twice let create = &self.create_pixels; @@ -181,9 +182,9 @@ ChannelsReader for SpecificChannelsReader::NonRecursive>; - fn filter_block(&self, tile: TileCoordinates) -> bool { tile.is_largest_resolution_level() } // TODO all levels + fn is_block_desired(&self, tile: TileCoordinates) -> bool { tile.is_largest_resolution_level() } // TODO all levels - fn read_block(&mut self, header: &Header, block: UncompressedBlock) -> UnitResult { + fn read_block(&mut self, header: &Header, block: &UncompressedBlock) -> UnitResult { let mut pixels = vec![PxReader::RecursivePixel::default(); block.index.pixel_size.width()]; // TODO allocate once in self let byte_lines = block.data.chunks_exact(header.channels.bytes_per_pixel * block.index.pixel_size.width()); @@ -214,7 +215,25 @@ pub type ReadZeroChannels = NoneMore; impl ReadSpecificChannel for NoneMore { type RecursivePixelReader = NoneMore; - fn create_recursive_reader(&self, _: &ChannelList) -> Result { Ok(NoneMore) } + fn create_recursive_reader(&self, _: &ChannelList, _: &ChannelMask) -> Result { Ok(NoneMore) } +} + +fn find_channel_with_offset(channels: &ChannelList, selected_channel_indices: &ChannelMask, channel_name: &Text) + -> Option> +{ + channels.channels_with_byte_offset() + + // search only in selected channels, fails if no matching channels have been selected + 
.enumerate().filter_map(|(channel_index, channel_with_offset)| { + if selected_channel_indices.is_selected(channel_index) + { Some(channel_with_offset) } else { None } + }) + + .find(|(_, channel)| &channel.name == channel_name) + .map(|(channel_byte_offset, channel)| SampleReader { + channel_byte_offset, channel: channel.clone(), + px: Default::default() + }) } impl ReadSpecificChannel for ReadOptionalChannel @@ -222,16 +241,11 @@ impl ReadSpecificChannel for ReadOptionalChannel>; - fn create_recursive_reader(&self, channels: &ChannelList) -> Result { + fn create_recursive_reader(&self, channels: &ChannelList, selected_channel_indices: &ChannelMask) -> Result { debug_assert!(self.previous_channels.already_contains(&self.channel_name).not(), "duplicate channel name: {}", self.channel_name); - let inner_samples_reader = self.previous_channels.create_recursive_reader(channels)?; - let reader = channels.channels_with_byte_offset() - .find(|(_, channel)| channel.name == self.channel_name) - .map(|(channel_byte_offset, channel)| SampleReader { - channel_byte_offset, channel: channel.clone(), - px: Default::default() - }); + let inner_samples_reader = self.previous_channels.create_recursive_reader(channels, selected_channel_indices)?; + let reader = find_channel_with_offset(channels, selected_channel_indices, &self.channel_name); Ok(Recursive::new(inner_samples_reader, OptionalSampleReader { reader, default_sample: self.default_sample, @@ -244,16 +258,18 @@ impl ReadSpecificChannel for ReadRequiredChannel>; - fn create_recursive_reader(&self, channels: &ChannelList) -> Result { - let previous_samples_reader = self.previous_channels.create_recursive_reader(channels)?; - let (channel_byte_offset, channel) = channels.channels_with_byte_offset() - .find(|(_, channel)| channel.name == self.channel_name) - .ok_or_else(|| Error::invalid(format!( - "layer does not contain all of your specified channels (`{}` is missing)", - self.channel_name - )))?; + // TODO deduplicate logic 
with above function + fn create_recursive_reader(&self, channels: &ChannelList, selected_channel_indices: &ChannelMask) -> Result { + debug_assert!(self.previous_channels.already_contains(&self.channel_name).not(), "duplicate channel name: {}", self.channel_name); + + let previous_samples_reader = self.previous_channels.create_recursive_reader(channels, selected_channel_indices)?; + let reader = find_channel_with_offset(channels, selected_channel_indices, &self.channel_name) + .ok_or_else(|| Error::invalid(format!( + "layer does not contain all of your specified channels (`{}` is missing)", + self.channel_name + )))?; - Ok(Recursive::new(previous_samples_reader, SampleReader { channel_byte_offset, channel: channel.clone(), px: Default::default() })) + Ok(Recursive::new(previous_samples_reader, reader)) } } diff --git a/src/image/write/channels.rs b/src/image/write/channels.rs index 2450f09c..e170fa75 100644 --- a/src/image/write/channels.rs +++ b/src/image/write/channels.rs @@ -10,6 +10,7 @@ use crate::block::samples::*; use crate::image::write::samples::*; use std::marker::PhantomData; +use crate::block::lines::{LineIndex, LineRefMut}; /// Enables an image containing this list of channels to be written to a file. @@ -31,8 +32,8 @@ pub trait WritableChannels<'slf> { /// A temporary writer for a list of channels pub trait ChannelsWriter: Sync { - /// Deliver a block of pixels, containing all channel data, to be stored in the file - fn extract_uncompressed_block(&self, header: &Header, block: BlockIndex) -> Vec; // TODO return uncompressed block? + /// Fill a block of bytes with pixels, containing all channel data, to be stored in the file. 
+ fn extract_uncompressed_block(&self, header: &Header, block: BlockIndex, block_data: &mut [u8]); } @@ -98,10 +99,15 @@ pub struct AnyChannelsWriter { } impl ChannelsWriter for AnyChannelsWriter where Samples: SamplesWriter { - fn extract_uncompressed_block(&self, header: &Header, block_index: BlockIndex) -> Vec { - UncompressedBlock::collect_block_data_from_lines(&header.channels, block_index, |line_ref| { - self.channels[line_ref.location.channel].extract_line(line_ref) - }) + fn extract_uncompressed_block(&self, header: &Header, block_index: BlockIndex, block_bytes: &mut [u8]) { + for (byte_range, line_index) in LineIndex::lines_in_block(block_index, &header.channels) { + let line_ref = LineRefMut { // TODO subsampling + value: &mut block_bytes[byte_range], + location: line_index, + }; + + self.channels[line_ref.location.channel].extract_line(line_ref); + } } } @@ -168,10 +174,7 @@ for SpecificChannelsWriter<'channels, PxWriter, Storage, Channels> Storage::Pixel: IntoRecursive, PxWriter: Sync + RecursivePixelWriter<::Recursive>, { - fn extract_uncompressed_block(&self, header: &Header, block_index: BlockIndex) -> Vec { - let block_bytes = block_index.pixel_size.area() * header.channels.bytes_per_pixel; - let mut block_bytes = vec![0_u8; block_bytes]; - + fn extract_uncompressed_block(&self, header: &Header, block_index: BlockIndex, block_bytes: &mut [u8]) { let width = block_index.pixel_size.0; let line_bytes = width * header.channels.bytes_per_pixel; let byte_lines = block_bytes.chunks_exact_mut(line_bytes); @@ -189,8 +192,6 @@ for SpecificChannelsWriter<'channels, PxWriter, Storage, Channels> self.recursive_channel_writer.write_pixels(line_bytes, pixel_line.as_slice(), |px| px); } - - block_bytes } } diff --git a/src/image/write/layers.rs b/src/image/write/layers.rs index 85648ffd..54019757 100644 --- a/src/image/write/layers.rs +++ b/src/image/write/layers.rs @@ -25,8 +25,8 @@ pub trait WritableLayers<'slf> { /// A temporary writer for a list of channels pub 
trait LayersWriter: Sync { - /// Deliver a block of pixels from a single layer to be stored in the file - fn extract_uncompressed_block(&self, headers: &[Header], block: BlockIndex) -> Vec; + /// Fill a block of bytes with pixel data from a single layer to be stored in the file + fn extract_uncompressed_block(&self, headers: &[Header], block: BlockIndex, output_block_data: &mut [u8]); } /// A temporary writer for an arbitrary list of layers @@ -117,14 +117,14 @@ impl<'slf, Channels: WritableChannels<'slf>> WritableLayers<'slf> for Layer LayersWriter for AllLayersWriter where C: ChannelsWriter { - fn extract_uncompressed_block(&self, headers: &[Header], block: BlockIndex) -> Vec { - self.layers[block.layer].extract_uncompressed_block(std::slice::from_ref(&headers[block.layer]), block) // TODO no array-vs-first + fn extract_uncompressed_block(&self, headers: &[Header], block: BlockIndex, output_block_data: &mut [u8]) { + self.layers[block.layer].extract_uncompressed_block(std::slice::from_ref(&headers[block.layer]), block, output_block_data) // TODO no array-vs-first } } impl LayersWriter for LayerWriter where C: ChannelsWriter { - fn extract_uncompressed_block(&self, headers: &[Header], block: BlockIndex) -> Vec { - self.channels.extract_uncompressed_block(headers.first().expect("invalid inferred header"), block) // TODO no array-vs-first + fn extract_uncompressed_block(&self, headers: &[Header], block: BlockIndex, output_block_data: &mut [u8]) { + self.channels.extract_uncompressed_block(headers.first().expect("invalid inferred header"), block, output_block_data) // TODO no array-vs-first } } @@ -165,7 +165,7 @@ impl<'slf, InnerLayers, Channels> WritableLayers<'slf> for Recursive = Recursive)>; impl LayersWriter for NoneMore { - fn extract_uncompressed_block(&self, _: &[Header], _: BlockIndex) -> Vec { + fn extract_uncompressed_block(&self, _: &[Header], _: BlockIndex, _: &mut [u8]) { panic!("recursive length mismatch bug"); } } @@ -173,14 +173,14 @@ impl 
LayersWriter for NoneMore { impl LayersWriter for RecursiveLayersWriter where InnerLayersWriter: LayersWriter, Channels: ChannelsWriter { - fn extract_uncompressed_block(&self, headers: &[Header], block: BlockIndex) -> Vec { + fn extract_uncompressed_block(&self, headers: &[Header], block: BlockIndex, output_block_data: &mut [u8]) { let (layer_index, layer) = &self.value; if *layer_index == block.layer { let header = headers.get(*layer_index).expect("layer index bug"); - layer.extract_uncompressed_block(std::slice::from_ref(header), block) // TODO no slice? + layer.extract_uncompressed_block(std::slice::from_ref(header), block, output_block_data) // TODO no slice? } else { - self.inner.extract_uncompressed_block(headers, block) + self.inner.extract_uncompressed_block(headers, block, output_block_data) } } } diff --git a/src/image/write/mod.rs b/src/image/write/mod.rs index 3c200607..bdbd3064 100644 --- a/src/image/write/mod.rs +++ b/src/image/write/mod.rs @@ -26,6 +26,7 @@ use crate::image::{Image, ignore_progress, SpecificChannels, IntoSample}; use crate::image::write::layers::{WritableLayers, LayersWriter}; use crate::math::Vec2; use crate::block::writer::ChunksWriter; +use crate::block::UncompressedBlock; /// An oversimplified function for "just write the damn file already" use cases. /// Have a look at the examples to see how you can write an image with more flexibility (it's not that hard). 
@@ -159,22 +160,17 @@ impl<'img, L, F> WriteImageWithOptions<'img, L, F> write, headers, self.check_compatibility, move |meta, chunk_writer|{ - let blocks = meta.collect_ordered_block_data(|block_index| - layers.extract_uncompressed_block(&meta.headers, block_index) + let blocks = meta.collect_ordered_block_data(|header, block_index| + UncompressedBlock::fill_block_data(&header.channels, block_index, |output_block_bytes|{ + layers.extract_uncompressed_block(&meta.headers, block_index, output_block_bytes) + }) ); let chunk_writer = chunk_writer.on_progress(self.on_progress); - if self.parallel { chunk_writer.compress_all_blocks_parallel(&meta, blocks)?; } - else { chunk_writer.compress_all_blocks_sequential(&meta, blocks)?; } - /*let blocks_writer = chunk_writer.as_blocks_writer(&meta); // TODO propagate send requirement further upwards - if self.parallel { - blocks_writer.compress_all_blocks_parallel(blocks)?; - } - else { - blocks_writer.compress_all_blocks_sequential(blocks)?; - }*/ + if self.parallel { chunk_writer.compress_all_blocks_parallel(&meta, blocks)?; } + else { chunk_writer.compress_all_blocks_sequential(&meta, blocks)?; } // TODO reuse uncompressed_block allocation here Ok(()) } diff --git a/src/lib.rs b/src/lib.rs index cebb80e1..0d1eabfe 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -56,10 +56,11 @@ pub mod prelude { /// Import this specifically if you want to be explicit but still use the extension traits. 
pub mod traits { pub use crate::image::write::{WritableImage, channels::GetPixel}; + pub use crate::image::channel_groups::ReadGroupedChannels; pub use crate::image::read::{ read, any_channels::ReadSamples, image::ReadLayers, image::ReadImage, layers::ReadChannels, - specific_channels::{ReadSpecificChannel} + specific_channels::ReadSpecificChannel }; pub use crate::image::crop::{Crop, CropWhere, CropResult, InspectSample, CroppedChannels, ApplyCroppedView}; @@ -78,6 +79,7 @@ pub mod prelude { // image data structures pub use crate::image::*; + pub use crate::image::channel_groups::ChannelGroups; pub use crate::meta::{ attribute, MetaData, header::{ LayerAttributes, ImageAttributes } }; pub use crate::block::samples::Sample; pub use crate::meta::attribute::{ diff --git a/src/meta/attribute.rs b/src/meta/attribute.rs index 21ff4b4e..d1fe74b6 100644 --- a/src/meta/attribute.rs +++ b/src/meta/attribute.rs @@ -220,11 +220,11 @@ pub struct FloatRect { pub max: Vec2 } -/// A List of channels. Channels must be sorted alphabetically. +/// A List of channels. Channels are sorted alphabetically by name. #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct ChannelList { - /// The channels in this list. + /// The channels in this list, sorted by name. pub list: SmallVec<[ChannelDescription; 5]>, /// The number of bytes that one pixel in this image needs. @@ -404,7 +404,7 @@ pub enum LevelMode { // will mostly be "R", "G", "B" or "deepscanlineimage" pub type TextBytes = SmallVec<[u8; 24]>; -/// A byte slice, interpreted as text +/// A byte slice, interpreted as text. pub type TextSlice = [u8]; @@ -622,6 +622,12 @@ impl Text { string_chars.eq(self_chars) } + + /// Append characters to the front of the text. + // Always valid, as each u8 cannot be invalid. 
+ pub fn push_front(&mut self, other: impl IntoIter) { + self.bytes.insert_many(0, other); + } } impl PartialEq for Text { diff --git a/src/meta/header.rs b/src/meta/header.rs index 53904427..ffce7fac 100644 --- a/src/meta/header.rs +++ b/src/meta/header.rs @@ -595,10 +595,10 @@ impl Header { self.channels.list.iter() .map(|channel: &ChannelDescription| - pixel_count_of_levels(channel.subsampled_resolution(self.layer_size)) * channel.sample_type.bytes_per_sample() + pixel_count_of_levels(channel.subsampled_resolution(self.layer_size)) + * channel.sample_type.bytes_per_sample() ) .sum() - } /// Approximates the maximum number of bytes that the pixels of this header will consume in a file. diff --git a/src/meta/mod.rs b/src/meta/mod.rs index 0c36af88..90879c70 100644 --- a/src/meta/mod.rs +++ b/src/meta/mod.rs @@ -438,29 +438,29 @@ impl MetaData { /// The blocks written to the file must be exactly in this order, /// except for when the `LineOrder` is unspecified. /// The index represents the block index, in increasing line order, within the header. - pub fn enumerate_ordered_header_block_indices(&self) -> impl '_ + Iterator { + pub fn enumerate_ordered_header_block_indices(&self) -> impl '_ + Iterator { crate::block::enumerate_ordered_header_block_indices(&self.headers) } /// Go through all the block indices in the correct order and call the specified closure for each of these blocks. /// That way, the blocks indices are filled with real block data and returned as an iterator. /// The closure returns the an `UncompressedBlock` for each block index. 
- pub fn collect_ordered_blocks<'s>(&'s self, mut get_block: impl 's + FnMut(BlockIndex) -> UncompressedBlock) + pub fn collect_ordered_blocks<'s>(&'s self, mut get_block: impl 's + FnMut(&Header, BlockIndex) -> UncompressedBlock) -> impl 's + Iterator { - self.enumerate_ordered_header_block_indices().map(move |(index_in_header, block_index)|{ - (index_in_header, get_block(block_index)) + self.enumerate_ordered_header_block_indices().map(move |(header, index_in_header, block_index)|{ + (index_in_header, get_block(header, block_index)) }) } /// Go through all the block indices in the correct order and call the specified closure for each of these blocks. /// That way, the blocks indices are filled with real block data and returned as an iterator. /// The closure returns the byte data for each block index. - pub fn collect_ordered_block_data<'s>(&'s self, mut get_block_data: impl 's + FnMut(BlockIndex) -> Vec) + pub fn collect_ordered_block_data<'s>(&'s self, mut get_block_data: impl 's + FnMut(&Header, BlockIndex) -> Vec) -> impl 's + Iterator { - self.collect_ordered_blocks(move |block_index| - UncompressedBlock { index: block_index, data: get_block_data(block_index) } + self.collect_ordered_blocks(move |header, block_index| + UncompressedBlock { index: block_index, data: get_block_data(header, block_index) } ) }