Bugfixes to index data unification, now supports tangents.
FractalFir committed Jun 9, 2023
1 parent 43c3615 commit 545b4ad
Showing 4 changed files with 81 additions and 22 deletions.
33 changes: 16 additions & 17 deletions src/lib.rs
@@ -24,6 +24,7 @@ macro_rules! runtime_agnostic_block_on {
}
#[doc(hidden)]
pub mod custom_data;
#[allow(dead_code)]
mod material;
#[cfg(feature = "model_importer")]
mod model_importer;
@@ -76,7 +77,7 @@ use crate::custom_data::CustomDataSegment;
#[doc(inline)]
pub use crate::custom_data::{CustomData, DataSegmentError};
#[doc(inline)]
use crate::material::MaterialInfo;
//use crate::material::MaterialInfo;
#[doc(inline)]
pub use crate::normals::NormalPrecisionMode;
#[doc(inline)]
@@ -128,7 +129,7 @@ pub struct TMFMesh {
uv_triangles: Option<Vec<IndexType>>,
tangents: Option<Vec<Tangent>>,
tangent_triangles: Option<Vec<IndexType>>,
materials: Option<MaterialInfo>,
//materials: Option<MaterialInfo>,
custom_data: Vec<CustomDataSegment>,
}
impl Default for TMFMesh {
@@ -162,26 +163,19 @@ impl TMFMesh {
self.set_uv_triangles(uv_triangles);
}
}
/// Changes mesh data to make all index arrays (e.g. `vertex_triangle_array`, `normal_triangle_array`, etc.) exactly the same. Does not support custom index segments, and will leave them unaffected.
/// Very often drastically reduces mesh size.
pub fn unify_index_data(&mut self) {
/*
let v_vt_n_nt = (self.get_vertices().zip(self.get_vertex_triangles()))
.zip(self.get_normals().zip(self.get_normal_triangles()));
if let Some(((vertices, vertex_triangles), (normals, normal_triangles))) = v_vt_n_nt {
let (indices, vertices, normals) =
unify_data::merge_data_2(&[vertex_triangles, normal_triangles], vertices, normals);
self.set_vertices(vertices);
self.set_normals(normals);
self.set_vertex_triangles(indices.clone());
self.set_normal_triangles(indices);
}*/
let (vertices, normals, uvs, indices) = unify_data::smart_merge_data_3(
let (vertices, normals, uvs, tangents, indices) = unify_data::smart_merge_data_4(
self.get_vertices(),
self.get_normals(),
self.get_uvs(),
self.get_tangents(),
[
self.get_vertex_triangles(),
self.get_normal_triangles(),
self.get_uv_triangles(),
self.get_tangent_triangles(),
],
);
if let Some(indices) = indices {
@@ -198,7 +192,12 @@ impl TMFMesh {
if let Some(uvs) = uvs {
//println!("unfied uvs!");
self.set_uvs(uvs);
self.set_uv_triangles(indices);
self.set_uv_triangles(indices.clone());
}
if let Some(tangents) = tangents {
//println!("unfied uvs!");
self.set_tangents(tangents);
self.set_tangent_triangles(indices);
}
}
//todo!();
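For readers unfamiliar with the unification step, here is a minimal, self-contained sketch (plain std Rust, not this crate's API; all names and data are hypothetical) of the effect `unify_index_data` is after: separate per-attribute index arrays are replaced by one shared index array, duplicating attribute data where needed.

```rust
// Hypothetical illustration of index-data unification (not tmf's API).
fn main() {
    let positions = ["p0", "p1", "p2"];
    let normals = ["n0", "n1"];

    // One triangle whose corners index positions and normals independently.
    let position_indices = [0usize, 1, 2];
    let normal_indices = [0usize, 0, 1];

    // After unification a single index array (here 0, 1, 2, since every
    // corner is a unique (position, normal) pair) addresses both attributes,
    // and the attribute buffers are expanded to match it.
    let unified_positions: Vec<_> = position_indices.iter().map(|&i| positions[i]).collect();
    let unified_normals: Vec<_> = normal_indices.iter().map(|&i| normals[i]).collect();

    assert_eq!(unified_positions, ["p0", "p1", "p2"]);
    assert_eq!(unified_normals, ["n0", "n0", "n1"]);
    // This commit extends the same treatment to tangents and their indices.
}
```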
@@ -725,7 +724,7 @@ impl TMFMesh {
vertex_triangles: None,
vertices: None,
tangents: None,
materials: None,
//materials: None,
custom_data: Vec::new(),
tangent_triangles: None,
}
@@ -920,7 +919,7 @@ pub enum TMFImportError {
NewerVersionRequired,
#[error("A file segment exceeded the maximum length(2GB) was encountered. This segments length is highly unusual, and the segment unlikely to be valid. The segment was not read to prevent memory issues.")]
SegmentTooLong,
#[error("A segments compression type requires that it must be preceded by another segment, from which some of the data is deduced. ")]
#[error("A segments compression type requires that it must be preceded by another segment, from which some of the data is deduced.")]
NoDataBeforeOmmitedSegment,
#[error("Byte precision is too high ({0}: over 64 bits) and is invalid.")]
InvalidPrecision(u8),
2 changes: 1 addition & 1 deletion src/reorder_triangles.rs
@@ -13,7 +13,7 @@ fn reorder_triangles<const ARRAY_COUNT: usize>(
let mut delta = 0;
for array_index in 0..ARRAY_COUNT {
let curr = triangles[array_index][index];
delta = delta.max((curr as isize - last[array_index] as isize).abs() as usize);
delta = delta.max((curr as isize - last[array_index] as isize).unsigned_abs());
last[array_index] = curr;
}
max_deltas.push(delta);
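As context for the change above (not part of the commit): `unsigned_abs()` is the usual replacement for `.abs() as usize` because it returns the magnitude directly as an unsigned integer and stays well defined for `isize::MIN`, where `.abs()` overflows. A small standalone example:

```rust
fn main() {
    let curr: isize = 3;
    let last: isize = 10;

    // Old pattern: absolute value as a signed integer, then cast.
    // `abs()` panics in debug builds (and wraps in release) for `isize::MIN`.
    let delta_old = (curr - last).abs() as usize;

    // New pattern: the magnitude is produced directly as `usize`,
    // correct even for `isize::MIN`.
    let delta_new = (curr - last).unsigned_abs();

    assert_eq!(delta_old, 7);
    assert_eq!(delta_new, 7);
    assert_eq!(isize::MIN.unsigned_abs(), usize::MAX / 2 + 1);
}
```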
2 changes: 1 addition & 1 deletion src/tmf_exporter.rs
@@ -73,7 +73,7 @@ fn merge_segments(segments: &[DecodedSegment]) -> Box<[DecodedSegment]> {
let mut new_segments = Vec::with_capacity(segments.len());
while let Some(mut segment) = segments.pop() {
if segment.is_something() {
for mut other in &mut segments {
for other in &mut segments {
segment.merge(other);
}
new_segments.push(segment);
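For context on the `for mut other` change above: iterating over `&mut Vec<T>` already yields `&mut T`, so the element can be mutated through the binding; `mut other` only makes the binding itself reassignable and triggers an `unused_mut` warning. A standalone illustration (types unrelated to this crate):

```rust
fn main() {
    let mut values = vec![1, 2, 3];
    // `other` is `&mut i32` here; no `mut` on the binding is needed
    // to mutate the element it points to.
    for other in &mut values {
        *other += 10;
    }
    assert_eq!(values, [11, 12, 13]);
}
```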
66 changes: 63 additions & 3 deletions src/unify_data.rs
@@ -49,7 +49,7 @@ pub(crate) fn merge_data_2<A: Copy, B: Copy>(
.iter()
.map(|index| a[*index as usize])
.collect();
let b: Box<[_]> = index_map[0]
let b: Box<[_]> = index_map[1]
.iter()
.map(|index| b[*index as usize])
.collect();
@@ -66,16 +66,42 @@ pub(crate) fn merge_data_3<A: Copy, B: Copy, C: Copy>(
.iter()
.map(|index| a[*index as usize])
.collect();
let b: Box<[_]> = index_map[0]
let b: Box<[_]> = index_map[1]
.iter()
.map(|index| b[*index as usize])
.collect();
let c: Box<[_]> = index_map[0]
let c: Box<[_]> = index_map[2]
.iter()
.map(|index| c[*index as usize])
.collect();
(indices, a, b, c)
}
pub(crate) fn merge_data_4<A: Copy, B: Copy, C: Copy, D: Copy>(
indices: &[&[IndexType]; 4],
a: &[A],
b: &[B],
c: &[C],
d: &[D],
) -> (Box<[IndexType]>, Box<[A]>, Box<[B]>, Box<[C]>, Box<[D]>) {
let (indices, index_map) = unfiy_data_common::<4>(indices);
let a: Box<[_]> = index_map[0]
.iter()
.map(|index| a[*index as usize])
.collect();
let b: Box<[_]> = index_map[1]
.iter()
.map(|index| b[*index as usize])
.collect();
let c: Box<[_]> = index_map[2]
.iter()
.map(|index| c[*index as usize])
.collect();
let d: Box<[_]> = index_map[3]
.iter()
.map(|index| d[*index as usize])
.collect();
(indices, a, b, c, d)
}
type OBoxArr<A> = Option<Box<[A]>>;
fn smart_merge_data_2<A: Copy, B: Copy>(
a: Option<&[A]>,
@@ -115,6 +141,40 @@ pub(crate) fn smart_merge_data_3<A: Copy, B: Copy, C: Copy>(
(None, None, None, None)
}
}
pub(crate) fn smart_merge_data_4<A: Copy, B: Copy, C: Copy, D: Copy>(
a: Option<&[A]>,
b: Option<&[B]>,
c: Option<&[C]>,
d: Option<&[D]>,
indices: [Option<&[IndexType]>; 4],
) -> (OBoxArr<A>, OBoxArr<B>, OBoxArr<C>, OBoxArr<D>, OBoxArr<IndexType>) {
if !a.is_some_and(|data| !data.is_empty()) || indices[0].is_none() {
let (b, c, d, indices) = smart_merge_data_3(b, c, d, [indices[1], indices[2], indices[3]]);
(None, b, c, d, indices)
} else if !b.is_some_and(|data| !data.is_empty()) || indices[1].is_none() {
let (a, c, d, indices) = smart_merge_data_3(a, c, d, [indices[0], indices[2], indices[3]]);
(a, None, c, d, indices)
} else if !c.is_some_and(|data| !data.is_empty()) || indices[2].is_none() {
let (a, b, d, indices) = smart_merge_data_3(a, b, d, [indices[0], indices[1], indices[3]]);
(a, b, None, d, indices)
} else if !d.is_some_and(|data| !data.is_empty()) || indices[3].is_none() {
let (a, b, c, indices) = smart_merge_data_3(a, b, c, [indices[0], indices[1], indices[2]]);
(a, b, c, None, indices)
} else if let Some((((a, indices_a), (b, indices_b)), ((c, indices_c), (d, indices_d)))) = a
.zip(indices[0])
.zip(b.zip(indices[1]))
.zip(c.zip(indices[2]).zip(d.zip(indices[3])))
{
let d: &[D] = d;
let (indices, a, b, c, d) = merge_data_4(&[indices_a, indices_b, indices_c, indices_d], a, b, c, d);
let d: Box<[D]> = d;
(Some(a), Some(b), Some(c), Some(d), Some(indices))
} else {
(None, None, None, None, None)
}
}
/*
pub(crate) fn merge_data_4<A: Copy, B: Copy,C:Copy,D:Copy>(
indices: &[&[IndexType]; 3],
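Since `unfiy_data_common` itself is outside this hunk, here is a rough, self-contained sketch of the unification idea the `merge_data_*` helpers rely on (the details are assumptions, not the crate's actual implementation): each corner's tuple of per-attribute indices is deduplicated into a single shared index, and every attribute receives a remap table saying which original element each unified vertex copies. The bug fixed above was that every attribute was remapped through `index_map[0]` instead of its own table.

```rust
use std::collections::HashMap;

type IndexType = u32;

// Hypothetical sketch: unify two index arrays into one shared index array
// plus one remap table per attribute (the two-attribute analogue of what
// `merge_data_4` needs for four).
fn unify_2(idx_a: &[IndexType], idx_b: &[IndexType]) -> (Vec<IndexType>, Vec<IndexType>, Vec<IndexType>) {
    assert_eq!(idx_a.len(), idx_b.len());
    let mut seen: HashMap<(IndexType, IndexType), IndexType> = HashMap::new();
    let mut unified = Vec::with_capacity(idx_a.len());
    let (mut map_a, mut map_b) = (Vec::new(), Vec::new());
    for (&a, &b) in idx_a.iter().zip(idx_b) {
        let next = seen.len() as IndexType;
        let id = *seen.entry((a, b)).or_insert_with(|| {
            // First occurrence of this (a, b) pair: the unified vertex copies
            // element `a` of attribute A and element `b` of attribute B.
            map_a.push(a);
            map_b.push(b);
            next
        });
        unified.push(id);
    }
    (unified, map_a, map_b)
}

fn main() {
    let (unified, map_a, map_b) = unify_2(&[0, 1, 2, 0], &[0, 0, 1, 0]);
    // Corner 3 repeats corner 0's (0, 0) combination, so it reuses id 0.
    assert_eq!(unified, [0u32, 1, 2, 0]);
    assert_eq!(map_a, [0u32, 1, 2]);
    assert_eq!(map_b, [0u32, 0, 1]);
    // `merge_data_2` above then builds each new attribute buffer as
    // `map.iter().map(|&i| data[i as usize]).collect()`.
}
```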
