Skip to content

Commit

Permalink
Merge pull request #2 from HENDRIX-ZT2/refactor/doc_tests
Browse files Browse the repository at this point in the history
Essential PyFFI fixes for NIF import
  • Loading branch information
neomonkeus committed Sep 15, 2019
2 parents 08d6e08 + 7b26095 commit b8d7ea3
Showing 1 changed file with 59 additions and 28 deletions.
87 changes: 59 additions & 28 deletions pyffi/formats/nif/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2335,17 +2335,17 @@ class bhkLimitedHingeConstraint:
def apply_scale(self, scale):
    """Scale the constraint data.

    Multiplies the x/y/z coordinates of both hinge pivot points by
    ``scale`` so the constraint stays consistent with a rescaled model.

    :param scale: uniform scale factor to apply to the pivot coordinates.
    """
    # Post-refactor attribute layout: ``limited_hinge`` lives directly on
    # the constraint (it was formerly reached via ``self.sub_constraint``).
    for pivot in (self.limited_hinge.pivot_a, self.limited_hinge.pivot_b):
        pivot.x *= scale
        pivot.y *= scale
        pivot.z *= scale

def update_a_b(self, parent):
    """Update the B data from the A data.

    :param parent: a common parent to the two constrained entities; it is
        passed to ``get_transform_a_b`` to compute the relative transform.
    """
    # Post-refactor attribute layout: ``limited_hinge`` lives directly on
    # the constraint (it was formerly reached via ``self.sub_constraint``).
    self.limited_hinge.update_a_b(self.get_transform_a_b(parent))

class bhkListShape:
def get_mass_center_inertia(self, density = 1, solid = True):
Expand Down Expand Up @@ -2406,24 +2406,24 @@ class bhkMalleableConstraint:
def apply_scale(self, scale):
    """Scale the constraint data.

    Multiplies the x/y/z coordinates of the ragdoll and limited-hinge
    pivot points by ``scale`` so the constraint stays consistent with a
    rescaled model.

    :param scale: uniform scale factor to apply to the pivot coordinates.
    """
    # Post-refactor attribute layout: ``ragdoll`` and ``limited_hinge``
    # live directly on the constraint (formerly under ``self.sub_constraint``).
    for pivot in (self.ragdoll.pivot_a, self.ragdoll.pivot_b,
                  self.limited_hinge.pivot_a, self.limited_hinge.pivot_b):
        pivot.x *= scale
        pivot.y *= scale
        pivot.z *= scale

def update_a_b(self, parent):
    """Update the B data from the A data for both sub-descriptors.

    :param parent: a common parent to the two constrained entities; it is
        passed to ``get_transform_a_b`` to compute the relative transform.
    """
    # Compute the relative transform once and share it between the two
    # descriptors. Post-refactor layout: both live directly on the
    # constraint (formerly under ``self.sub_constraint``).
    transform = self.get_transform_a_b(parent)
    self.limited_hinge.update_a_b(transform)
    self.ragdoll.update_a_b(transform)

class bhkMoppBvTreeShape:
def get_mass_center_inertia(self, density=1, solid=True):
Expand Down Expand Up @@ -4691,19 +4691,50 @@ def get_skin_deformation(self):
normals = [ NifFormat.Vector3() for i in range(self.data.num_vertices) ]
sumweights = [ 0.0 for i in range(self.data.num_vertices) ]
skin_offset = skindata.get_transform()
# store one transform & rotation per bone
bone_transforms = []
for i, bone_block in enumerate(skininst.bones):
bonedata = skindata.bone_list[i]
bone_offset = bonedata.get_transform()
bone_matrix = bone_block.get_transform(skelroot)
transform = bone_offset * bone_matrix * skin_offset
scale, rotation, translation = transform.get_scale_rotation_translation()
for skinweight in bonedata.vertex_weights:
index = skinweight.index
weight = skinweight.weight
vertices[index] += weight * (self.data.vertices[index] * transform)
if self.data.has_normals:
normals[index] += weight * (self.data.normals[index] * rotation)
sumweights[index] += weight
bone_transforms.append( (transform, rotation) )

# the usual case
if skindata.has_vertex_weights:
for i, bone_block in enumerate(skininst.bones):
bonedata = skindata.bone_list[i]
transform, rotation = bone_transforms[i]
for skinweight in bonedata.vertex_weights:
index = skinweight.index
weight = skinweight.weight
vertices[index] += weight * (self.data.vertices[index] * transform)
if self.data.has_normals:
normals[index] += weight * (self.data.normals[index] * rotation)
sumweights[index] += weight
# we must get weights from the partition
else:
skinpartition = skininst.skin_partition
for block in skinpartition.skin_partition_blocks:
# get transforms for this block
block_bone_transforms = [bone_transforms[i] for i in block.bones]

# go over each vert in this block
for vert_index, vertex_weights, bone_indices in zip(block.vertex_map,
block.vertex_weights,
block.bone_indices):
# skip verts that were already processed in an earlier block
if sumweights[vert_index] != 0.0:
continue
# go over all 4 weight / bone pairs and transform this vert
for weight, b_i in zip(vertex_weights, bone_indices):
if weight > 0.0:
transform, rotation = block_bone_transforms[b_i]
vertices[vert_index] += weight * (self.data.vertices[vert_index] * transform)
if self.data.has_normals:
normals[vert_index] += weight * (self.data.normals[vert_index] * rotation)
sumweights[vert_index] += weight

for i, s in enumerate(sumweights):
if abs(s - 1.0) > 0.01:
Expand Down

0 comments on commit b8d7ea3

Please sign in to comment.