Mirror of https://github.com/DarklightGames/io_scene_psk_psa.git
Synced 2024-11-15 02:37:39 +01:00

Commit 07f1c45000: Merge branch 'feature-psk-mesh-wysiwyg'
@@ -1,5 +1,6 @@
 import datetime
 from collections import Counter
+import re
 from typing import List, Iterable

 from bpy.types import NlaStrip, Object
@@ -31,22 +32,11 @@ def rgb_to_srgb(c):
     return 12.92 * c


-def get_nla_strips_ending_at_frame(object, frame) -> List[NlaStrip]:
-    if object is None or object.animation_data is None:
-        return []
-    strips = []
-    for nla_track in object.animation_data.nla_tracks:
-        for strip in nla_track.strips:
-            if strip.frame_end == frame:
-                strips.append(strip)
-    return strips
-
-
-def get_nla_strips_in_timeframe(object, frame_min, frame_max) -> List[NlaStrip]:
-    if object is None or object.animation_data is None:
+def get_nla_strips_in_timeframe(animation_data, frame_min, frame_max) -> List[NlaStrip]:
+    if animation_data is None:
         return []
     strips = []
-    for nla_track in object.animation_data.nla_tracks:
+    for nla_track in animation_data.nla_tracks:
         if nla_track.mute:
             continue
         for strip in nla_track.strips:
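Note (illustration, not part of the diff): callers now pass an object's animation data instead of the object itself. A minimal usage sketch, assuming an armature object named 'Armature' (hypothetical) with NLA tracks:

    import bpy

    obj = bpy.data.objects['Armature']  # hypothetical object name
    strips = get_nla_strips_in_timeframe(obj.animation_data, frame_min=1, frame_max=250)
    print([strip.name for strip in strips])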
@@ -103,6 +93,14 @@ def get_psa_sequence_name(action, should_use_original_sequence_name):
     return action.name


+def check_bone_names(bone_names: Iterable[str]):
+    pattern = re.compile(r'^[a-zA-Z0-9_ ]+$')
+    invalid_bone_names = [x for x in bone_names if pattern.match(x) is None]
+    if len(invalid_bone_names) > 0:
+        raise RuntimeError(f'The following bone names are invalid: {invalid_bone_names}.\n'
+                           f'Bone names must only contain letters, numbers, spaces, and underscores.')
+
+
 def get_export_bone_names(armature_object, bone_filter_mode, bone_group_indices: List[int]) -> List[str]:
     """
     Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone groups.
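Note (illustration, not part of the diff): check_bone_names rejects any name outside the [a-zA-Z0-9_ ] character set. A quick sketch with hypothetical bone names:

    try:
        check_bone_names(['root', 'spine_01', 'bad.name'])  # 'bad.name' contains a period
    except RuntimeError as e:
        print(e)  # reports ['bad.name'] as invalid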
@@ -1,14 +1,15 @@
 from typing import Dict, Iterable

 from bpy.types import Action
-from mathutils import Matrix

 from .data import *
 from ..helpers import *


-class PsaBuilderOptions(object):
+class PsaBuildOptions(object):
     def __init__(self):
+        self.should_override_animation_data = False
+        self.animation_data_override = None
         self.fps_source = 'SCENE'
         self.fps_custom = 30.0
         self.sequence_source = 'ACTIONS'
@@ -23,257 +24,259 @@ class PsaBuilderOptions(object):
         self.root_motion = False


-class PsaBuilder(object):
-    def __init__(self):
-        pass
-
-    def get_sequence_fps(self, context, options: PsaBuilderOptions, actions: Iterable[Action]) -> float:
-        if options.fps_source == 'SCENE':
-            return context.scene.render.fps
-        if options.fps_source == 'CUSTOM':
-            return options.fps_custom
-        elif options.fps_source == 'ACTION_METADATA':
-            # Get the minimum value of action metadata FPS values.
-            fps_list = []
-            for action in filter(lambda x: 'psa_sequence_fps' in x, actions):
-                fps = action['psa_sequence_fps']
-                if type(fps) == int or type(fps) == float:
-                    fps_list.append(fps)
-            if len(fps_list) > 0:
-                return min(fps_list)
-            else:
-                # No valid action metadata to use, fallback to scene FPS
-                return context.scene.render.fps
-        else:
-            raise RuntimeError(f'Invalid FPS source "{options.fps_source}"')
-
-    def build(self, context, options: PsaBuilderOptions) -> Psa:
-        active_object = context.view_layer.objects.active
-
-        if active_object.type != 'ARMATURE':
-            raise RuntimeError('Selected object must be an Armature')
-
-        armature = active_object
-
-        if armature.animation_data is None:
-            raise RuntimeError('No animation data for armature')
-
-        # Ensure that we actually have items that we are going to be exporting.
-        if options.sequence_source == 'ACTIONS' and len(options.actions) == 0:
-            raise RuntimeError('No actions were selected for export')
-        elif options.sequence_source == 'TIMELINE_MARKERS' and len(options.marker_names) == 0:
-            raise RuntimeError('No timeline markers were selected for export')
-
-        psa = Psa()
-
-        bones = list(armature.data.bones)
-
-        # The order of the armature bones and the pose bones is not guaranteed to be the same.
-        # As as a result, we need to reconstruct the list of pose bones in the same order as the
-        # armature bones.
-        bone_names = [x.name for x in bones]
-        pose_bones = [(bone_names.index(bone.name), bone) for bone in armature.pose.bones]
-        pose_bones.sort(key=lambda x: x[0])
-        pose_bones = [x[1] for x in pose_bones]
-
-        # Get a list of all the bone indices and instigator bones for the bone filter settings.
-        export_bone_names = get_export_bone_names(armature, options.bone_filter_mode, options.bone_group_indices)
-        bone_indices = [bone_names.index(x) for x in export_bone_names]
-
-        # Make the bone lists contain only the bones that are going to be exported.
-        bones = [bones[bone_index] for bone_index in bone_indices]
-        pose_bones = [pose_bones[bone_index] for bone_index in bone_indices]
-
-        # No bones are going to be exported.
-        if len(bones) == 0:
-            raise RuntimeError('No bones available for export')
-
-        # Build list of PSA bones.
-        for bone in bones:
-            psa_bone = Psa.Bone()
-            psa_bone.name = bytes(bone.name, encoding='utf-8')
-
-            try:
-                parent_index = bones.index(bone.parent)
-                psa_bone.parent_index = parent_index
-                psa.bones[parent_index].children_count += 1
-            except ValueError:
-                psa_bone.parent_index = -1
-
-            if bone.parent is not None:
-                rotation = bone.matrix.to_quaternion()
-                rotation.x = -rotation.x
-                rotation.y = -rotation.y
-                rotation.z = -rotation.z
-                quat_parent = bone.parent.matrix.to_quaternion().inverted()
-                parent_head = quat_parent @ bone.parent.head
-                parent_tail = quat_parent @ bone.parent.tail
-                location = (parent_tail - parent_head) + bone.head
-            else:
-                location = armature.matrix_local @ bone.head
-                rot_matrix = bone.matrix @ armature.matrix_local.to_3x3()
-                rotation = rot_matrix.to_quaternion()
-
-            psa_bone.location.x = location.x
-            psa_bone.location.y = location.y
-            psa_bone.location.z = location.z
-
-            psa_bone.rotation.x = rotation.x
-            psa_bone.rotation.y = rotation.y
-            psa_bone.rotation.z = rotation.z
-            psa_bone.rotation.w = rotation.w
-
-            psa.bones.append(psa_bone)
-
-        # Populate the export sequence list.
-        class NlaState:
-            def __init__(self):
-                self.frame_min = 0
-                self.frame_max = 0
-                self.action = None
-
-        class ExportSequence:
-            def __init__(self):
-                self.name = ''
-                self.nla_state = NlaState()
-                self.fps = 30.0
-
-        export_sequences = []
-
-        if options.sequence_source == 'ACTIONS':
-            for action in options.actions:
-                if len(action.fcurves) == 0:
-                    continue
-                export_sequence = ExportSequence()
-                export_sequence.nla_state.action = action
-                export_sequence.name = get_psa_sequence_name(action, options.should_use_original_sequence_names)
-                frame_min, frame_max = [int(x) for x in action.frame_range]
-                export_sequence.nla_state.frame_min = frame_min
-                export_sequence.nla_state.frame_max = frame_max
-                export_sequence.fps = self.get_sequence_fps(context, options, [action])
-                export_sequences.append(export_sequence)
-            pass
-        elif options.sequence_source == 'TIMELINE_MARKERS':
-            sequence_frame_ranges = self.get_timeline_marker_sequence_frame_ranges(armature, context, options)
-
-            for name, (frame_min, frame_max) in sequence_frame_ranges.items():
-                export_sequence = ExportSequence()
-                export_sequence.name = name
-                export_sequence.nla_state.action = None
-                export_sequence.nla_state.frame_min = frame_min
-                export_sequence.nla_state.frame_max = frame_max
-                nla_strips_actions = set(
-                    map(lambda x: x.action, get_nla_strips_in_timeframe(active_object, frame_min, frame_max)))
-                export_sequence.fps = self.get_sequence_fps(context, options, nla_strips_actions)
-                export_sequences.append(export_sequence)
-        else:
-            raise ValueError(f'Unhandled sequence source: {options.sequence_source}')
-
-        # Add prefixes and suffices to the names of the export sequences and strip whitespace.
-        for export_sequence in export_sequences:
-            export_sequence.name = f'{options.sequence_name_prefix}{export_sequence.name}{options.sequence_name_suffix}'.strip()
-
-        # Now build the PSA sequences.
-        # We actually alter the timeline frame and simply record the resultant pose bone matrices.
-        frame_start_index = 0
-
-        for export_sequence in export_sequences:
-            armature.animation_data.action = export_sequence.nla_state.action
-            context.view_layer.update()
-
-            psa_sequence = Psa.Sequence()
-
-            frame_min = export_sequence.nla_state.frame_min
-            frame_max = export_sequence.nla_state.frame_max
-            frame_count = frame_max - frame_min + 1
-
-            psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252')
-            psa_sequence.frame_count = frame_count
-            psa_sequence.frame_start_index = frame_start_index
-            psa_sequence.fps = export_sequence.fps
-
-            frame_count = frame_max - frame_min + 1
-
-            for frame in range(frame_count):
-                context.scene.frame_set(frame_min + frame)
-
-                for pose_bone in pose_bones:
-                    key = Psa.Key()
-
-                    if pose_bone.parent is not None:
-                        pose_bone_matrix = pose_bone.matrix
-                        pose_bone_parent_matrix = pose_bone.parent.matrix
-                        pose_bone_matrix = pose_bone_parent_matrix.inverted() @ pose_bone_matrix
-                    else:
-                        if options.root_motion:
-                            # Export root motion
-                            pose_bone_matrix = armature.matrix_world @ pose_bone.matrix
-                        else:
-                            pose_bone_matrix = pose_bone.matrix
-
-                    location = pose_bone_matrix.to_translation()
-                    rotation = pose_bone_matrix.to_quaternion().normalized()
-
-                    if pose_bone.parent is not None:
-                        rotation.conjugate()
-
-                    key.location.x = location.x
-                    key.location.y = location.y
-                    key.location.z = location.z
-                    key.rotation.x = rotation.x
-                    key.rotation.y = rotation.y
-                    key.rotation.z = rotation.z
-                    key.rotation.w = rotation.w
-                    key.time = 1.0 / psa_sequence.fps
-
-                    psa.keys.append(key)
-
-            psa_sequence.bone_count = len(pose_bones)
-            psa_sequence.track_time = frame_count
-
-            frame_start_index += frame_count
-
-            psa.sequences[export_sequence.name] = psa_sequence
-
-        return psa
-
-    def get_timeline_marker_sequence_frame_ranges(self, object, context, options: PsaBuilderOptions) -> Dict:
-        # Timeline markers need to be sorted so that we can determine the sequence start and end positions.
-        sequence_frame_ranges = dict()
-        sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame))
-        sorted_timeline_marker_names = list(map(lambda x: x.name, sorted_timeline_markers))
-
-        for marker_name in options.marker_names:
-            marker = context.scene.timeline_markers[marker_name]
-            frame_min = marker.frame
-            # Determine the final frame of the sequence based on the next marker.
-            # If no subsequent marker exists, use the maximum frame_end from all NLA strips.
-            marker_index = sorted_timeline_marker_names.index(marker_name)
-            next_marker_index = marker_index + 1
-            frame_max = 0
-            if next_marker_index < len(sorted_timeline_markers):
-                # There is a next marker. Use that next marker's frame position as the last frame of this sequence.
-                frame_max = sorted_timeline_markers[next_marker_index].frame
-                if options.should_trim_timeline_marker_sequences:
-                    nla_strips = get_nla_strips_in_timeframe(object, marker.frame, frame_max)
-                    if len(nla_strips) > 0:
-                        frame_max = min(frame_max, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips)))
-                        frame_min = max(frame_min, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips)))
-                    else:
-                        # No strips in between this marker and the next, just export this as a one-frame animation.
-                        frame_max = frame_min
-            else:
-                # There is no next marker.
-                # Find the final frame of all the NLA strips and use that as the last frame of this sequence.
-                for nla_track in object.animation_data.nla_tracks:
-                    if nla_track.mute:
-                        continue
-                    for strip in nla_track.strips:
-                        frame_max = max(frame_max, strip.frame_end)
-
-            if frame_min == frame_max:
-                continue
-
-            sequence_frame_ranges[marker_name] = int(frame_min), int(frame_max)
-
-        return sequence_frame_ranges
+def get_sequence_fps(context, options: PsaBuildOptions, actions: Iterable[Action]) -> float:
+    if options.fps_source == 'SCENE':
+        return context.scene.render.fps
+    if options.fps_source == 'CUSTOM':
+        return options.fps_custom
+    elif options.fps_source == 'ACTION_METADATA':
+        # Get the minimum value of action metadata FPS values.
+        fps_list = []
+        for action in filter(lambda x: 'psa_sequence_fps' in x, actions):
+            fps = action['psa_sequence_fps']
+            if type(fps) == int or type(fps) == float:
+                fps_list.append(fps)
+        if len(fps_list) > 0:
+            return min(fps_list)
+        else:
+            # No valid action metadata to use, fallback to scene FPS
+            return context.scene.render.fps
+    else:
+        raise RuntimeError(f'Invalid FPS source "{options.fps_source}"')
+
+
+def get_timeline_marker_sequence_frame_ranges(animation_data, context, options: PsaBuildOptions) -> Dict:
+    # Timeline markers need to be sorted so that we can determine the sequence start and end positions.
+    sequence_frame_ranges = dict()
+    sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame))
+    sorted_timeline_marker_names = list(map(lambda x: x.name, sorted_timeline_markers))
+
+    for marker_name in options.marker_names:
+        marker = context.scene.timeline_markers[marker_name]
+        frame_min = marker.frame
+        # Determine the final frame of the sequence based on the next marker.
+        # If no subsequent marker exists, use the maximum frame_end from all NLA strips.
+        marker_index = sorted_timeline_marker_names.index(marker_name)
+        next_marker_index = marker_index + 1
+        frame_max = 0
+        if next_marker_index < len(sorted_timeline_markers):
+            # There is a next marker. Use that next marker's frame position as the last frame of this sequence.
+            frame_max = sorted_timeline_markers[next_marker_index].frame
+            if options.should_trim_timeline_marker_sequences:
+                nla_strips = get_nla_strips_in_timeframe(animation_data, marker.frame, frame_max)
+                if len(nla_strips) > 0:
+                    frame_max = min(frame_max, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips)))
+                    frame_min = max(frame_min, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips)))
+                else:
+                    # No strips in between this marker and the next, just export this as a one-frame animation.
+                    frame_max = frame_min
+        else:
+            # There is no next marker.
+            # Find the final frame of all the NLA strips and use that as the last frame of this sequence.
+            for nla_track in animation_data.nla_tracks:
+                if nla_track.mute:
+                    continue
+                for strip in nla_track.strips:
+                    frame_max = max(frame_max, strip.frame_end)
+
+        if frame_min > frame_max:
+            continue
+
+        sequence_frame_ranges[marker_name] = int(frame_min), int(frame_max)
+
+    return sequence_frame_ranges
+
+
+def build_psa(context, options: PsaBuildOptions) -> Psa:
+    active_object = context.view_layer.objects.active
+
+    if active_object.type != 'ARMATURE':
+        raise RuntimeError('Selected object must be an Armature')
+
+    if options.should_override_animation_data:
+        animation_data_object = options.animation_data_override
+    else:
+        animation_data_object = active_object
+
+    animation_data = animation_data_object.animation_data
+
+    if animation_data is None:
+        raise RuntimeError(f'No animation data for object \'{animation_data_object.name}\'')
+
+    # Ensure that we actually have items that we are going to be exporting.
+    if options.sequence_source == 'ACTIONS' and len(options.actions) == 0:
+        raise RuntimeError('No actions were selected for export')
+    elif options.sequence_source == 'TIMELINE_MARKERS' and len(options.marker_names) == 0:
+        raise RuntimeError('No timeline markers were selected for export')
+
+    psa = Psa()
+
+    armature = active_object
+    bones = list(armature.data.bones)
+
+    # The order of the armature bones and the pose bones is not guaranteed to be the same.
+    # As a result, we need to reconstruct the list of pose bones in the same order as the
+    # armature bones.
+    bone_names = [x.name for x in bones]
+    pose_bones = [(bone_names.index(bone.name), bone) for bone in armature.pose.bones]
+    pose_bones.sort(key=lambda x: x[0])
+    pose_bones = [x[1] for x in pose_bones]
+
+    # Get a list of all the bone indices and instigator bones for the bone filter settings.
+    export_bone_names = get_export_bone_names(armature, options.bone_filter_mode, options.bone_group_indices)
+    bone_indices = [bone_names.index(x) for x in export_bone_names]
+
+    # Make the bone lists contain only the bones that are going to be exported.
+    bones = [bones[bone_index] for bone_index in bone_indices]
+    pose_bones = [pose_bones[bone_index] for bone_index in bone_indices]
+
+    # No bones are going to be exported.
+    if len(bones) == 0:
+        raise RuntimeError('No bones available for export')
+
+    # Check that all bone names are valid.
+    check_bone_names(map(lambda bone: bone.name, bones))
+
+    # Build list of PSA bones.
+    for bone in bones:
+        psa_bone = Psa.Bone()
+        psa_bone.name = bytes(bone.name, encoding='windows-1252')
+
+        try:
+            parent_index = bones.index(bone.parent)
+            psa_bone.parent_index = parent_index
+            psa.bones[parent_index].children_count += 1
+        except ValueError:
+            psa_bone.parent_index = -1
+
+        if bone.parent is not None:
+            rotation = bone.matrix.to_quaternion().conjugated()
+            inverse_parent_rotation = bone.parent.matrix.to_quaternion().inverted()
+            parent_head = inverse_parent_rotation @ bone.parent.head
+            parent_tail = inverse_parent_rotation @ bone.parent.tail
+            location = (parent_tail - parent_head) + bone.head
+        else:
+            location = armature.matrix_local @ bone.head
+            rot_matrix = bone.matrix @ armature.matrix_local.to_3x3()
+            rotation = rot_matrix.to_quaternion()

+        psa_bone.location.x = location.x
+        psa_bone.location.y = location.y
+        psa_bone.location.z = location.z
+
+        psa_bone.rotation.x = rotation.x
+        psa_bone.rotation.y = rotation.y
+        psa_bone.rotation.z = rotation.z
+        psa_bone.rotation.w = rotation.w
+
+        psa.bones.append(psa_bone)
+
+    # Populate the export sequence list.
+    class NlaState:
+        def __init__(self):
+            self.frame_min = 0
+            self.frame_max = 0
+            self.action = None
+
+    class ExportSequence:
+        def __init__(self):
+            self.name = ''
+            self.nla_state = NlaState()
+            self.fps = 30.0
+
+    export_sequences = []
+
+    if options.sequence_source == 'ACTIONS':
+        for action in options.actions:
+            if len(action.fcurves) == 0:
+                continue
+            export_sequence = ExportSequence()
+            export_sequence.nla_state.action = action
+            export_sequence.name = get_psa_sequence_name(action, options.should_use_original_sequence_names)
+            frame_min, frame_max = [int(x) for x in action.frame_range]
+            export_sequence.nla_state.frame_min = frame_min
+            export_sequence.nla_state.frame_max = frame_max
+            export_sequence.fps = get_sequence_fps(context, options, [action])
+            export_sequences.append(export_sequence)
+        pass
+    elif options.sequence_source == 'TIMELINE_MARKERS':
+        sequence_frame_ranges = get_timeline_marker_sequence_frame_ranges(animation_data, context, options)
+
+        for name, (frame_min, frame_max) in sequence_frame_ranges.items():
+            export_sequence = ExportSequence()
+            export_sequence.name = name
+            export_sequence.nla_state.action = None
+            export_sequence.nla_state.frame_min = frame_min
+            export_sequence.nla_state.frame_max = frame_max
+            nla_strips_actions = set(
+                map(lambda x: x.action, get_nla_strips_in_timeframe(animation_data, frame_min, frame_max)))
+            export_sequence.fps = get_sequence_fps(context, options, nla_strips_actions)
+            export_sequences.append(export_sequence)
+    else:
+        raise ValueError(f'Unhandled sequence source: {options.sequence_source}')
+
+    # Add prefixes and suffices to the names of the export sequences and strip whitespace.
+    for export_sequence in export_sequences:
+        export_sequence.name = f'{options.sequence_name_prefix}{export_sequence.name}{options.sequence_name_suffix}'.strip()
+
+    # Now build the PSA sequences.
+    # We actually alter the timeline frame and simply record the resultant pose bone matrices.
+    frame_start_index = 0
+
+    for export_sequence in export_sequences:
+        # Link the action to the animation data and update view layer.
+        animation_data.action = export_sequence.nla_state.action
+        context.view_layer.update()
+
+        frame_min = export_sequence.nla_state.frame_min
+        frame_max = export_sequence.nla_state.frame_max
+        frame_count = frame_max - frame_min + 1
+
+        psa_sequence = Psa.Sequence()
+        psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252')
+        psa_sequence.frame_count = frame_count
+        psa_sequence.frame_start_index = frame_start_index
+        psa_sequence.fps = export_sequence.fps
+
+        for frame in range(frame_count):
+            context.scene.frame_set(frame_min + frame)
+
+            for pose_bone in pose_bones:
+                key = Psa.Key()
+
+                if pose_bone.parent is not None:
+                    pose_bone_matrix = pose_bone.matrix
+                    pose_bone_parent_matrix = pose_bone.parent.matrix
+                    pose_bone_matrix = pose_bone_parent_matrix.inverted() @ pose_bone_matrix
+                else:
+                    if options.root_motion:
+                        # Export root motion
+                        pose_bone_matrix = armature.matrix_world @ pose_bone.matrix
+                    else:
+                        pose_bone_matrix = pose_bone.matrix
+
+                location = pose_bone_matrix.to_translation()
+                rotation = pose_bone_matrix.to_quaternion().normalized()
+
+                if pose_bone.parent is not None:
+                    rotation.conjugate()
+
+                key.location.x = location.x
+                key.location.y = location.y
+                key.location.z = location.z
+                key.rotation.x = rotation.x
+                key.rotation.y = rotation.y
+                key.rotation.z = rotation.z
+                key.rotation.w = rotation.w
+                key.time = 1.0 / psa_sequence.fps
+
+                psa.keys.append(key)
+
+        psa_sequence.bone_count = len(pose_bones)
+        psa_sequence.track_time = frame_count
+
+        frame_start_index += frame_count
+
+        psa.sequences[export_sequence.name] = psa_sequence
+
+    return psa
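Note (illustration, not part of the diff): with the PsaBuilder class gone, export is a plain function call. A minimal sketch, assuming an armature is the active object, an action named 'Run' exists (hypothetical), and the remaining PsaBuildOptions defaults suffice:

    import bpy

    options = PsaBuildOptions()
    options.sequence_source = 'ACTIONS'
    options.actions = [bpy.data.actions['Run']]  # hypothetical action name
    psa = build_psa(bpy.context, options)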
@@ -10,18 +10,13 @@ from bpy.props import BoolProperty, CollectionProperty, EnumProperty, FloatPrope
 from bpy.types import Action, Operator, PropertyGroup, UIList
 from bpy_extras.io_utils import ExportHelper

-from .builder import PsaBuilder, PsaBuilderOptions
+from .builder import PsaBuildOptions, build_psa
 from .data import *
 from ..helpers import *
 from ..types import BoneGroupListItem


-class PsaExporter(object):
-    def __init__(self, psa: Psa):
-        self.psa: Psa = psa
-
-    # This method is shared by both PSA/K file formats, move this?
-    @staticmethod
+def export_psa(psa: Psa, path: str):
     def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
         section = Section()
         section.name = name
@@ -32,13 +27,11 @@ class PsaExporter(object):
         if data is not None:
             for datum in data:
                 fp.write(datum)

-    def export(self, path: str):
-        with open(path, 'wb') as fp:
-            self.write_section(fp, b'ANIMHEAD')
-            self.write_section(fp, b'BONENAMES', Psa.Bone, self.psa.bones)
-            self.write_section(fp, b'ANIMINFO', Psa.Sequence, list(self.psa.sequences.values()))
-            self.write_section(fp, b'ANIMKEYS', Psa.Key, self.psa.keys)
+    with open(path, 'wb') as fp:
+        write_section(fp, b'ANIMHEAD')
+        write_section(fp, b'BONENAMES', Psa.Bone, psa.bones)
+        write_section(fp, b'ANIMINFO', Psa.Sequence, list(psa.sequences.values()))
+        write_section(fp, b'ANIMKEYS', Psa.Key, psa.keys)


 class PsaExportActionListItem(PropertyGroup):
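Note (illustration, not part of the diff): writing the PSA to disk is likewise a single call now (the path is hypothetical):

    export_psa(psa, '/tmp/example.psa')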
@@ -64,12 +57,26 @@ def should_use_original_sequence_names_updated(_, context):
     update_action_names(context)


+def psa_export_property_group_animation_data_override_poll(_context, obj):
+    return obj.animation_data is not None
+
+
 class PsaExportPropertyGroup(PropertyGroup):
     root_motion: BoolProperty(
         name='Root Motion',
         options=set(),
         default=False,
-        description='When set, the root bone will be transformed as it appears in the scene',
+        description='The root bone will be transformed as it appears in the scene',
+    )
+    should_override_animation_data: BoolProperty(
+        name='Override Animation Data',
+        options=set(),
+        default=False,
+        description='Use the animation data from a different object instead of the selected object'
+    )
+    animation_data_override: PointerProperty(
+        type=bpy.types.Object,
+        poll=psa_export_property_group_animation_data_override_poll
     )
     sequence_source: EnumProperty(
         name='Source',
@@ -191,8 +198,11 @@ class PsaExportOperator(Operator, ExportHelper):
         # SOURCE
         layout.prop(pg, 'sequence_source', text='Source')

-        # ROOT MOTION
-        layout.prop(pg, 'root_motion', text='Root Motion')
+        if pg.sequence_source == 'TIMELINE_MARKERS':
+            # ANIMDATA SOURCE
+            layout.prop(pg, 'should_override_animation_data')
+            if pg.should_override_animation_data:
+                layout.prop(pg, 'animation_data_override', text='')

         # SELECT ALL/NONE
         row = layout.row(align=True)
@@ -249,15 +259,17 @@ class PsaExportOperator(Operator, ExportHelper):
             layout.template_list('PSX_UL_BoneGroupList', '', pg, 'bone_group_list', pg, 'bone_group_list_index',
                                  rows=rows)

-    def should_action_be_selected_by_default(self, action):
-        return action is not None and action.asset_data is None
+        layout.separator()
+
+        # ROOT MOTION
+        layout.prop(pg, 'root_motion', text='Root Motion')

     def is_action_for_armature(self, action):
         if len(action.fcurves) == 0:
             return False
         bone_names = set([x.name for x in self.armature.data.bones])
         for fcurve in action.fcurves:
-            match = re.match(r'pose\.bones\["(.+)"\].\w+', fcurve.data_path)
+            match = re.match(r'pose\.bones\["(.+)"].\w+', fcurve.data_path)
             if not match:
                 continue
             bone_name = match.group(1)
@@ -273,7 +285,7 @@ class PsaExportOperator(Operator, ExportHelper):
         if context.view_layer.objects.active.type != 'ARMATURE':
             raise RuntimeError('The selected object must be an armature')

-    def invoke(self, context, event):
+    def invoke(self, context, _event):
         try:
             self._check_context(context)
         except RuntimeError as e:
@@ -290,7 +302,7 @@ class PsaExportOperator(Operator, ExportHelper):
             item = pg.action_list.add()
             item.action = action
             item.name = action.name
-            item.is_selected = self.should_action_be_selected_by_default(action)
+            item.is_selected = False

         update_action_names(context)

@@ -299,6 +311,7 @@ class PsaExportOperator(Operator, ExportHelper):
         for marker in context.scene.timeline_markers:
             item = pg.marker_list.add()
             item.name = marker.name
+            item.is_selected = False

         if len(pg.action_list) == 0 and len(pg.marker_list) == 0:
             # If there are no actions at all, we have nothing to export, so just cancel the operation.
@@ -318,7 +331,9 @@ class PsaExportOperator(Operator, ExportHelper):
         actions = [x.action for x in pg.action_list if x.is_selected]
         marker_names = [x.name for x in pg.marker_list if x.is_selected]

-        options = PsaBuilderOptions()
+        options = PsaBuildOptions()
+        options.should_override_animation_data = pg.should_override_animation_data
+        options.animation_data_override = pg.animation_data_override
         options.fps_source = pg.fps_source
         options.fps_custom = pg.fps_custom
         options.sequence_source = pg.sequence_source
@@ -332,16 +347,14 @@ class PsaExportOperator(Operator, ExportHelper):
         options.sequence_name_suffix = pg.sequence_name_suffix
         options.root_motion = pg.root_motion

-        builder = PsaBuilder()
-
         try:
-            psa = builder.build(context, options)
+            psa = build_psa(context, options)
         except RuntimeError as e:
             self.report({'ERROR_INVALID_CONTEXT'}, str(e))
             return {'CANCELLED'}

-        exporter = PsaExporter(psa)
-        exporter.export(self.filepath)
+        export_psa(psa, self.filepath)

         return {'FINISHED'}


@@ -368,8 +381,7 @@ def filter_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_c
     return flt_flags


-def get_visible_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[
-    PsaExportActionListItem]:
+def get_visible_sequences(pg: PsaExportPropertyGroup, sequences: bpy.types.bpy_prop_collection) -> List[PsaExportActionListItem]:
     visible_sequences = []
     for i, flag in enumerate(filter_sequences(pg, sequences)):
         if bool(flag & (1 << 30)):
@@ -401,10 +413,9 @@ class PSA_UL_ExportSequenceList(UIList):
         subrow = row.row(align=True)
         subrow.prop(pg, 'sequence_filter_asset', icon_only=True, icon='ASSET_MANAGER')

-    def filter_items(self, context, data, property):
+    def filter_items(self, context, data, prop):
         pg = context.scene.psa_export
-        actions = getattr(data, property)
+        actions = getattr(data, prop)
         flt_flags = filter_sequences(pg, actions)
         flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(actions, 'name')
         return flt_flags, flt_neworder
@@ -26,198 +26,195 @@ class PsaImportOptions(object):
         self.action_name_prefix = ''


-class PsaImporter(object):
-    def __init__(self):
-        pass
-
-    def import_psa(self, psa_reader: PsaReader, armature_object, options: PsaImportOptions):
-        sequences = map(lambda x: psa_reader.sequences[x], options.sequence_names)
-        armature_data = armature_object.data
-
-        class ImportBone(object):
-            def __init__(self, psa_bone: Psa.Bone):
-                self.psa_bone: Psa.Bone = psa_bone
-                self.parent: Optional[ImportBone] = None
-                self.armature_bone = None
-                self.pose_bone = None
-                self.orig_loc: Vector = Vector()
-                self.orig_quat: Quaternion = Quaternion()
-                self.post_quat: Quaternion = Quaternion()
-                self.fcurves = []
-
-        def calculate_fcurve_data(import_bone: ImportBone, key_data: []):
-            # Convert world-space transforms to local-space transforms.
-            key_rotation = Quaternion(key_data[0:4])
-            key_location = Vector(key_data[4:])
-            q = import_bone.post_quat.copy()
-            q.rotate(import_bone.orig_quat)
-            quat = q
-            q = import_bone.post_quat.copy()
-            if import_bone.parent is None:
-                q.rotate(key_rotation.conjugated())
-            else:
-                q.rotate(key_rotation)
-            quat.rotate(q.conjugated())
-            loc = key_location - import_bone.orig_loc
-            loc.rotate(import_bone.post_quat.conjugated())
-            return quat.w, quat.x, quat.y, quat.z, loc.x, loc.y, loc.z
-
-        # Create an index mapping from bones in the PSA to bones in the target armature.
-        psa_to_armature_bone_indices = {}
-        armature_bone_names = [x.name for x in armature_data.bones]
-        psa_bone_names = []
-        for psa_bone_index, psa_bone in enumerate(psa_reader.bones):
-            psa_bone_name = psa_bone.name.decode('windows-1252')
-            psa_bone_names.append(psa_bone_name)
-            try:
-                psa_to_armature_bone_indices[psa_bone_index] = armature_bone_names.index(psa_bone_name)
-            except ValueError:
-                pass
-
-        # Report if there are missing bones in the target armature.
-        missing_bone_names = set(psa_bone_names).difference(set(armature_bone_names))
-        if len(missing_bone_names) > 0:
-            print(
-                f'The armature object \'{armature_object.name}\' is missing the following bones that exist in the PSA:')
-            print(list(sorted(missing_bone_names)))
-        del armature_bone_names
-
-        # Create intermediate bone data for import operations.
-        import_bones = []
-        import_bones_dict = dict()
-
-        for psa_bone_index, psa_bone in enumerate(psa_reader.bones):
-            bone_name = psa_bone.name.decode('windows-1252')
-            if psa_bone_index not in psa_to_armature_bone_indices:  # TODO: replace with bone_name in armature_data.bones
-                # PSA bone does not map to armature bone, skip it and leave an empty bone in its place.
-                import_bones.append(None)
-                continue
-            import_bone = ImportBone(psa_bone)
-            import_bone.armature_bone = armature_data.bones[bone_name]
-            import_bone.pose_bone = armature_object.pose.bones[bone_name]
-            import_bones_dict[bone_name] = import_bone
-            import_bones.append(import_bone)
-
-        for import_bone in filter(lambda x: x is not None, import_bones):
-            armature_bone = import_bone.armature_bone
-            if armature_bone.parent is not None and armature_bone.parent.name in psa_bone_names:
-                import_bone.parent = import_bones_dict[armature_bone.parent.name]
-            # Calculate the original location & rotation of each bone (in world-space maybe?)
-            if armature_bone.get('orig_quat') is not None:
-                # TODO: ideally we don't rely on bone auxiliary data like this, the non-aux data path is incorrect (animations are flipped 180 around Z)
-                import_bone.orig_quat = Quaternion(armature_bone['orig_quat'])
-                import_bone.orig_loc = Vector(armature_bone['orig_loc'])
-                import_bone.post_quat = Quaternion(armature_bone['post_quat'])
-            else:
-                if import_bone.parent is not None:
-                    import_bone.orig_loc = armature_bone.matrix_local.translation - armature_bone.parent.matrix_local.translation
-                    import_bone.orig_loc.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
-                    import_bone.orig_quat = armature_bone.matrix_local.to_quaternion()
-                    import_bone.orig_quat.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
-                    import_bone.orig_quat.conjugate()
-                else:
-                    import_bone.orig_loc = armature_bone.matrix_local.translation.copy()
-                    import_bone.orig_quat = armature_bone.matrix_local.to_quaternion()
-                import_bone.post_quat = import_bone.orig_quat.conjugated()
-
-        # Create and populate the data for new sequences.
-        actions = []
-        for sequence in sequences:
-            # Add the action.
-            sequence_name = sequence.name.decode('windows-1252')
-            action_name = options.action_name_prefix + sequence_name
-
-            if options.should_overwrite and action_name in bpy.data.actions:
-                action = bpy.data.actions[action_name]
-            else:
-                action = bpy.data.actions.new(name=action_name)
-
-            if options.should_write_keyframes:
-                # Remove existing f-curves (replace with action.fcurves.clear() in Blender 3.2)
-                while len(action.fcurves) > 0:
-                    action.fcurves.remove(action.fcurves[-1])
-
-                # Create f-curves for the rotation and location of each bone.
-                for psa_bone_index, armature_bone_index in psa_to_armature_bone_indices.items():
-                    import_bone = import_bones[psa_bone_index]
-                    pose_bone = import_bone.pose_bone
-                    rotation_data_path = pose_bone.path_from_id('rotation_quaternion')
-                    location_data_path = pose_bone.path_from_id('location')
-                    import_bone.fcurves = [
-                        action.fcurves.new(rotation_data_path, index=0, action_group=pose_bone.name),  # Qw
-                        action.fcurves.new(rotation_data_path, index=1, action_group=pose_bone.name),  # Qx
-                        action.fcurves.new(rotation_data_path, index=2, action_group=pose_bone.name),  # Qy
-                        action.fcurves.new(rotation_data_path, index=3, action_group=pose_bone.name),  # Qz
-                        action.fcurves.new(location_data_path, index=0, action_group=pose_bone.name),  # Lx
-                        action.fcurves.new(location_data_path, index=1, action_group=pose_bone.name),  # Ly
-                        action.fcurves.new(location_data_path, index=2, action_group=pose_bone.name),  # Lz
-                    ]
-
-                # Read the sequence data matrix from the PSA.
-                sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
-                keyframe_write_matrix = np.ones(sequence_data_matrix.shape, dtype=np.int8)
-
-                # Convert the sequence's data from world-space to local-space.
-                for bone_index, import_bone in enumerate(import_bones):
-                    if import_bone is None:
-                        continue
-                    for frame_index in range(sequence.frame_count):
-                        # This bone has writeable keyframes for this frame.
-                        key_data = sequence_data_matrix[frame_index, bone_index]
-                        # Calculate the local-space key data for the bone.
-                        sequence_data_matrix[frame_index, bone_index] = calculate_fcurve_data(import_bone, key_data)
-
-                # Clean the keyframe data. This is accomplished by writing zeroes to the write matrix when there is an
-                # insufficiently large change in the data from the last written frame.
-                if options.should_clean_keys:
-                    threshold = 0.001
-                    for bone_index, import_bone in enumerate(import_bones):
-                        if import_bone is None:
-                            continue
-                        for fcurve_index in range(len(import_bone.fcurves)):
-                            # Get all the keyframe data for the bone's f-curve data from the sequence data matrix.
-                            fcurve_frame_data = sequence_data_matrix[:, bone_index, fcurve_index]
-                            last_written_datum = 0
-                            for frame_index, datum in enumerate(fcurve_frame_data):
-                                # If the f-curve data is not different enough to the last written frame, un-mark this data for writing.
-                                if frame_index > 0 and abs(datum - last_written_datum) < threshold:
-                                    keyframe_write_matrix[frame_index, bone_index, fcurve_index] = 0
-                                else:
-                                    last_written_datum = datum
-
-                # Write the keyframes out!
-                for frame_index in range(sequence.frame_count):
-                    for bone_index, import_bone in enumerate(import_bones):
-                        if import_bone is None:
-                            continue
-                        bone_has_writeable_keyframes = any(keyframe_write_matrix[frame_index, bone_index])
-                        if bone_has_writeable_keyframes:
-                            # This bone has writeable keyframes for this frame.
-                            key_data = sequence_data_matrix[frame_index, bone_index]
-                            for fcurve, should_write, datum in zip(import_bone.fcurves,
-                                                                   keyframe_write_matrix[frame_index, bone_index],
-                                                                   key_data):
-                                if should_write:
-                                    fcurve.keyframe_points.insert(frame_index, datum, options={'FAST'})
-
-            # Write
-            if options.should_write_metadata:
-                action['psa_sequence_name'] = sequence_name
-                action['psa_sequence_fps'] = sequence.fps
-
-            action.use_fake_user = options.should_use_fake_user
-
-            actions.append(action)
-
-        # If the user specifies, store the new animations as strips on a non-contributing NLA track.
-        if options.should_stash:
-            if armature_object.animation_data is None:
-                armature_object.animation_data_create()
-            for action in actions:
-                nla_track = armature_object.animation_data.nla_tracks.new()
-                nla_track.name = action.name
-                nla_track.mute = True
-                nla_track.strips.new(name=action.name, start=0, action=action)
+def import_psa(psa_reader: PsaReader, armature_object, options: PsaImportOptions):
+    sequences = map(lambda x: psa_reader.sequences[x], options.sequence_names)
+    armature_data = armature_object.data
+
+    class ImportBone(object):
+        def __init__(self, psa_bone: Psa.Bone):
+            self.psa_bone: Psa.Bone = psa_bone
+            self.parent: Optional[ImportBone] = None
+            self.armature_bone = None
+            self.pose_bone = None
+            self.orig_loc: Vector = Vector()
+            self.orig_quat: Quaternion = Quaternion()
+            self.post_quat: Quaternion = Quaternion()
+            self.fcurves = []
+
+    def calculate_fcurve_data(import_bone: ImportBone, key_data: []):
+        # Convert world-space transforms to local-space transforms.
+        key_rotation = Quaternion(key_data[0:4])
+        key_location = Vector(key_data[4:])
+        q = import_bone.post_quat.copy()
+        q.rotate(import_bone.orig_quat)
+        quat = q
+        q = import_bone.post_quat.copy()
+        if import_bone.parent is None:
+            q.rotate(key_rotation.conjugated())
+        else:
+            q.rotate(key_rotation)
+        quat.rotate(q.conjugated())
+        loc = key_location - import_bone.orig_loc
+        loc.rotate(import_bone.post_quat.conjugated())
+        return quat.w, quat.x, quat.y, quat.z, loc.x, loc.y, loc.z
+
+    # Create an index mapping from bones in the PSA to bones in the target armature.
+    psa_to_armature_bone_indices = {}
+    armature_bone_names = [x.name for x in armature_data.bones]
+    psa_bone_names = []
+    for psa_bone_index, psa_bone in enumerate(psa_reader.bones):
+        psa_bone_name = psa_bone.name.decode('windows-1252')
+        psa_bone_names.append(psa_bone_name)
+        try:
+            psa_to_armature_bone_indices[psa_bone_index] = armature_bone_names.index(psa_bone_name)
+        except ValueError:
+            pass
+
+    # Report if there are missing bones in the target armature.
+    missing_bone_names = set(psa_bone_names).difference(set(armature_bone_names))
+    if len(missing_bone_names) > 0:
+        print(
+            f'The armature object \'{armature_object.name}\' is missing the following bones that exist in the PSA:')
+        print(list(sorted(missing_bone_names)))
+    del armature_bone_names
+
+    # Create intermediate bone data for import operations.
+    import_bones = []
+    import_bones_dict = dict()
+
+    for psa_bone_index, psa_bone in enumerate(psa_reader.bones):
+        bone_name = psa_bone.name.decode('windows-1252')
+        if psa_bone_index not in psa_to_armature_bone_indices:  # TODO: replace with bone_name in armature_data.bones
+            # PSA bone does not map to armature bone, skip it and leave an empty bone in its place.
+            import_bones.append(None)
+            continue
+        import_bone = ImportBone(psa_bone)
+        import_bone.armature_bone = armature_data.bones[bone_name]
+        import_bone.pose_bone = armature_object.pose.bones[bone_name]
+        import_bones_dict[bone_name] = import_bone
+        import_bones.append(import_bone)
+
+    for import_bone in filter(lambda x: x is not None, import_bones):
+        armature_bone = import_bone.armature_bone
+        if armature_bone.parent is not None and armature_bone.parent.name in psa_bone_names:
+            import_bone.parent = import_bones_dict[armature_bone.parent.name]
+        # Calculate the original location & rotation of each bone (in world-space maybe?)
+        if armature_bone.get('orig_quat') is not None:
+            # TODO: ideally we don't rely on bone auxiliary data like this, the non-aux data path is incorrect
+            # (animations are flipped 180 around Z)
+            import_bone.orig_quat = Quaternion(armature_bone['orig_quat'])
+            import_bone.orig_loc = Vector(armature_bone['orig_loc'])
+            import_bone.post_quat = Quaternion(armature_bone['post_quat'])
+        else:
+            if import_bone.parent is not None:
+                import_bone.orig_loc = armature_bone.matrix_local.translation - armature_bone.parent.matrix_local.translation
+                import_bone.orig_loc.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
+                import_bone.orig_quat = armature_bone.matrix_local.to_quaternion()
+                import_bone.orig_quat.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
+                import_bone.orig_quat.conjugate()
+            else:
+                import_bone.orig_loc = armature_bone.matrix_local.translation.copy()
+                import_bone.orig_quat = armature_bone.matrix_local.to_quaternion()
+            import_bone.post_quat = import_bone.orig_quat.conjugated()
+
+    # Create and populate the data for new sequences.
+    actions = []
+    for sequence in sequences:
+        # Add the action.
+        sequence_name = sequence.name.decode('windows-1252')
+        action_name = options.action_name_prefix + sequence_name
+
+        if options.should_overwrite and action_name in bpy.data.actions:
+            action = bpy.data.actions[action_name]
+        else:
+            action = bpy.data.actions.new(name=action_name)
+
+        if options.should_write_keyframes:
+            # Remove existing f-curves (replace with action.fcurves.clear() in Blender 3.2)
+            while len(action.fcurves) > 0:
+                action.fcurves.remove(action.fcurves[-1])
+
+            # Create f-curves for the rotation and location of each bone.
+            for psa_bone_index, armature_bone_index in psa_to_armature_bone_indices.items():
+                import_bone = import_bones[psa_bone_index]
+                pose_bone = import_bone.pose_bone
+                rotation_data_path = pose_bone.path_from_id('rotation_quaternion')
+                location_data_path = pose_bone.path_from_id('location')
+                import_bone.fcurves = [
+                    action.fcurves.new(rotation_data_path, index=0, action_group=pose_bone.name),  # Qw
+                    action.fcurves.new(rotation_data_path, index=1, action_group=pose_bone.name),  # Qx
+                    action.fcurves.new(rotation_data_path, index=2, action_group=pose_bone.name),  # Qy
+                    action.fcurves.new(rotation_data_path, index=3, action_group=pose_bone.name),  # Qz
+                    action.fcurves.new(location_data_path, index=0, action_group=pose_bone.name),  # Lx
+                    action.fcurves.new(location_data_path, index=1, action_group=pose_bone.name),  # Ly
+                    action.fcurves.new(location_data_path, index=2, action_group=pose_bone.name),  # Lz
+                ]
+
+            # Read the sequence data matrix from the PSA.
+            sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
+            keyframe_write_matrix = np.ones(sequence_data_matrix.shape, dtype=np.int8)
+
+            # Convert the sequence's data from world-space to local-space.
+            for bone_index, import_bone in enumerate(import_bones):
+                if import_bone is None:
+                    continue
+                for frame_index in range(sequence.frame_count):
+                    # This bone has writeable keyframes for this frame.
+                    key_data = sequence_data_matrix[frame_index, bone_index]
+                    # Calculate the local-space key data for the bone.
+                    sequence_data_matrix[frame_index, bone_index] = calculate_fcurve_data(import_bone, key_data)
+
+            # Clean the keyframe data. This is accomplished by writing zeroes to the write matrix when there is an
+            # insufficiently large change in the data from the last written frame.
+            if options.should_clean_keys:
+                threshold = 0.001
+                for bone_index, import_bone in enumerate(import_bones):
+                    if import_bone is None:
+                        continue
+                    for fcurve_index in range(len(import_bone.fcurves)):
+                        # Get all the keyframe data for the bone's f-curve data from the sequence data matrix.
+                        fcurve_frame_data = sequence_data_matrix[:, bone_index, fcurve_index]
+                        last_written_datum = 0
+                        for frame_index, datum in enumerate(fcurve_frame_data):
+                            # If the f-curve data is not different enough to the last written frame, un-mark this data for writing.
+                            if frame_index > 0 and abs(datum - last_written_datum) < threshold:
+                                keyframe_write_matrix[frame_index, bone_index, fcurve_index] = 0
+                            else:
+                                last_written_datum = datum
+
+            # Write the keyframes out!
+            for frame_index in range(sequence.frame_count):
+                for bone_index, import_bone in enumerate(import_bones):
+                    if import_bone is None:
+                        continue
+                    bone_has_writeable_keyframes = any(keyframe_write_matrix[frame_index, bone_index])
+                    if bone_has_writeable_keyframes:
+                        # This bone has writeable keyframes for this frame.
+                        key_data = sequence_data_matrix[frame_index, bone_index]
+                        for fcurve, should_write, datum in zip(import_bone.fcurves,
+                                                               keyframe_write_matrix[frame_index, bone_index],
+                                                               key_data):
+                            if should_write:
+                                fcurve.keyframe_points.insert(frame_index, datum, options={'FAST'})
+
+        # Write
+        if options.should_write_metadata:
+            action['psa_sequence_name'] = sequence_name
+            action['psa_sequence_fps'] = sequence.fps
+
+        action.use_fake_user = options.should_use_fake_user
+
+        actions.append(action)
+
+    # If the user specifies, store the new animations as strips on a non-contributing NLA track.
+    if options.should_stash:
+        if armature_object.animation_data is None:
+            armature_object.animation_data_create()
+        for action in actions:
+            nla_track = armature_object.animation_data.nla_tracks.new()
+            nla_track.name = action.name
+            nla_track.mute = True
+            nla_track.strips.new(name=action.name, start=0, action=action)


 class PsaImportActionListItem(PropertyGroup):
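Note (illustration, not part of the diff): a minimal import sketch, assuming PsaReader is constructed from a file path as in the import operator, the path and sequence selection are hypothetical, and the remaining PsaImportOptions defaults suffice:

    import bpy

    psa_reader = PsaReader('/tmp/example.psa')  # hypothetical path
    options = PsaImportOptions()
    options.sequence_names = list(psa_reader.sequences.keys())
    import_psa(psa_reader, bpy.context.view_layer.objects.active, options)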
@@ -389,6 +386,9 @@ class PsaImportSequencesFromText(Operator):

     def execute(self, context):
         pg = context.scene.psa_import
+        if pg.select_text is None:
+            self.report({'ERROR_INVALID_CONTEXT'}, 'No text block selected')
+            return {'CANCELLED'}
         contents = pg.select_text.as_string()
         count = 0
         for line in contents.split('\n'):
@@ -403,7 +403,7 @@ class PsaImportSequencesFromText(Operator):
 class PsaImportSequencesSelectAll(Operator):
     bl_idname = 'psa_import.sequences_select_all'
     bl_label = 'All'
-    bl_description = 'Select all visible sequences'
+    bl_description = 'Select all sequences'
     bl_options = {'INTERNAL'}

     @classmethod
@@ -589,7 +589,7 @@ class PsaImportOperator(Operator):
         options.should_write_metadata = pg.should_write_metadata
         options.should_write_keyframes = pg.should_write_keyframes

-        PsaImporter().import_psa(psa_reader, context.view_layer.objects.active, options)
+        import_psa(psa_reader, context.view_layer.objects.active, options)

         self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)')
@@ -7,8 +7,8 @@ from .data import *

 class PsaReader(object):
     """
-    This class reads the sequences and bone information immediately upon instantiation and hold onto a file handle.
-    The key data is not read into memory upon instantiation due to it's potentially very large size.
+    This class reads the sequences and bone information immediately upon instantiation and holds onto a file handle.
+    The keyframe data is not read into memory upon instantiation due to it's potentially very large size.
     To read the key data for a particular sequence, call `read_sequence_keys`.
     """
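In other words, constructing a PsaReader is cheap; the heavy keyframe payload is only pulled from disk when a sequence is requested. A hypothetical usage sketch (the constructor signature, file path, and sequence name are assumptions; only `read_sequence_keys` is named by the docstring):

    reader = PsaReader('character.psa')        # sequences and bones parsed up front
    keys = reader.read_sequence_keys('Walk')   # keyframe data read on demand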
@@ -1,7 +1,10 @@
 from collections import OrderedDict
+from typing import Dict, List

 from .data import *
 from ..helpers import *
+import bmesh
+import bpy


 class PskInputObjects(object):
@@ -10,227 +13,253 @@ class PskInputObjects(object):
         self.armature_object = None


-class PskBuilderOptions(object):
+class PskBuildOptions(object):
     def __init__(self):
         self.bone_filter_mode = 'ALL'
-        self.bone_group_indices = []
+        self.bone_group_indices: List[int] = []
+        self.use_raw_mesh_data = True
+        self.material_names: List[str] = []


-class PskBuilder(object):
-    def __init__(self):
-        pass
-
-    @staticmethod
-    def get_input_objects(context) -> PskInputObjects:
-        input_objects = PskInputObjects()
-        for obj in context.view_layer.objects.selected:
-            if obj.type != 'MESH':
-                raise RuntimeError(f'Selected object "{obj.name}" is not a mesh')
-
-        input_objects.mesh_objects = context.view_layer.objects.selected
-
-        if len(input_objects.mesh_objects) == 0:
-            raise RuntimeError('At least one mesh must be selected')
-
-        for obj in input_objects.mesh_objects:
-            if len(obj.data.materials) == 0:
-                raise RuntimeError(f'Mesh "{obj.name}" must have at least one material')
-
-        # Ensure that there are either no armature modifiers (static mesh)
-        # or that there is exactly one armature modifier object shared between
-        # all selected meshes
-        armature_modifier_objects = set()
-
-        for obj in input_objects.mesh_objects:
-            modifiers = [x for x in obj.modifiers if x.type == 'ARMATURE']
-            if len(modifiers) == 0:
-                continue
-            elif len(modifiers) > 1:
-                raise RuntimeError(f'Mesh "{obj.name}" must have only one armature modifier')
-            armature_modifier_objects.add(modifiers[0].object)
-
-        if len(armature_modifier_objects) > 1:
-            raise RuntimeError('All selected meshes must have the same armature modifier')
-        elif len(armature_modifier_objects) == 1:
-            input_objects.armature_object = list(armature_modifier_objects)[0]
-
-        return input_objects
-
-    def build(self, context, options: PskBuilderOptions) -> Psk:
-        input_objects = PskBuilder.get_input_objects(context)
-
-        armature_object = input_objects.armature_object
-
-        psk = Psk()
-        bones = []
-        materials = OrderedDict()
-
-        if armature_object is None:
-            # If the mesh has no armature object, simply assign it a dummy bone at the root to satisfy the requirement
-            # that a PSK file must have at least one bone.
-            psk_bone = Psk.Bone()
-            psk_bone.name = bytes('root', encoding='windows-1252')
-            psk_bone.flags = 0
-            psk_bone.children_count = 0
-            psk_bone.parent_index = 0
-            psk_bone.location = Vector3.zero()
-            psk_bone.rotation = Quaternion.identity()
-            psk.bones.append(psk_bone)
-        else:
-            bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_group_indices)
-            bones = [armature_object.data.bones[bone_name] for bone_name in bone_names]
-
-            for bone in bones:
-                psk_bone = Psk.Bone()
-                psk_bone.name = bytes(bone.name, encoding='windows-1252')
-                psk_bone.flags = 0
-                psk_bone.children_count = 0
-
-                try:
-                    parent_index = bones.index(bone.parent)
-                    psk_bone.parent_index = parent_index
-                    psk.bones[parent_index].children_count += 1
-                except ValueError:
-                    psk_bone.parent_index = 0
-
-                if bone.parent is not None:
-                    rotation = bone.matrix.to_quaternion()
-                    rotation.x = -rotation.x
-                    rotation.y = -rotation.y
-                    rotation.z = -rotation.z
-                    quat_parent = bone.parent.matrix.to_quaternion().inverted()
-                    parent_head = quat_parent @ bone.parent.head
-                    parent_tail = quat_parent @ bone.parent.tail
-                    location = (parent_tail - parent_head) + bone.head
-                else:
-                    location = armature_object.matrix_local @ bone.head
-                    rot_matrix = bone.matrix @ armature_object.matrix_local.to_3x3()
-                    rotation = rot_matrix.to_quaternion()
-
-                psk_bone.location.x = location.x
-                psk_bone.location.y = location.y
-                psk_bone.location.z = location.z
-
-                psk_bone.rotation.x = rotation.x
-                psk_bone.rotation.y = rotation.y
-                psk_bone.rotation.z = rotation.z
-                psk_bone.rotation.w = rotation.w
-
-                psk.bones.append(psk_bone)
-
-        for object in input_objects.mesh_objects:
-            vertex_offset = len(psk.points)
-
-            # VERTICES
-            for vertex in object.data.vertices:
-                point = Vector3()
-                v = object.matrix_world @ vertex.co
-                point.x = v.x
-                point.y = v.y
-                point.z = v.z
-                psk.points.append(point)
-
-            uv_layer = object.data.uv_layers.active.data
-
-            # MATERIALS
-            material_indices = []
-            for i, m in enumerate(object.data.materials):
-                if m is None:
-                    raise RuntimeError('Material cannot be empty (index ' + str(i) + ')')
-                if m.name in materials:
-                    # Material already evaluated, just get its index.
-                    material_index = list(materials.keys()).index(m.name)
-                else:
-                    # New material.
-                    material = Psk.Material()
-                    material.name = bytes(m.name, encoding='utf-8')
-                    material.texture_index = len(psk.materials)
-                    psk.materials.append(material)
-                    materials[m.name] = m
-                    material_index = material.texture_index
-                material_indices.append(material_index)
-
-            # WEDGES
-            object.data.calc_loop_triangles()
-
-            # Build a list of non-unique wedges.
-            wedges = []
-            for loop_index, loop in enumerate(object.data.loops):
-                wedge = Psk.Wedge()
-                wedge.point_index = loop.vertex_index + vertex_offset
-                wedge.u, wedge.v = uv_layer[loop_index].uv
-                wedge.v = 1.0 - wedge.v
-                wedges.append(wedge)
-
-            # Assign material indices to the wedges.
-            for triangle in object.data.loop_triangles:
-                for loop_index in triangle.loops:
-                    wedges[loop_index].material_index = material_indices[triangle.material_index]
-
-            # Populate the list of wedges with unique wedges & build a look-up table of loop indices to wedge indices
-            wedge_indices = {}
-            loop_wedge_indices = [-1] * len(object.data.loops)
-            for loop_index, wedge in enumerate(wedges):
-                wedge_hash = hash(wedge)
-                if wedge_hash in wedge_indices:
-                    loop_wedge_indices[loop_index] = wedge_indices[wedge_hash]
-                else:
-                    wedge_index = len(psk.wedges)
-                    wedge_indices[wedge_hash] = wedge_index
-                    psk.wedges.append(wedge)
-                    loop_wedge_indices[loop_index] = wedge_index
-
-            # FACES
-            poly_groups, groups = object.data.calc_smooth_groups(use_bitflags=True)
-            for f in object.data.loop_triangles:
-                face = Psk.Face()
-                face.material_index = material_indices[f.material_index]
-                face.wedge_indices[0] = loop_wedge_indices[f.loops[2]]
-                face.wedge_indices[1] = loop_wedge_indices[f.loops[1]]
-                face.wedge_indices[2] = loop_wedge_indices[f.loops[0]]
-                face.smoothing_groups = poly_groups[f.polygon_index]
-                psk.faces.append(face)
-
-            # WEIGHTS
-            if armature_object is not None:
-                # Because the vertex groups may contain entries for which there is no matching bone in the armature,
-                # we must filter them out and not export any weights for these vertex groups.
-                bone_names = [x.name for x in bones]
-                vertex_group_names = [x.name for x in object.vertex_groups]
-                vertex_group_bone_indices = dict()
-                for vertex_group_index, vertex_group_name in enumerate(vertex_group_names):
-                    try:
-                        vertex_group_bone_indices[vertex_group_index] = bone_names.index(vertex_group_name)
-                    except ValueError:
-                        # The vertex group does not have a matching bone in the list of bones to be exported.
-                        # Check to see if there is an associated bone for this vertex group that exists in the armature.
-                        # If there is, we can traverse the ancestors of that bone to find an alternate bone to use for
-                        # weighting the vertices belonging to this vertex group.
-                        if vertex_group_name in armature_object.data.bones:
-                            bone = armature_object.data.bones[vertex_group_name]
-                            while bone is not None:
-                                try:
-                                    bone_index = bone_names.index(bone.name)
-                                    vertex_group_bone_indices[vertex_group_index] = bone_index
-                                    break
-                                except ValueError:
-                                    bone = bone.parent
-                for vertex_group_index, vertex_group in enumerate(object.vertex_groups):
-                    if vertex_group_index not in vertex_group_bone_indices:
-                        # Vertex group has no associated bone, skip it.
-                        continue
-                    bone_index = vertex_group_bone_indices[vertex_group_index]
-                    for vertex_index in range(len(object.data.vertices)):
-                        try:
-                            weight = vertex_group.weight(vertex_index)
-                        except RuntimeError:
-                            continue
-                        if weight == 0.0:
-                            continue
-                        w = Psk.Weight()
-                        w.bone_index = bone_index
-                        w.point_index = vertex_offset + vertex_index
-                        w.weight = weight
-                        psk.weights.append(w)
-
-        return psk
+def get_psk_input_objects(context) -> PskInputObjects:
+    input_objects = PskInputObjects()
+    for selected_object in context.view_layer.objects.selected:
+        if selected_object.type != 'MESH':
+            raise RuntimeError(f'Selected object "{selected_object.name}" is not a mesh')
+
+    input_objects.mesh_objects = context.view_layer.objects.selected
+
+    if len(input_objects.mesh_objects) == 0:
+        raise RuntimeError('At least one mesh must be selected')
+
+    for mesh_object in input_objects.mesh_objects:
+        if len(mesh_object.data.materials) == 0:
+            raise RuntimeError(f'Mesh "{mesh_object.name}" must have at least one material')
+
+    # Ensure that there are either no armature modifiers (static mesh)
+    # or that there is exactly one armature modifier object shared between
+    # all selected meshes
+    armature_modifier_objects = set()
+
+    for mesh_object in input_objects.mesh_objects:
+        modifiers = [x for x in mesh_object.modifiers if x.type == 'ARMATURE']
+        if len(modifiers) == 0:
+            continue
+        elif len(modifiers) > 1:
+            raise RuntimeError(f'Mesh "{mesh_object.name}" must have only one armature modifier')
+        armature_modifier_objects.add(modifiers[0].object)
+
+    if len(armature_modifier_objects) > 1:
+        raise RuntimeError('All selected meshes must have the same armature modifier')
+    elif len(armature_modifier_objects) == 1:
+        input_objects.armature_object = list(armature_modifier_objects)[0]
+
+    return input_objects
+
+
+def build_psk(context, options: PskBuildOptions) -> Psk:
+    input_objects = get_psk_input_objects(context)
+
+    armature_object = input_objects.armature_object
+
+    psk = Psk()
+    bones = []
+
+    if armature_object is None:
+        # If the mesh has no armature object, simply assign it a dummy bone at the root to satisfy the requirement
+        # that a PSK file must have at least one bone.
+        psk_bone = Psk.Bone()
+        psk_bone.name = bytes('root', encoding='windows-1252')
+        psk_bone.flags = 0
+        psk_bone.children_count = 0
+        psk_bone.parent_index = 0
+        psk_bone.location = Vector3.zero()
+        psk_bone.rotation = Quaternion.identity()
+        psk.bones.append(psk_bone)
+    else:
+        bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_group_indices)
+        bones = [armature_object.data.bones[bone_name] for bone_name in bone_names]
+
+        # Check that all bone names are valid.
+        check_bone_names(map(lambda x: x.name, bones))
+
+        for bone in bones:
+            psk_bone = Psk.Bone()
+            psk_bone.name = bytes(bone.name, encoding='windows-1252')
+            psk_bone.flags = 0
+            psk_bone.children_count = 0
+
+            try:
+                parent_index = bones.index(bone.parent)
+                psk_bone.parent_index = parent_index
+                psk.bones[parent_index].children_count += 1
+            except ValueError:
+                psk_bone.parent_index = -1
+
+            if bone.parent is not None:
+                rotation = bone.matrix.to_quaternion().conjugated()
+                quat_parent = bone.parent.matrix.to_quaternion().inverted()
+                parent_head = quat_parent @ bone.parent.head
+                parent_tail = quat_parent @ bone.parent.tail
+                location = (parent_tail - parent_head) + bone.head
+            else:
+                local_matrix = armature_object.matrix_local
+                location = local_matrix @ bone.head
+                rot_matrix = bone.matrix @ local_matrix.to_3x3()
+                rotation = rot_matrix.to_quaternion()
+
+            psk_bone.location.x = location.x
+            psk_bone.location.y = location.y
+            psk_bone.location.z = location.z
+
+            psk_bone.rotation.w = rotation.w
+            psk_bone.rotation.x = rotation.x
+            psk_bone.rotation.y = rotation.y
+            psk_bone.rotation.z = rotation.z
+
+            psk.bones.append(psk_bone)
+
+    # MATERIALS
+    material_names = options.material_names
+
+    for material_name in material_names:
+        psk_material = Psk.Material()
+        psk_material.name = bytes(material_name, encoding='windows-1252')
+        psk_material.texture_index = len(psk.materials)
+        psk.materials.append(psk_material)
+
+    for input_mesh_object in input_objects.mesh_objects:
+
+        # MATERIALS
+        material_indices = [material_names.index(material.name) for material in input_mesh_object.data.materials]
+
+        if options.use_raw_mesh_data:
+            mesh_object = input_mesh_object
+            mesh_data = input_mesh_object.data
+        else:
+            # Create a copy of the mesh object after non-armature modifiers are applied.
+
+            # Temporarily deactivate any armature modifiers on the input mesh object.
+            active_armature_modifiers = [x for x in filter(lambda x: x.type == 'ARMATURE' and x.is_active, input_mesh_object.modifiers)]
+            for modifier in active_armature_modifiers:
+                modifier.show_viewport = False
+
+            depsgraph = context.evaluated_depsgraph_get()
+            bm = bmesh.new()
+            bm.from_object(input_mesh_object, depsgraph)
+            mesh_data = bpy.data.meshes.new('')
+            bm.to_mesh(mesh_data)
+            del bm
+            mesh_object = bpy.data.objects.new('', mesh_data)
+            mesh_object.matrix_world = input_mesh_object.matrix_world
+
+            # Copy the vertex groups
+            for vertex_group in input_mesh_object.vertex_groups:
+                mesh_object.vertex_groups.new(name=vertex_group.name)
+
+            # Reactivate previously active armature modifiers
+            for modifier in active_armature_modifiers:
+                modifier.show_viewport = True
+
+        vertex_offset = len(psk.points)
+
+        # VERTICES
+        for vertex in mesh_data.vertices:
+            point = Vector3()
+            v = mesh_object.matrix_world @ vertex.co
+            point.x = v.x
+            point.y = v.y
+            point.z = v.z
+            psk.points.append(point)
+
+        uv_layer = mesh_data.uv_layers.active.data
+
+        # WEDGES
+        mesh_data.calc_loop_triangles()
+
+        # Build a list of non-unique wedges.
+        wedges = []
+        for loop_index, loop in enumerate(mesh_data.loops):
+            wedge = Psk.Wedge()
+            wedge.point_index = loop.vertex_index + vertex_offset
+            wedge.u, wedge.v = uv_layer[loop_index].uv
+            wedge.v = 1.0 - wedge.v
+            wedges.append(wedge)
+
+        # Assign material indices to the wedges.
+        for triangle in mesh_data.loop_triangles:
+            for loop_index in triangle.loops:
+                wedges[loop_index].material_index = material_indices[triangle.material_index]
+
+        # Populate the list of wedges with unique wedges & build a look-up table of loop indices to wedge indices
+        wedge_indices = {}
+        loop_wedge_indices = [-1] * len(mesh_data.loops)
+        for loop_index, wedge in enumerate(wedges):
+            wedge_hash = hash(wedge)
+            if wedge_hash in wedge_indices:
+                loop_wedge_indices[loop_index] = wedge_indices[wedge_hash]
+            else:
+                wedge_index = len(psk.wedges)
+                wedge_indices[wedge_hash] = wedge_index
+                psk.wedges.append(wedge)
+                loop_wedge_indices[loop_index] = wedge_index
+
+        # FACES
+        poly_groups, groups = mesh_data.calc_smooth_groups(use_bitflags=True)
+        for f in mesh_data.loop_triangles:
+            face = Psk.Face()
+            face.material_index = material_indices[f.material_index]
+            face.wedge_indices[0] = loop_wedge_indices[f.loops[2]]
+            face.wedge_indices[1] = loop_wedge_indices[f.loops[1]]
+            face.wedge_indices[2] = loop_wedge_indices[f.loops[0]]
+            face.smoothing_groups = poly_groups[f.polygon_index]
+            psk.faces.append(face)
+
+        # WEIGHTS
+        if armature_object is not None:
+            # Because the vertex groups may contain entries for which there is no matching bone in the armature,
+            # we must filter them out and not export any weights for these vertex groups.
+            bone_names = [x.name for x in bones]
+            vertex_group_names = [x.name for x in mesh_object.vertex_groups]
+            vertex_group_bone_indices = dict()
+            for vertex_group_index, vertex_group_name in enumerate(vertex_group_names):
+                try:
+                    vertex_group_bone_indices[vertex_group_index] = bone_names.index(vertex_group_name)
+                except ValueError:
+                    # The vertex group does not have a matching bone in the list of bones to be exported.
+                    # Check to see if there is an associated bone for this vertex group that exists in the armature.
+                    # If there is, we can traverse the ancestors of that bone to find an alternate bone to use for
+                    # weighting the vertices belonging to this vertex group.
+                    if vertex_group_name in armature_object.data.bones:
+                        bone = armature_object.data.bones[vertex_group_name]
+                        while bone is not None:
+                            try:
+                                bone_index = bone_names.index(bone.name)
+                                vertex_group_bone_indices[vertex_group_index] = bone_index
+                                break
+                            except ValueError:
+                                bone = bone.parent
+            for vertex_group_index, vertex_group in enumerate(mesh_object.vertex_groups):
+                if vertex_group_index not in vertex_group_bone_indices:
+                    # Vertex group has no associated bone, skip it.
+                    continue
+                bone_index = vertex_group_bone_indices[vertex_group_index]
+                for vertex_index in range(len(mesh_data.vertices)):
+                    try:
+                        weight = vertex_group.weight(vertex_index)
+                    except RuntimeError:
+                        continue
+                    if weight == 0.0:
+                        continue
+                    w = Psk.Weight()
+                    w.bone_index = bone_index
+                    w.point_index = vertex_offset + vertex_index
+                    w.weight = weight
+                    psk.weights.append(w)
+
+        if not options.use_raw_mesh_data:
+            bpy.data.objects.remove(mesh_object)
+            bpy.data.meshes.remove(mesh_data)
+            del mesh_data
+
+    return psk
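A note on the wedge handling that both the old and new builders share: a wedge (one loop's point index, UV, and material) is de-duplicated by hashing, and a loop-index-to-wedge-index table is kept so faces can be remapped afterwards. The same idea in a self-contained sketch, using plain tuples instead of Psk.Wedge instances:

    # Three loops, two of which carry identical (point_index, u, v) data.
    loops = [(0, 0.25, 0.75), (1, 0.10, 0.90), (0, 0.25, 0.75)]
    wedge_indices = {}
    unique_wedges = []
    loop_wedge_indices = []
    for wedge in loops:
        wedge_hash = hash(wedge)
        if wedge_hash not in wedge_indices:
            wedge_indices[wedge_hash] = len(unique_wedges)
            unique_wedges.append(wedge)
        loop_wedge_indices.append(wedge_indices[wedge_hash])
    assert loop_wedge_indices == [0, 1, 0]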
@@ -1,10 +1,10 @@
 from typing import Type

-from bpy.props import StringProperty, CollectionProperty, IntProperty, EnumProperty
-from bpy.types import Operator, PropertyGroup
+from bpy.props import BoolProperty, StringProperty, CollectionProperty, IntProperty, EnumProperty, PointerProperty
+from bpy.types import Operator, PropertyGroup, UIList, Material
 from bpy_extras.io_utils import ExportHelper

-from .builder import PskBuilder, PskBuilderOptions
+from .builder import build_psk, PskBuildOptions, get_psk_input_objects
 from .data import *
 from ..helpers import populate_bone_group_list
 from ..types import BoneGroupListItem
@@ -15,55 +15,50 @@ MAX_BONE_COUNT = 256
 MAX_MATERIAL_COUNT = 256


-class PskExporter(object):
-
-    def __init__(self, psk: Psk):
-        self.psk: Psk = psk
-
-    @staticmethod
-    def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
-        section = Section()
-        section.name = name
-        if data_type is not None and data is not None:
-            section.data_size = sizeof(data_type)
-            section.data_count = len(data)
-        fp.write(section)
-        if data is not None:
-            for datum in data:
-                fp.write(datum)
-
-    def export(self, path: str):
-        if len(self.psk.wedges) > MAX_WEDGE_COUNT:
-            raise RuntimeError(f'Number of wedges ({len(self.psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}')
-        if len(self.psk.bones) > MAX_BONE_COUNT:
-            raise RuntimeError(f'Number of bones ({len(self.psk.bones)}) exceeds limit of {MAX_BONE_COUNT}')
-        if len(self.psk.points) > MAX_POINT_COUNT:
-            raise RuntimeError(f'Numbers of vertices ({len(self.psk.points)}) exceeds limit of {MAX_POINT_COUNT}')
-        if len(self.psk.materials) > MAX_MATERIAL_COUNT:
-            raise RuntimeError(f'Number of materials ({len(self.psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}')
-
-        with open(path, 'wb') as fp:
-            self.write_section(fp, b'ACTRHEAD')
-            self.write_section(fp, b'PNTS0000', Vector3, self.psk.points)
-
-            wedges = []
-            for index, w in enumerate(self.psk.wedges):
-                wedge = Psk.Wedge16()
-                wedge.material_index = w.material_index
-                wedge.u = w.u
-                wedge.v = w.v
-                wedge.point_index = w.point_index
-                wedges.append(wedge)
-
-            self.write_section(fp, b'VTXW0000', Psk.Wedge16, wedges)
-            self.write_section(fp, b'FACE0000', Psk.Face, self.psk.faces)
-            self.write_section(fp, b'MATT0000', Psk.Material, self.psk.materials)
-            self.write_section(fp, b'REFSKELT', Psk.Bone, self.psk.bones)
-            self.write_section(fp, b'RAWWEIGHTS', Psk.Weight, self.psk.weights)
+def _write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
+    section = Section()
+    section.name = name
+    if data_type is not None and data is not None:
+        section.data_size = sizeof(data_type)
+        section.data_count = len(data)
+    fp.write(section)
+    if data is not None:
+        for datum in data:
+            fp.write(datum)
+
+
+def export_psk(psk: Psk, path: str):
+    if len(psk.wedges) > MAX_WEDGE_COUNT:
+        raise RuntimeError(f'Number of wedges ({len(psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}')
+    if len(psk.bones) > MAX_BONE_COUNT:
+        raise RuntimeError(f'Number of bones ({len(psk.bones)}) exceeds limit of {MAX_BONE_COUNT}')
+    if len(psk.points) > MAX_POINT_COUNT:
+        raise RuntimeError(f'Numbers of vertices ({len(psk.points)}) exceeds limit of {MAX_POINT_COUNT}')
+    if len(psk.materials) > MAX_MATERIAL_COUNT:
+        raise RuntimeError(f'Number of materials ({len(psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}')
+
+    with open(path, 'wb') as fp:
+        _write_section(fp, b'ACTRHEAD')
+        _write_section(fp, b'PNTS0000', Vector3, psk.points)
+
+        wedges = []
+        for index, w in enumerate(psk.wedges):
+            wedge = Psk.Wedge16()
+            wedge.material_index = w.material_index
+            wedge.u = w.u
+            wedge.v = w.v
+            wedge.point_index = w.point_index
+            wedges.append(wedge)
+
+        _write_section(fp, b'VTXW0000', Psk.Wedge16, wedges)
+        _write_section(fp, b'FACE0000', Psk.Face, psk.faces)
+        _write_section(fp, b'MATT0000', Psk.Material, psk.materials)
+        _write_section(fp, b'REFSKELT', Psk.Bone, psk.bones)
+        _write_section(fp, b'RAWWEIGHTS', Psk.Weight, psk.weights)


 def is_bone_filter_mode_item_available(context, identifier):
-    input_objects = PskBuilder.get_input_objects(context)
+    input_objects = get_psk_input_objects(context)
     armature_object = input_objects.armature_object
     if identifier == 'BONE_GROUPS':
         if not armature_object or not armature_object.pose or not armature_object.pose.bone_groups:
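Taken together, the builder and exporter hunks replace the PskBuilder/PskExporter classes with a small functional pipeline. A hypothetical end-to-end sketch (the material names and output path are illustrative; bpy.context must have the meshes selected):

    options = PskBuildOptions()
    options.bone_filter_mode = 'ALL'
    options.use_raw_mesh_data = False                # evaluate modifiers via the depsgraph copy
    options.material_names = ['BodyMat', 'HeadMat']  # list order defines the PSK material indices
    psk = build_psk(bpy.context, options)
    export_psk(psk, 'model.psk')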
@@ -72,6 +67,75 @@ def is_bone_filter_mode_item_available(context, identifier):
     return True


+class PSK_UL_MaterialList(UIList):
+    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
+        row = layout.row()
+        row.label(text=str(item.material_name), icon='MATERIAL')
+
+
+class MaterialListItem(PropertyGroup):
+    material_name: StringProperty()
+    index: IntProperty()
+
+    @property
+    def name(self):
+        return self.material_name
+
+
+def populate_material_list(mesh_objects, material_list):
+    material_list.clear()
+
+    material_names = []
+    for mesh_object in mesh_objects:
+        for i, material in enumerate(mesh_object.data.materials):
+            # TODO: put this in the poll arg?
+            if material is None:
+                raise RuntimeError('Material cannot be empty (index ' + str(i) + ')')
+            if material.name not in material_names:
+                material_names.append(material.name)
+
+    for index, material_name in enumerate(material_names):
+        m = material_list.add()
+        m.material_name = material_name
+        m.index = index
+
+
+class PskMaterialListItemMoveUp(Operator):
+    bl_idname = 'psk_export.material_list_item_move_up'
+    bl_label = 'Move Up'
+    bl_options = {'INTERNAL'}
+    bl_description = 'Move the selected material up one slot'
+
+    @classmethod
+    def poll(cls, context):
+        pg = context.scene.psk_export
+        return pg.material_list_index > 0
+
+    def execute(self, context):
+        pg = context.scene.psk_export
+        pg.material_list.move(pg.material_list_index, pg.material_list_index - 1)
+        pg.material_list_index -= 1
+        return {"FINISHED"}
+
+
+class PskMaterialListItemMoveDown(Operator):
+    bl_idname = 'psk_export.material_list_item_move_down'
+    bl_label = 'Move Down'
+    bl_options = {'INTERNAL'}
+    bl_description = 'Move the selected material down one slot'
+
+    @classmethod
+    def poll(cls, context):
+        pg = context.scene.psk_export
+        return pg.material_list_index < len(pg.material_list) - 1
+
+    def execute(self, context):
+        pg = context.scene.psk_export
+        pg.material_list.move(pg.material_list_index, pg.material_list_index + 1)
+        pg.material_list_index += 1
+        return {"FINISHED"}
+
+
 class PskExportOperator(Operator, ExportHelper):
     bl_idname = 'export.psk'
     bl_label = 'Export'
@@ -88,7 +152,7 @@ class PskExportOperator(Operator, ExportHelper):

     def invoke(self, context, event):
         try:
-            input_objects = PskBuilder.get_input_objects(context)
+            input_objects = get_psk_input_objects(context)
         except RuntimeError as e:
             self.report({'ERROR_INVALID_CONTEXT'}, str(e))
             return {'CANCELLED'}
|
|||||||
|
|
||||||
# Populate bone groups list.
|
# Populate bone groups list.
|
||||||
populate_bone_group_list(input_objects.armature_object, pg.bone_group_list)
|
populate_bone_group_list(input_objects.armature_object, pg.bone_group_list)
|
||||||
|
populate_material_list(input_objects.mesh_objects, pg.material_list)
|
||||||
|
|
||||||
context.window_manager.fileselect_add(self)
|
context.window_manager.fileselect_add(self)
|
||||||
|
|
||||||
@ -105,7 +170,7 @@ class PskExportOperator(Operator, ExportHelper):
|
|||||||
@classmethod
|
@classmethod
|
||||||
def poll(cls, context):
|
def poll(cls, context):
|
||||||
try:
|
try:
|
||||||
PskBuilder.get_input_objects(context)
|
get_psk_input_objects(context)
|
||||||
except RuntimeError as e:
|
except RuntimeError as e:
|
||||||
cls.poll_message_set(str(e))
|
cls.poll_message_set(str(e))
|
||||||
return False
|
return False
|
||||||
@@ -116,11 +181,12 @@ class PskExportOperator(Operator, ExportHelper):
         scene = context.scene
         pg = scene.psk_export

+        layout.prop(pg, 'use_raw_mesh_data')
+
         # BONES
-        box = layout.box()
-        box.label(text='Bones', icon='BONE_DATA')
+        layout.label(text='Bones', icon='BONE_DATA')
         bone_filter_mode_items = pg.bl_rna.properties['bone_filter_mode'].enum_items_static
-        row = box.row(align=True)
+        row = layout.row(align=True)
         for item in bone_filter_mode_items:
             identifier = item.identifier
             item_layout = row.row(align=True)
@@ -128,20 +194,32 @@ class PskExportOperator(Operator, ExportHelper):
             item_layout.enabled = is_bone_filter_mode_item_available(context, identifier)

         if pg.bone_filter_mode == 'BONE_GROUPS':
-            row = box.row()
+            row = layout.row()
             rows = max(3, min(len(pg.bone_group_list), 10))
             row.template_list('PSX_UL_BoneGroupList', '', pg, 'bone_group_list', pg, 'bone_group_list_index', rows=rows)

+        layout.separator()
+
+        # MATERIALS
+        layout.label(text='Materials', icon='MATERIAL')
+        row = layout.row()
+        rows = max(3, min(len(pg.bone_group_list), 10))
+        row.template_list('PSK_UL_MaterialList', '', pg, 'material_list', pg, 'material_list_index', rows=rows)
+        col = row.column(align=True)
+        col.operator(PskMaterialListItemMoveUp.bl_idname, text='', icon='TRIA_UP')
+        col.operator(PskMaterialListItemMoveDown.bl_idname, text='', icon='TRIA_DOWN')
+
     def execute(self, context):
         pg = context.scene.psk_export
-        builder = PskBuilder()
-        options = PskBuilderOptions()
+        options = PskBuildOptions()
         options.bone_filter_mode = pg.bone_filter_mode
         options.bone_group_indices = [x.index for x in pg.bone_group_list if x.is_selected]
+        options.use_raw_mesh_data = pg.use_raw_mesh_data
+        options.material_names = [m.material_name for m in pg.material_list]
+
         try:
-            psk = builder.build(context, options)
-            exporter = PskExporter(psk)
-            exporter.export(self.filepath)
+            psk = build_psk(context, options)
+            export_psk(psk, self.filepath)
         except RuntimeError as e:
             self.report({'ERROR_INVALID_CONTEXT'}, str(e))
             return {'CANCELLED'}
@@ -161,9 +239,16 @@ class PskExportPropertyGroup(PropertyGroup):
     )
     bone_group_list: CollectionProperty(type=BoneGroupListItem)
     bone_group_list_index: IntProperty(default=0)
+    use_raw_mesh_data: BoolProperty(default=False, name='Raw Mesh Data', description='No modifiers will be evaluated as part of the exported mesh')
+    material_list: CollectionProperty(type=MaterialListItem)
+    material_list_index: IntProperty(default=0)


 classes = (
+    MaterialListItem,
+    PSK_UL_MaterialList,
+    PskMaterialListItemMoveUp,
+    PskMaterialListItemMoveDown,
     PskExportOperator,
-    PskExportPropertyGroup
+    PskExportPropertyGroup,
 )
@@ -12,25 +12,26 @@ from bpy_extras.io_utils import ImportHelper
 from mathutils import Quaternion, Vector, Matrix

 from .data import Psk
-from .reader import PskReader
+from .reader import read_psk
 from ..helpers import rgb_to_srgb


 class PskImportOptions(object):
     def __init__(self):
         self.name = ''
+        self.should_import_mesh = True
         self.should_import_vertex_colors = True
         self.vertex_color_space = 'sRGB'
         self.should_import_vertex_normals = True
         self.should_import_extra_uvs = True
+        self.should_import_skeleton = True
         self.bone_length = 1.0


-class PskImporter(object):
-    def __init__(self):
-        pass
-
-    def import_psk(self, psk: Psk, context, options: PskImportOptions):
+def import_psk(psk: Psk, context, options: PskImportOptions):
+    armature_object = None
+
+    if options.should_import_skeleton:
         # ARMATURE
         armature_data = bpy.data.armatures.new(options.name)
         armature_object = bpy.data.objects.new(options.name, armature_data)
@@ -109,7 +110,8 @@ class PskImporter(object):
         edit_bone['orig_loc'] = import_bone.local_translation
         edit_bone['post_quat'] = import_bone.local_rotation.conjugated()

     # MESH
+    if options.should_import_mesh:
         mesh_data = bpy.data.meshes.new(options.name)
         mesh_object = bpy.data.objects.new(options.name, mesh_data)
@@ -211,24 +213,25 @@ class PskImporter(object):

         # Get a list of all bones that have weights associated with them.
         vertex_group_bone_indices = set(map(lambda weight: weight.bone_index, psk.weights))
-        for import_bone in map(lambda x: import_bones[x], sorted(list(vertex_group_bone_indices))):
-            import_bone.vertex_group = mesh_object.vertex_groups.new(
-                name=import_bone.psk_bone.name.decode('windows-1252'))
+        vertex_groups = [None] * len(psk.bones)
+        for bone_index, psk_bone in map(lambda x: (x, psk.bones[x]), vertex_group_bone_indices):
+            vertex_groups[bone_index] = mesh_object.vertex_groups.new(name=psk_bone.name.decode('windows-1252'))

         for weight in psk.weights:
-            import_bones[weight.bone_index].vertex_group.add((weight.point_index,), weight.weight, 'ADD')
-
-        # Add armature modifier to our mesh object.
-        armature_modifier = mesh_object.modifiers.new(name='Armature', type='ARMATURE')
-        armature_modifier.object = armature_object
-        mesh_object.parent = armature_object
+            vertex_groups[weight.bone_index].add((weight.point_index,), weight.weight, 'ADD')

         context.scene.collection.objects.link(mesh_object)

-    try:
-        bpy.ops.object.mode_set(mode='OBJECT')
-    except:
-        pass
+        # Add armature modifier to our mesh object.
+        if options.should_import_skeleton:
+            armature_modifier = mesh_object.modifiers.new(name='Armature', type='ARMATURE')
+            armature_modifier.object = armature_object
+            mesh_object.parent = armature_object
+
+    try:
+        bpy.ops.object.mode_set(mode='OBJECT')
+    except:
+        pass


 class PskImportPropertyGroup(PropertyGroup):
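The replacement indexes vertex groups by bone index instead of hanging a vertex_group attribute off each import bone. For reference, the underlying Blender calls work like this (a sketch; the group name and indices are illustrative):

    group = mesh_object.vertex_groups.new(name='spine_01')
    group.add((12,), 0.75, 'ADD')  # add vertex 12 with weight 0.75, accumulating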
@@ -260,6 +263,18 @@ class PskImportPropertyGroup(PropertyGroup):
         options=set(),
         description='Import extra UV maps from PSKX files, if available'
     )
+    should_import_mesh: BoolProperty(
+        default=True,
+        name='Import Mesh',
+        options=set(),
+        description='Import mesh'
+    )
+    should_import_skeleton: BoolProperty(
+        default=True,
+        name='Import Skeleton',
+        options=set(),
+        description='Import skeleton'
+    )
     bone_length: FloatProperty(
         default=1.0,
         min=sys.float_info.epsilon,
@@ -273,7 +288,7 @@ class PskImportPropertyGroup(PropertyGroup):

 class PskImportOperator(Operator, ImportHelper):
     bl_idname = 'import.psk'
-    bl_label = 'Export'
+    bl_label = 'Import'
     bl_options = {'INTERNAL', 'UNDO'}
     __doc__ = 'Load a PSK file'
     filename_ext = '.psk'
@@ -286,29 +301,42 @@ class PskImportOperator(Operator, ImportHelper):

     def execute(self, context):
         pg = context.scene.psk_import
-        reader = PskReader()
-        psk = reader.read(self.filepath)
+
+        psk = read_psk(self.filepath)
+
         options = PskImportOptions()
         options.name = os.path.splitext(os.path.basename(self.filepath))[0]
+        options.should_import_mesh = pg.should_import_mesh
         options.should_import_extra_uvs = pg.should_import_extra_uvs
         options.should_import_vertex_colors = pg.should_import_vertex_colors
         options.should_import_vertex_normals = pg.should_import_vertex_normals
         options.vertex_color_space = pg.vertex_color_space
+        options.should_import_skeleton = pg.should_import_skeleton
         options.bone_length = pg.bone_length
-        PskImporter().import_psk(psk, context, options)
+
+        import_psk(psk, context, options)
+
         return {'FINISHED'}

     def draw(self, context):
         pg = context.scene.psk_import
         layout = self.layout
-        layout.use_property_split = True
-        layout.use_property_decorate = False
-        layout.prop(pg, 'should_import_vertex_normals')
-        layout.prop(pg, 'should_import_extra_uvs')
-        layout.prop(pg, 'should_import_vertex_colors')
-        if pg.should_import_vertex_colors:
-            layout.prop(pg, 'vertex_color_space')
-        layout.prop(pg, 'bone_length')
+        layout.prop(pg, 'should_import_mesh')
+        row = layout.column()
+        row.use_property_split = True
+        row.use_property_decorate = False
+        if pg.should_import_mesh:
+            row.prop(pg, 'should_import_vertex_normals')
+            row.prop(pg, 'should_import_extra_uvs')
+            row.prop(pg, 'should_import_vertex_colors')
+            if pg.should_import_vertex_colors:
+                row.prop(pg, 'vertex_color_space')
+        layout.prop(pg, 'should_import_skeleton')
+        row = layout.column()
+        row.use_property_split = True
+        row.use_property_decorate = False
+        if pg.should_import_skeleton:
+            row.prop(pg, 'bone_length')


 classes = (
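The import side now mirrors the export side's functional style. A hypothetical usage sketch of the new entry points (the file path and name are illustrative; bpy.context must be a valid Blender context):

    psk = read_psk('character.psk')
    options = PskImportOptions()
    options.name = 'character'
    options.should_import_mesh = True
    options.should_import_skeleton = True
    import_psk(psk, bpy.context, options)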
@@ -3,53 +3,48 @@ import ctypes
 from .data import *


-class PskReader(object):
-
-    def __init__(self):
-        pass
-
-    @staticmethod
-    def read_types(fp, data_class: ctypes.Structure, section: Section, data):
-        buffer_length = section.data_size * section.data_count
-        buffer = fp.read(buffer_length)
-        offset = 0
-        for _ in range(section.data_count):
-            data.append(data_class.from_buffer_copy(buffer, offset))
-            offset += section.data_size
-
-    def read(self, path) -> Psk:
-        psk = Psk()
-        with open(path, 'rb') as fp:
-            while fp.read(1):
-                fp.seek(-1, 1)
-                section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section)))
-                if section.name == b'ACTRHEAD':
-                    pass
-                elif section.name == b'PNTS0000':
-                    PskReader.read_types(fp, Vector3, section, psk.points)
-                elif section.name == b'VTXW0000':
-                    if section.data_size == ctypes.sizeof(Psk.Wedge16):
-                        PskReader.read_types(fp, Psk.Wedge16, section, psk.wedges)
-                    elif section.data_size == ctypes.sizeof(Psk.Wedge32):
-                        PskReader.read_types(fp, Psk.Wedge32, section, psk.wedges)
-                    else:
-                        raise RuntimeError('Unrecognized wedge format')
-                elif section.name == b'FACE0000':
-                    PskReader.read_types(fp, Psk.Face, section, psk.faces)
-                elif section.name == b'MATT0000':
-                    PskReader.read_types(fp, Psk.Material, section, psk.materials)
-                elif section.name == b'REFSKELT':
-                    PskReader.read_types(fp, Psk.Bone, section, psk.bones)
-                elif section.name == b'RAWWEIGHTS':
-                    PskReader.read_types(fp, Psk.Weight, section, psk.weights)
-                elif section.name == b'FACE3200':
-                    PskReader.read_types(fp, Psk.Face32, section, psk.faces)
-                elif section.name == b'VERTEXCOLOR':
-                    PskReader.read_types(fp, Color, section, psk.vertex_colors)
-                elif section.name.startswith(b'EXTRAUVS'):
-                    PskReader.read_types(fp, Vector2, section, psk.extra_uvs)
-                elif section.name == b'VTXNORMS':
-                    PskReader.read_types(fp, Vector3, section, psk.vertex_normals)
-                else:
-                    raise RuntimeError(f'Unrecognized section "{section.name} at position {15:fp.tell()}"')
-        return psk
+def _read_types(fp, data_class: ctypes.Structure, section: Section, data):
+    buffer_length = section.data_size * section.data_count
+    buffer = fp.read(buffer_length)
+    offset = 0
+    for _ in range(section.data_count):
+        data.append(data_class.from_buffer_copy(buffer, offset))
+        offset += section.data_size
+
+
+def read_psk(path) -> Psk:
+    psk = Psk()
+    with open(path, 'rb') as fp:
+        while fp.read(1):
+            fp.seek(-1, 1)
+            section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section)))
+            if section.name == b'ACTRHEAD':
+                pass
+            elif section.name == b'PNTS0000':
+                _read_types(fp, Vector3, section, psk.points)
+            elif section.name == b'VTXW0000':
+                if section.data_size == ctypes.sizeof(Psk.Wedge16):
+                    _read_types(fp, Psk.Wedge16, section, psk.wedges)
+                elif section.data_size == ctypes.sizeof(Psk.Wedge32):
+                    _read_types(fp, Psk.Wedge32, section, psk.wedges)
+                else:
+                    raise RuntimeError('Unrecognized wedge format')
+            elif section.name == b'FACE0000':
+                _read_types(fp, Psk.Face, section, psk.faces)
+            elif section.name == b'MATT0000':
+                _read_types(fp, Psk.Material, section, psk.materials)
+            elif section.name == b'REFSKELT':
+                _read_types(fp, Psk.Bone, section, psk.bones)
+            elif section.name == b'RAWWEIGHTS':
+                _read_types(fp, Psk.Weight, section, psk.weights)
+            elif section.name == b'FACE3200':
+                _read_types(fp, Psk.Face32, section, psk.faces)
+            elif section.name == b'VERTEXCOLOR':
+                _read_types(fp, Color, section, psk.vertex_colors)
+            elif section.name.startswith(b'EXTRAUVS'):
+                _read_types(fp, Vector2, section, psk.extra_uvs)
+            elif section.name == b'VTXNORMS':
+                _read_types(fp, Vector3, section, psk.vertex_normals)
+            else:
+                raise RuntimeError(f'Unrecognized section "{section.name} at position {15:fp.tell()}"')
+    return psk
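The reader walks the file as a flat run of sections: it probes one byte to detect end-of-file, seeks back, reads a fixed-size Section header, and then hands off to _read_types to pull data_count records of data_size bytes each. The Section structure defined in .data presumably resembles the standard PSK/PSA chunk header; the field names below are taken from their use here, but the exact layout is an assumption:

    import ctypes

    class Section(ctypes.Structure):
        _fields_ = [
            ('name', ctypes.c_char * 20),    # e.g. b'PNTS0000'
            ('type_flags', ctypes.c_int32),
            ('data_size', ctypes.c_int32),   # size of one record, in bytes
            ('data_count', ctypes.c_int32),  # number of records that follow
        ]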
@@ -15,10 +15,6 @@ class BoneGroupListItem(PropertyGroup):
     count: IntProperty()
     is_selected: BoolProperty(default=False)

-    @property
-    def name(self):
-        return self.name
-

 classes = (
     BoneGroupListItem,