
Merge branch 'blender-4.0'

# Conflicts:
#	README.md
Colin Basnett 2023-11-14 01:39:59 -08:00
commit 89767569ec
15 changed files with 377 additions and 242 deletions

View File

@ -16,13 +16,23 @@ This Blender add-on allows you to import and export meshes and animations to and
* Non-standard file section data is supported for import only (vertex normals, extra UV channels, vertex colors, shape keys).
* Fine-grained PSA sequence importing for efficient workflow when working with large PSA files.
* PSA sequence metadata (e.g., frame rate, sequence name) is preserved on import, allowing this data to be reused on export.
* Specific [bone groups](https://docs.blender.org/manual/en/latest/animation/armatures/properties/bone_groups.html) can be excluded from PSK/PSA export (useful for excluding non-contributing bones such as IK controllers).
* PSA sequences can be exported directly from actions or delineated using a scene's [timeline markers](https://docs.blender.org/manual/en/latest/animation/markers.html), allowing direct use of the [NLA](https://docs.blender.org/manual/en/latest/editors/nla/index.html) when creating sequences.
* Specific bone collections can be excluded from PSK/PSA export (useful for excluding non-contributing bones such as IK controllers); see the sketch after this list.
* PSA sequences can be exported directly from actions or delineated using a scene's [timeline markers](https://docs.blender.org/manual/en/latest/animation/markers.html) or NLA track strips, allowing direct use of the [NLA](https://docs.blender.org/manual/en/latest/editors/nla/index.html) when creating sequences.
* Manual re-ordering of material slots when exporting multiple mesh objects.
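Below is a minimal, illustrative sketch (not part of the add-on itself) of the Blender 4.0 bone collection API that replaces the old bone groups; it assumes an armature object named `Armature` exists in the scene:

```python
import bpy

armature_object = bpy.data.objects['Armature']  # hypothetical object name
armature = armature_object.data

# Blender 4.0+ stores bone collections on the armature data instead of pose bone groups.
for index, bone_collection in enumerate(armature.collections):
    print(index, bone_collection.name, len(bone_collection.bones))

# Bones that belong to no collection appear under 'Unassigned' in the exporter's bone list.
unassigned = [bone.name for bone in armature.bones if len(bone.collections) == 0]
print(unassigned)
```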
## Compatibility
| Blender Version | Addon Version | LTS Support |
|--------------------------------------------------------------|--------------------------------------------------------------------------------|-------------------|
| 4.0+ | [latest](https://github.com/DarklightGames/io_scene_psk_psa/releases/latest) | TBD |
| [3.4 - 3.6](https://www.blender.org/download/lts/3-6/) | [5.0.5](https://github.com/DarklightGames/io_scene_psk_psa/releases/tag/5.0.5) | ✅️ June 2025 |
| [2.93 - 3.3](https://www.blender.org/download/releases/3-3/) | [4.3.0](https://github.com/DarklightGames/io_scene_psk_psa/releases/tag/4.3.0) | ✅️ September 2024 |
Bug fixes will be issued for legacy add-on versions that are under [Blender's LTS maintenance period](https://www.blender.org/download/lts/). Once the LTS period has ended, legacy add-on versions will no longer be supported by the maintainers of this repository, although we will accept pull requests for bug fixes.
# Installation
1. Download the zip file for the latest version from the [releases](https://github.com/DarklightGames/io_export_psk_psa/releases) page.
2. Open Blender 3.4.0 or later.
2. Open Blender 4.0.0 or later.
3. Navigate to the Blender Preferences (`Edit` > `Preferences`).
4. Select the `Add-ons` tab.
5. Click the `Install...` button.

View File

@ -1,8 +1,10 @@
from bpy.app.handlers import persistent
bl_info = {
"name": "PSK/PSA Importer/Exporter",
"author": "Colin Basnett, Yurii Ti",
"version": (5, 0, 6),
"blender": (3, 4, 0),
"version": (6, 0, 0),
"blender": (4, 0, 0),
"description": "PSK/PSA Import/Export (.psk/.psa)",
"warning": "",
"doc_url": "https://github.com/DarklightGames/io_scene_psk_psa",
@ -124,3 +126,16 @@ def unregister():
if __name__ == '__main__':
register()
@persistent
def load_handler(dummy):
# Convert old `psa_sequence_fps` property to new `psa_export.fps` property.
# This is only needed for backwards compatibility with files that may have used older versions of the addon.
for action in bpy.data.actions:
if 'psa_sequence_fps' in action:
action.psa_export.fps = action['psa_sequence_fps']
del action['psa_sequence_fps']
bpy.app.handlers.load_post.append(load_handler)

View File

@ -1,7 +1,5 @@
import datetime
import re
import typing
from collections import Counter
from typing import List, Iterable
import addon_utils
@ -9,24 +7,6 @@ import bpy.types
from bpy.types import NlaStrip, Object, AnimData
class Timer:
def __enter__(self):
self.start = datetime.datetime.now()
self.interval = None
return self
def __exit__(self, *args):
self.end = datetime.datetime.now()
self.interval = self.end - self.start
@property
def duration(self):
if self.interval is not None:
return self.interval
else:
return datetime.datetime.now() - self.start
def rgb_to_srgb(c: float):
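# Standard linear-to-sRGB transfer function (IEC 61966-2-1): linear below the
# 0.0031308 threshold, gamma-encoded above it.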
if c > 0.0031308:
return 1.055 * (pow(c, (1.0 / 2.4))) - 0.055
@ -34,7 +14,7 @@ def rgb_to_srgb(c: float):
return 12.92 * c
def get_nla_strips_in_timeframe(animation_data: AnimData, frame_min: float, frame_max: float) -> List[NlaStrip]:
def get_nla_strips_in_frame_range(animation_data: AnimData, frame_min: float, frame_max: float) -> List[NlaStrip]:
if animation_data is None:
return []
strips = []
@ -49,46 +29,49 @@ def get_nla_strips_in_timeframe(animation_data: AnimData, frame_min: float, fram
return strips
def populate_bone_group_list(armature_object: Object, bone_group_list: bpy.props.CollectionProperty) -> None:
def populate_bone_collection_list(armature_object: Object, bone_collection_list: bpy.props.CollectionProperty) -> None:
"""
Updates the bone group collection.
Updates the bone collection list.
Bone group selections are preserved between updates unless none of the groups were previously selected;
otherwise, all groups are selected by default.
Bone collection selections are preserved between updates unless none of the collections were previously selected;
otherwise, all collections are selected by default.
"""
has_selected_groups = any([g.is_selected for g in bone_group_list])
unassigned_group_is_selected, selected_assigned_group_names = True, []
has_selected_collections = any([g.is_selected for g in bone_collection_list])
unassigned_collection_is_selected, selected_assigned_collection_names = True, []
if armature_object is None:
return
if has_selected_groups:
if has_selected_collections:
# Preserve collection selections before clearing the list.
# We handle selections for the unassigned collection separately to cover the edge case
# where there might be an actual collection with 'Unassigned' as its name.
unassigned_group_idx, unassigned_group_is_selected = next(iter([
(i, g.is_selected) for i, g in enumerate(bone_group_list) if g.index == -1]), (-1, False))
unassigned_collection_idx, unassigned_collection_is_selected = next(iter([
(i, g.is_selected) for i, g in enumerate(bone_collection_list) if g.index == -1]), (-1, False))
selected_assigned_group_names = [
g.name for i, g in enumerate(bone_group_list) if i != unassigned_group_idx and g.is_selected]
selected_assigned_collection_names = [
g.name for i, g in enumerate(bone_collection_list) if i != unassigned_collection_idx and g.is_selected]
bone_group_list.clear()
bone_collection_list.clear()
if armature_object and armature_object.pose:
bone_group_counts = Counter(map(lambda x: x.bone_group, armature_object.pose.bones))
armature = armature_object.data
item = bone_group_list.add()
item.name = 'Unassigned'
item.index = -1
item.count = 0 if None not in bone_group_counts else bone_group_counts[None]
item.is_selected = unassigned_group_is_selected
if armature is None:
return
for bone_group_index, bone_group in enumerate(armature_object.pose.bone_groups):
item = bone_group_list.add()
item.name = bone_group.name
item.index = bone_group_index
item.count = 0 if bone_group not in bone_group_counts else bone_group_counts[bone_group]
item.is_selected = bone_group.name in selected_assigned_group_names if has_selected_groups else True
item = bone_collection_list.add()
item.name = 'Unassigned'
item.index = -1
# Count the number of bones without an assigned bone collection
item.count = sum(map(lambda bone: 1 if len(bone.collections) == 0 else 0, armature.bones))
item.is_selected = unassigned_collection_is_selected
for bone_collection_index, bone_collection in enumerate(armature.collections):
item = bone_collection_list.add()
item.name = bone_collection.name
item.index = bone_collection_index
item.count = len(bone_collection.bones)
item.is_selected = bone_collection.name in selected_assigned_collection_names if has_selected_collections else True
def check_bone_names(bone_names: Iterable[str]):
@ -100,15 +83,15 @@ def check_bone_names(bone_names: Iterable[str]):
f'You can bypass this by disabling "Enforce Bone Name Restrictions" in the export settings.')
def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_group_indices: List[int]) -> List[str]:
def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_collection_indices: List[int]) -> List[str]:
"""
Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone groups.
Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone collections.
Note that the ancestors of bones within the bone groups will also be present in the returned list.
Note that the ancestors of bones within the bone collections will also be present in the returned list.
:param armature_object: Blender object with type 'ARMATURE'
:param bone_filter_mode: One of ['ALL', 'BONE_GROUPS']
:param bone_group_indices: List of bone group indices to be exported.
:param bone_filter_mode: One of ['ALL', 'BONE_COLLECTIONS']
:param bone_collection_indices: List of bone collection indices to be exported.
:return: A sorted list of bone indices that should be exported.
"""
if armature_object is None or armature_object.type != 'ARMATURE':
@ -116,16 +99,21 @@ def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_g
armature_data = typing.cast(bpy.types.Armature, armature_object.data)
bones = armature_data.bones
pose_bones = armature_object.pose.bones
bone_names = [x.name for x in bones]
# Get a list of the bone indices that we are explicitly including.
bone_index_stack = []
is_exporting_none_bone_groups = -1 in bone_group_indices
for bone_index, pose_bone in enumerate(pose_bones):
is_exporting_unassigned_bone_collections = -1 in bone_collection_indices
bone_collections = list(armature_data.collections)
for bone_index, bone in enumerate(bones):
# Check if this bone is in any of the collections in the bone collection indices list.
this_bone_collection_indices = set(bone_collections.index(x) for x in bone.collections)
is_in_exported_bone_collections = len(set(bone_collection_indices).intersection(this_bone_collection_indices)) > 0
if bone_filter_mode == 'ALL' or \
(pose_bone.bone_group is None and is_exporting_none_bone_groups) or \
(pose_bone.bone_group is not None and pose_bone.bone_group_index in bone_group_indices):
(len(bone.collections) == 0 and is_exporting_unassigned_bone_collections) or \
is_in_exported_bone_collections:
bone_index_stack.append((bone_index, None))
# For each bone that is explicitly being added, recursively walk up the hierarchy and ensure that all of
@ -146,7 +134,7 @@ def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_g
# Split out the bone indices and the instigator bone names into separate lists.
# We use the bone names for the return values because the bone name is a more universal way of referencing them.
# For example, users of this function may modify bone lists, which would invalidate the indices and require a
# For example, users of this function may modify bone lists, which would invalidate the indices and require an
# index mapping scheme to resolve it. Using strings is more comfy and results in less code downstream.
instigator_bone_names = [bones[x[1]].name if x[1] is not None else None for x in bone_indices]
bone_names = [bones[x[0]].name for x in bone_indices]

View File

@ -26,7 +26,7 @@ class PsaBuildOptions:
self.animation_data: Optional[AnimData] = None
self.sequences: List[PsaBuildSequence] = []
self.bone_filter_mode: str = 'ALL'
self.bone_group_indices: List[int] = []
self.bone_collection_indices: List[int] = []
self.should_enforce_bone_name_restrictions: bool = False
self.sequence_name_prefix: str = ''
self.sequence_name_suffix: str = ''
@ -73,7 +73,7 @@ def build_psa(context: bpy.types.Context, options: PsaBuildOptions) -> Psa:
pose_bones = [x[1] for x in pose_bones]
# Get a list of all the bone indices and instigator bones for the bone filter settings.
export_bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_group_indices)
export_bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_collection_indices)
bone_indices = [bone_names.index(x) for x in export_bone_names]
# Make the bone lists contain only the bones that are going to be exported.
@ -91,7 +91,11 @@ def build_psa(context: bpy.types.Context, options: PsaBuildOptions) -> Psa:
# Build list of PSA bones.
for bone in bones:
psa_bone = Psa.Bone()
psa_bone.name = bytes(bone.name, encoding='windows-1252')
try:
psa_bone.name = bytes(bone.name, encoding='windows-1252')
except UnicodeEncodeError:
raise RuntimeError(f'Bone name "{bone.name}" contains characters that cannot be encoded in the Windows-1252 codepage')
try:
parent_index = bones.index(bone.parent)
@ -165,7 +169,10 @@ def build_psa(context: bpy.types.Context, options: PsaBuildOptions) -> Psa:
frame_step = -frame_step
psa_sequence = Psa.Sequence()
psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252')
try:
psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252')
except UnicodeEncodeError:
raise RuntimeError(f'Sequence name "{export_sequence.name}" contains characters that cannot be encoded in the Windows-1252 codepage')
psa_sequence.frame_count = frame_count
psa_sequence.frame_start_index = frame_start_index
psa_sequence.fps = frame_count / sequence_duration

View File

@ -8,10 +8,10 @@ from bpy.types import Context, Armature, Action, Object, AnimData, TimelineMarke
from bpy_extras.io_utils import ExportHelper
from bpy_types import Operator
from .properties import PSA_PG_export, PSA_PG_export_action_list_item, filter_sequences
from ..builder import build_psa, PsaBuildSequence, PsaBuildOptions
from ..export.properties import PSA_PG_export, PSA_PG_export_action_list_item, filter_sequences
from ..writer import write_psa
from ...helpers import populate_bone_group_list, get_nla_strips_in_timeframe
from ...helpers import populate_bone_collection_list, get_nla_strips_in_frame_range
def is_action_for_armature(armature: Armature, action: Action):
@ -80,12 +80,14 @@ def update_actions_and_timeline_markers(context: Context, armature: Armature):
continue
if marker_name.startswith('#'):
continue
item = pg.marker_list.add()
item.name = marker_name
item.is_selected = False
frame_start, frame_end = sequence_frame_ranges[marker_name]
item.frame_start = frame_start
item.frame_end = frame_end
sequences = get_sequences_from_name_and_frame_range(marker_name, frame_start, frame_end)
for (sequence_name, frame_start, frame_end) in sequences:
item = pg.marker_list.add()
item.name = sequence_name
item.is_selected = False
item.frame_start = frame_start
item.frame_end = frame_end
def get_sequence_fps(context: Context, fps_source: str, fps_custom: float, actions: Iterable[Action]) -> float:
@ -95,16 +97,7 @@ def get_sequence_fps(context: Context, fps_source: str, fps_custom: float, actio
return fps_custom
elif fps_source == 'ACTION_METADATA':
# Get the minimum value of action metadata FPS values.
fps_list = []
for action in filter(lambda x: 'psa_sequence_fps' in x, actions):
fps = action['psa_sequence_fps']
if type(fps) == int or type(fps) == float:
fps_list.append(fps)
if len(fps_list) > 0:
return min(fps_list)
else:
# No valid action metadata to use, fallback to scene FPS
return context.scene.render.fps
return min([action.psa_export.fps for action in actions])
else:
raise RuntimeError(f'Invalid FPS source "{fps_source}"')
@ -126,9 +119,9 @@ def get_animation_data_object(context: Context) -> Object:
def is_bone_filter_mode_item_available(context, identifier):
if identifier == 'BONE_GROUPS':
obj = context.active_object
if not obj.pose or not obj.pose.bone_groups:
if identifier == 'BONE_COLLECTIONS':
armature = context.active_object.data
if len(armature.collections) == 0:
return False
return True
@ -150,7 +143,7 @@ def get_timeline_marker_sequence_frame_ranges(animation_data: AnimData, context:
if next_marker_index < len(sorted_timeline_markers):
# There is a next marker. Use that next marker's frame position as the last frame of this sequence.
frame_end = sorted_timeline_markers[next_marker_index].frame
nla_strips = get_nla_strips_in_timeframe(animation_data, marker.frame, frame_end)
nla_strips = get_nla_strips_in_frame_range(animation_data, marker.frame, frame_end)
if len(nla_strips) > 0:
frame_end = min(frame_end, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips)))
frame_start = max(frame_start, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips)))
@ -174,11 +167,9 @@ def get_timeline_marker_sequence_frame_ranges(animation_data: AnimData, context:
return sequence_frame_ranges
def get_sequences_from_action(action: Action) -> List[Tuple[str, int, int]]:
frame_start = int(action.frame_range[0])
frame_end = int(action.frame_range[1])
def get_sequences_from_name_and_frame_range(name: str, frame_start: int, frame_end: int) -> List[Tuple[str, int, int]]:
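# For example (hypothetical name): 'WalkFwd/WalkBwd' with frames 1-30 expands to
# [('WalkFwd', 1, 30), ('WalkBwd', 30, 1)]; the part after the slash becomes a
# reversed sequence that plays the same frame range backwards.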
reversed_pattern = r'(.+)/(.+)'
reversed_match = re.match(reversed_pattern, action.name)
reversed_match = re.match(reversed_pattern, name)
if reversed_match:
forward_name = reversed_match.group(1)
backwards_name = reversed_match.group(2)
@ -187,7 +178,13 @@ def get_sequences_from_action(action: Action) -> List[Tuple[str, int, int]]:
(backwards_name, frame_end, frame_start)
]
else:
return [(action.name, frame_start, frame_end)]
return [(name, frame_start, frame_end)]
def get_sequences_from_action(action: Action) -> List[Tuple[str, int, int]]:
frame_start = int(action.frame_range[0])
frame_end = int(action.frame_range[1])
return get_sequences_from_name_and_frame_range(action.name, frame_start, frame_end)
def get_sequences_from_action_pose_marker(action: Action, pose_markers: List[TimelineMarker], pose_marker: TimelineMarker, pose_marker_index: int) -> List[Tuple[str, int, int]]:
@ -196,17 +193,7 @@ def get_sequences_from_action_pose_marker(action: Action, pose_markers: List[Tim
frame_end = pose_markers[pose_marker_index + 1].frame
else:
frame_end = int(action.frame_range[1])
reversed_pattern = r'(.+)/(.+)'
reversed_match = re.match(reversed_pattern, pose_marker.name)
if reversed_match:
forward_name = reversed_match.group(1)
backwards_name = reversed_match.group(2)
return [
(forward_name, frame_start, frame_end),
(backwards_name, frame_end, frame_start)
]
else:
return [(pose_marker.name, frame_start, frame_end)]
return get_sequences_from_name_and_frame_range(pose_marker.name, frame_start, frame_end)
def get_visible_sequences(pg: PSA_PG_export, sequences) -> List[PSA_PG_export_action_list_item]:
@ -254,12 +241,18 @@ class PSA_OT_export(Operator, ExportHelper):
# SOURCE
layout.prop(pg, 'sequence_source', text='Source')
if pg.sequence_source == 'TIMELINE_MARKERS':
if pg.sequence_source in {'TIMELINE_MARKERS', 'NLA_TRACK_STRIPS'}:
# ANIMDATA SOURCE
layout.prop(pg, 'should_override_animation_data')
if pg.should_override_animation_data:
layout.prop(pg, 'animation_data_override', text='')
if pg.sequence_source == 'NLA_TRACK_STRIPS':
flow = layout.grid_flow()
flow.use_property_split = True
flow.use_property_decorate = False
flow.prop(pg, 'nla_track')
# SELECT ALL/NONE
row = layout.row(align=True)
row.label(text='Select')
@ -269,25 +262,19 @@ class PSA_OT_export(Operator, ExportHelper):
# ACTIONS
if pg.sequence_source == 'ACTIONS':
rows = max(3, min(len(pg.action_list), 10))
layout.template_list('PSA_UL_export_sequences', '', pg, 'action_list', pg, 'action_list_index', rows=rows)
col = layout.column()
col.use_property_split = True
col.use_property_decorate = False
col.prop(pg, 'sequence_name_prefix')
col.prop(pg, 'sequence_name_suffix')
elif pg.sequence_source == 'TIMELINE_MARKERS':
rows = max(3, min(len(pg.marker_list), 10))
layout.template_list('PSA_UL_export_sequences', '', pg, 'marker_list', pg, 'marker_list_index',
rows=rows)
layout.template_list('PSA_UL_export_sequences', '', pg, 'marker_list', pg, 'marker_list_index', rows=rows)
elif pg.sequence_source == 'NLA_TRACK_STRIPS':
rows = max(3, min(len(pg.nla_strip_list), 10))
layout.template_list('PSA_UL_export_sequences', '', pg, 'nla_strip_list', pg, 'nla_strip_list_index', rows=rows)
col = layout.column()
col.use_property_split = True
col.use_property_decorate = False
col.prop(pg, 'sequence_name_prefix')
col.prop(pg, 'sequence_name_suffix')
col = layout.column()
col.use_property_split = True
col.use_property_decorate = False
col.prop(pg, 'sequence_name_prefix')
col.prop(pg, 'sequence_name_suffix')
# Determine if there is going to be a naming conflict and display an error, if so.
selected_items = [x for x in pg.action_list if x.is_selected]
@ -304,13 +291,13 @@ class PSA_OT_export(Operator, ExportHelper):
row = layout.row(align=True)
row.prop(pg, 'bone_filter_mode', text='Bones')
if pg.bone_filter_mode == 'BONE_GROUPS':
if pg.bone_filter_mode == 'BONE_COLLECTIONS':
row = layout.row(align=True)
row.label(text='Select')
row.operator(PSA_OT_export_bone_groups_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
row.operator(PSA_OT_export_bone_groups_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
rows = max(3, min(len(pg.bone_group_list), 10))
layout.template_list('PSX_UL_bone_group_list', '', pg, 'bone_group_list', pg, 'bone_group_list_index',
row.operator(PSA_OT_export_bone_collections_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
row.operator(PSA_OT_export_bone_collections_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
rows = max(3, min(len(pg.bone_collection_list), 10))
layout.template_list('PSX_UL_bone_collection_list', '', pg, 'bone_collection_list', pg, 'bone_collection_list_index',
rows=rows)
layout.prop(pg, 'should_enforce_bone_name_restrictions')
@ -345,8 +332,7 @@ class PSA_OT_export(Operator, ExportHelper):
update_actions_and_timeline_markers(context, self.armature_object.data)
# Populate bone groups list.
populate_bone_group_list(self.armature_object, pg.bone_group_list)
populate_bone_collection_list(self.armature_object, pg.bone_collection_list)
context.window_manager.fileselect_add(self)
@ -360,6 +346,8 @@ class PSA_OT_export(Operator, ExportHelper):
raise RuntimeError('No actions were selected for export')
elif pg.sequence_source == 'TIMELINE_MARKERS' and len(pg.marker_list) == 0:
raise RuntimeError('No timeline markers were selected for export')
elif pg.sequence_source == 'NLA_TRACK_STRIPS' and len(pg.nla_strip_list) == 0:
raise RuntimeError('No NLA track strips were selected for export')
# Populate the export sequence list.
animation_data_object = get_animation_data_object(context)
@ -371,29 +359,38 @@ class PSA_OT_export(Operator, ExportHelper):
export_sequences: List[PsaBuildSequence] = []
if pg.sequence_source == 'ACTIONS':
for action in filter(lambda x: x.is_selected, pg.action_list):
if len(action.action.fcurves) == 0:
for action_item in filter(lambda x: x.is_selected, pg.action_list):
if len(action_item.action.fcurves) == 0:
continue
export_sequence = PsaBuildSequence()
export_sequence.nla_state.action = action.action
export_sequence.name = action.name
export_sequence.nla_state.frame_start = action.frame_start
export_sequence.nla_state.frame_end = action.frame_end
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action.action])
export_sequence.compression_ratio = action.action.psa_export.compression_ratio
export_sequence.key_quota = action.action.psa_export.key_quota
export_sequence.nla_state.action = action_item.action
export_sequence.name = action_item.name
export_sequence.nla_state.frame_start = action_item.frame_start
export_sequence.nla_state.frame_end = action_item.frame_end
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action_item.action])
export_sequence.compression_ratio = action_item.action.psa_export.compression_ratio
export_sequence.key_quota = action_item.action.psa_export.key_quota
export_sequences.append(export_sequence)
elif pg.sequence_source == 'TIMELINE_MARKERS':
for marker in pg.marker_list:
for marker_item in filter(lambda x: x.is_selected, pg.marker_list):
export_sequence = PsaBuildSequence()
export_sequence.name = marker.name
export_sequence.name = marker_item.name
export_sequence.nla_state.action = None
export_sequence.nla_state.frame_start = marker.frame_start
export_sequence.nla_state.frame_end = marker.frame_end
export_sequence.nla_state.frame_start = marker_item.frame_start
export_sequence.nla_state.frame_end = marker_item.frame_end
nla_strips_actions = set(
map(lambda x: x.action, get_nla_strips_in_timeframe(animation_data, marker.frame_start, marker.frame_end)))
map(lambda x: x.action, get_nla_strips_in_frame_range(animation_data, marker_item.frame_start, marker_item.frame_end)))
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, nla_strips_actions)
export_sequences.append(export_sequence)
elif pg.sequence_source == 'NLA_TRACK_STRIPS':
for nla_strip_item in filter(lambda x: x.is_selected, pg.nla_strip_list):
export_sequence = PsaBuildSequence()
export_sequence.name = nla_strip_item.name
export_sequence.nla_state.action = None
export_sequence.nla_state.frame_start = nla_strip_item.frame_start
export_sequence.nla_state.frame_end = nla_strip_item.frame_end
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [nla_strip_item.action])
export_sequences.append(export_sequence)
else:
raise ValueError(f'Unhandled sequence source: {pg.sequence_source}')
@ -401,7 +398,7 @@ class PSA_OT_export(Operator, ExportHelper):
options.animation_data = animation_data
options.sequences = export_sequences
options.bone_filter_mode = pg.bone_filter_mode
options.bone_group_indices = [x.index for x in pg.bone_group_list if x.is_selected]
options.bone_collection_indices = [x.index for x in pg.bone_collection_list if x.is_selected]
options.should_enforce_bone_name_restrictions = pg.should_enforce_bone_name_restrictions
options.sequence_name_prefix = pg.sequence_name_prefix
options.sequence_name_suffix = pg.sequence_name_suffix
@ -432,6 +429,8 @@ class PSA_OT_export_actions_select_all(Operator):
return pg.action_list
elif pg.sequence_source == 'TIMELINE_MARKERS':
return pg.marker_list
elif pg.sequence_source == 'NLA_TRACK_STRIPS':
return pg.nla_strip_list
return None
@classmethod
@ -463,6 +462,8 @@ class PSA_OT_export_actions_deselect_all(Operator):
return pg.action_list
elif pg.sequence_source == 'TIMELINE_MARKERS':
return pg.marker_list
elif pg.sequence_source == 'NLA_TRACK_STRIPS':
return pg.nla_strip_list
return None
@classmethod
@ -479,42 +480,42 @@ class PSA_OT_export_actions_deselect_all(Operator):
return {'FINISHED'}
class PSA_OT_export_bone_groups_select_all(Operator):
bl_idname = 'psa_export.bone_groups_select_all'
class PSA_OT_export_bone_collections_select_all(Operator):
bl_idname = 'psa_export.bone_collections_select_all'
bl_label = 'Select All'
bl_description = 'Select all bone groups'
bl_description = 'Select all bone collections'
bl_options = {'INTERNAL'}
@classmethod
def poll(cls, context):
pg = getattr(context.scene, 'psa_export')
item_list = pg.bone_group_list
item_list = pg.bone_collection_list
has_unselected_items = any(map(lambda action: not action.is_selected, item_list))
return len(item_list) > 0 and has_unselected_items
def execute(self, context):
pg = getattr(context.scene, 'psa_export')
for item in pg.bone_group_list:
for item in pg.bone_collection_list:
item.is_selected = True
return {'FINISHED'}
class PSA_OT_export_bone_groups_deselect_all(Operator):
bl_idname = 'psa_export.bone_groups_deselect_all'
class PSA_OT_export_bone_collections_deselect_all(Operator):
bl_idname = 'psa_export.bone_collections_deselect_all'
bl_label = 'Deselect All'
bl_description = 'Deselect all bone groups'
bl_description = 'Deselect all bone collections'
bl_options = {'INTERNAL'}
@classmethod
def poll(cls, context):
pg = getattr(context.scene, 'psa_export')
item_list = pg.bone_group_list
item_list = pg.bone_collection_list
has_selected_actions = any(map(lambda action: action.is_selected, item_list))
return len(item_list) > 0 and has_selected_actions
def execute(self, context):
pg = getattr(context.scene, 'psa_export')
for action in pg.bone_group_list:
for action in pg.bone_collection_list:
action.is_selected = False
return {'FINISHED'}
@ -523,6 +524,6 @@ classes = (
PSA_OT_export,
PSA_OT_export_actions_select_all,
PSA_OT_export_actions_deselect_all,
PSA_OT_export_bone_groups_select_all,
PSA_OT_export_bone_groups_deselect_all,
PSA_OT_export_bone_collections_select_all,
PSA_OT_export_bone_collections_deselect_all,
)

View File

@ -1,12 +1,13 @@
import re
import sys
from fnmatch import fnmatch
from typing import List
from typing import List, Optional
from bpy.props import BoolProperty, PointerProperty, EnumProperty, FloatProperty, CollectionProperty, IntProperty, \
StringProperty
from bpy.types import PropertyGroup, Object, Action
from bpy.types import PropertyGroup, Object, Action, AnimData, Context
from ...types import PSX_PG_bone_group_list_item
from ...types import PSX_PG_bone_collection_list_item
def psa_export_property_group_animation_data_override_poll(_context, obj):
@ -25,7 +26,7 @@ class PSA_PG_export_action_list_item(PropertyGroup):
is_pose_marker: BoolProperty(options={'HIDDEN'})
class PSA_PG_export_timeline_markers(PropertyGroup):
class PSA_PG_export_timeline_markers(PropertyGroup): # TODO: rename this to singular
marker_index: IntProperty()
name: StringProperty()
is_selected: BoolProperty(default=True)
@ -33,6 +34,51 @@ class PSA_PG_export_timeline_markers(PropertyGroup):
frame_end: IntProperty(options={'HIDDEN'})
class PSA_PG_export_nla_strip_list_item(PropertyGroup):
name: StringProperty()
action: PointerProperty(type=Action)
frame_start: FloatProperty()
frame_end: FloatProperty()
is_selected: BoolProperty(default=True)
def nla_track_update_cb(self: 'PSA_PG_export', context: Context) -> None:
self.nla_strip_list.clear()
if context.object is None or context.object.animation_data is None:
return
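# The nla_track string comes from nla_track_search_cb below and has the form
# '<index> - <name>'; parse out the leading track index (-1 if nothing is selected).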
match = re.match(r'^(\d+).+$', self.nla_track)
self.nla_track_index = int(match.group(1)) if match else -1
if self.nla_track_index >= 0:
nla_track = context.object.animation_data.nla_tracks[self.nla_track_index]
for nla_strip in nla_track.strips:
strip: PSA_PG_export_nla_strip_list_item = self.nla_strip_list.add()
strip.action = nla_strip.action
strip.name = nla_strip.name
strip.frame_start = nla_strip.frame_start
strip.frame_end = nla_strip.frame_end
def get_animation_data(pg: 'PSA_PG_export', context: Context) -> Optional[AnimData]:
animation_data_object = context.object
if pg.should_override_animation_data:
animation_data_object = pg.animation_data_override
return animation_data_object.animation_data if animation_data_object else None
def nla_track_search_cb(self, context: Context, edit_text: str):
pg = getattr(context.scene, 'psa_export')
animation_data = get_animation_data(pg, context)
if animation_data is None:
return
for index, nla_track in enumerate(animation_data.nla_tracks):
yield f'{index} - {nla_track.name}'
def animation_data_override_update_cb(self: 'PSA_PG_export', context: Context):
# Reset NLA track selection
self.nla_track = ''
class PSA_PG_export(PropertyGroup):
root_motion: BoolProperty(
name='Root Motion',
@ -46,10 +92,12 @@ class PSA_PG_export(PropertyGroup):
name='Override Animation Data',
options=empty_set,
default=False,
description='Use the animation data from a different object instead of the selected object'
description='Use the animation data from a different object instead of the selected object',
update=animation_data_override_update_cb,
)
animation_data_override: PointerProperty(
type=Object,
update=animation_data_override_update_cb,
poll=psa_export_property_group_animation_data_override_poll
)
sequence_source: EnumProperty(
@ -58,19 +106,25 @@ class PSA_PG_export(PropertyGroup):
description='',
items=(
('ACTIONS', 'Actions', 'Sequences will be exported using actions', 'ACTION', 0),
('TIMELINE_MARKERS', 'Timeline Markers', 'Sequences will be exported using timeline markers', 'MARKER_HLT',
1),
('TIMELINE_MARKERS', 'Timeline Markers', 'Sequences are delineated by scene timeline markers', 'MARKER_HLT', 1),
('NLA_TRACK_STRIPS', 'NLA Track Strips', 'Sequences are delineated by the start & end times of strips on the selected NLA track', 'NLA', 2)
)
)
nla_track: StringProperty(
name='NLA Track',
options=empty_set,
description='',
search=nla_track_search_cb,
update=nla_track_update_cb
)
nla_track_index: IntProperty(name='NLA Track Index', default=-1)
fps_source: EnumProperty(
name='FPS Source',
options=empty_set,
description='',
items=(
('SCENE', 'Scene', '', 'SCENE_DATA', 0),
('ACTION_METADATA', 'Action Metadata',
'The frame rate will be determined by action\'s "psa_sequence_fps" custom property, if it exists. If the Sequence Source is Timeline Markers, the lowest value of all contributing actions will be used. If no metadata is available, the scene\'s frame rate will be used.',
'PROPERTIES', 1),
('ACTION_METADATA', 'Action Metadata', 'The frame rate will be determined by the action\'s FPS property found in the PSA Export panel.\n\nIf the Sequence Source is Timeline Markers, the lowest value of all contributing actions will be used', 'PROPERTIES', 1),
('CUSTOM', 'Custom', '', 2)
)
)
@ -80,18 +134,20 @@ class PSA_PG_export(PropertyGroup):
action_list_index: IntProperty(default=0)
marker_list: CollectionProperty(type=PSA_PG_export_timeline_markers)
marker_list_index: IntProperty(default=0)
nla_strip_list: CollectionProperty(type=PSA_PG_export_nla_strip_list_item)
nla_strip_list_index: IntProperty(default=0)
bone_filter_mode: EnumProperty(
name='Bone Filter',
options=empty_set,
description='',
items=(
('ALL', 'All', 'All bones will be exported.'),
('BONE_GROUPS', 'Bone Groups', 'Only bones belonging to the selected bone groups and their ancestors will '
'be exported.'),
('BONE_COLLECTIONS', 'Bone Collections', 'Only bones belonging to the selected bone collections and their '
'ancestors will be exported.'),
)
)
bone_group_list: CollectionProperty(type=PSX_PG_bone_group_list_item)
bone_group_list_index: IntProperty(default=0, name='', description='')
bone_collection_list: CollectionProperty(type=PSX_PG_bone_collection_list_item)
bone_collection_list_index: IntProperty(default=0, name='', description='')
should_enforce_bone_name_restrictions: BoolProperty(
default=False,
name='Enforce Bone Name Restrictions',
@ -145,7 +201,7 @@ def filter_sequences(pg: PSA_PG_export, sequences) -> List[int]:
if not pg.sequence_filter_asset:
for i, sequence in enumerate(sequences):
if hasattr(sequence, 'action') and sequence.action.asset_data is not None:
if hasattr(sequence, 'action') and sequence.action is not None and sequence.action.asset_data is not None:
flt_flags[i] &= ~bitflag_filter_item
if not pg.sequence_filter_pose_marker:
@ -164,5 +220,6 @@ def filter_sequences(pg: PSA_PG_export, sequences) -> List[int]:
classes = (
PSA_PG_export_action_list_item,
PSA_PG_export_timeline_markers,
PSA_PG_export_nla_strip_list_item,
PSA_PG_export,
)

View File

@ -16,7 +16,7 @@ class PSA_UL_export_sequences(UIList):
item = cast(PSA_PG_export_action_list_item, item)
is_pose_marker = hasattr(item, 'is_pose_marker') and item.is_pose_marker
layout.prop(item, 'is_selected', icon_only=True, text=item.name)
if hasattr(item, 'action') and item.action.asset_data is not None:
if hasattr(item, 'action') and item.action is not None and item.action.asset_data is not None:
layout.label(text='', icon='ASSET_MANAGER')
row = layout.row(align=True)

View File

@ -166,6 +166,8 @@ class PSA_OT_import(Operator, ImportHelper):
options.should_write_keyframes = pg.should_write_keyframes
options.should_convert_to_samples = pg.should_convert_to_samples
options.bone_mapping_mode = pg.bone_mapping_mode
options.fps_source = pg.fps_source
options.fps_custom = pg.fps_custom
if len(sequence_names) == 0:
self.report({'ERROR_INVALID_CONTEXT'}, 'No sequences selected')
@ -239,6 +241,10 @@ class PSA_OT_import(Operator, ImportHelper):
col.use_property_decorate = False
col.prop(pg, 'should_convert_to_samples')
col.separator()
# FPS
col.prop(pg, 'fps_source')
if pg.fps_source == 'CUSTOM':
col.prop(pg, 'fps_custom')
col = layout.column(heading='Options')
col.use_property_split = True

View File

@ -2,7 +2,8 @@ import re
from fnmatch import fnmatch
from typing import List
from bpy.props import StringProperty, BoolProperty, CollectionProperty, IntProperty, PointerProperty, EnumProperty
from bpy.props import StringProperty, BoolProperty, CollectionProperty, IntProperty, PointerProperty, EnumProperty, \
FloatProperty
from bpy.types import PropertyGroup, Text
empty_set = set()
@ -66,6 +67,21 @@ class PSA_PG_import(PropertyGroup):
'\'root\' can be mapped to the armature bone \'Root\')', 'CASE_INSENSITIVE', 1),
)
)
fps_source: EnumProperty(name='FPS Source', items=(
('SEQUENCE', 'Sequence', 'The sequence frame rate matches the original frame rate', 'ACTION', 0),
('SCENE', 'Scene', 'The sequence frame rate dilates to match that of the scene', 'SCENE_DATA', 1),
('CUSTOM', 'Custom', 'The sequence frame rate dilates to match a custom frame rate', 2),
))
fps_custom: FloatProperty(
default=30.0,
name='Custom FPS',
description='The frame rate to which the imported actions will be converted',
options=empty_set,
min=1.0,
soft_min=1.0,
soft_max=60.0,
step=100,
)
def filter_sequences(pg: PSA_PG_import, sequences) -> List[int]:

View File

@ -21,6 +21,8 @@ class PsaImportOptions(object):
self.action_name_prefix = ''
self.should_convert_to_samples = False
self.bone_mapping_mode = 'CASE_INSENSITIVE'
self.fps_source = 'SEQUENCE'
self.fps_custom: float = 30.0
class ImportBone(object):
@ -29,9 +31,9 @@ class ImportBone(object):
self.parent: Optional[ImportBone] = None
self.armature_bone = None
self.pose_bone = None
self.orig_loc: Vector = Vector()
self.orig_quat: Quaternion = Quaternion()
self.post_quat: Quaternion = Quaternion()
self.original_location: Vector = Vector()
self.original_rotation: Quaternion = Quaternion()
self.post_rotation: Quaternion = Quaternion()
self.fcurves: List[FCurve] = []
@ -39,17 +41,17 @@ def _calculate_fcurve_data(import_bone: ImportBone, key_data: typing.Iterable[fl
# Convert world-space transforms to local-space transforms.
key_rotation = Quaternion(key_data[0:4])
key_location = Vector(key_data[4:])
q = import_bone.post_quat.copy()
q.rotate(import_bone.orig_quat)
q = import_bone.post_rotation.copy()
q.rotate(import_bone.original_rotation)
quat = q
q = import_bone.post_quat.copy()
q = import_bone.post_rotation.copy()
if import_bone.parent is None:
q.rotate(key_rotation.conjugated())
else:
q.rotate(key_rotation)
quat.rotate(q.conjugated())
loc = key_location - import_bone.orig_loc
loc.rotate(import_bone.post_quat.conjugated())
loc = key_location - import_bone.original_location
loc.rotate(import_bone.post_rotation.conjugated())
return quat.w, quat.x, quat.y, quat.z, loc.x, loc.y, loc.z
@ -140,23 +142,16 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
if armature_bone.parent is not None and armature_bone.parent.name in psa_bone_names:
import_bone.parent = import_bones_dict[armature_bone.parent.name]
# Calculate the original location & rotation of each bone (in world-space maybe?)
if armature_bone.get('orig_quat') is not None:
# TODO: ideally we don't rely on bone auxiliary data like this, the non-aux data path is incorrect
# (animations are flipped 180 around Z)
import_bone.orig_quat = Quaternion(armature_bone['orig_quat'])
import_bone.orig_loc = Vector(armature_bone['orig_loc'])
import_bone.post_quat = Quaternion(armature_bone['post_quat'])
if import_bone.parent is not None:
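# Bone.matrix_local is in armature space; express the bind-pose location and
# rotation of this bone relative to its parent.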
import_bone.original_location = armature_bone.matrix_local.translation - armature_bone.parent.matrix_local.translation
import_bone.original_location.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
import_bone.original_rotation = armature_bone.matrix_local.to_quaternion()
import_bone.original_rotation.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
import_bone.original_rotation.conjugate()
else:
if import_bone.parent is not None:
import_bone.orig_loc = armature_bone.matrix_local.translation - armature_bone.parent.matrix_local.translation
import_bone.orig_loc.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
import_bone.orig_quat = armature_bone.matrix_local.to_quaternion()
import_bone.orig_quat.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
import_bone.orig_quat.conjugate()
else:
import_bone.orig_loc = armature_bone.matrix_local.translation.copy()
import_bone.orig_quat = armature_bone.matrix_local.to_quaternion()
import_bone.post_quat = import_bone.orig_quat.conjugated()
import_bone.original_location = armature_bone.matrix_local.translation.copy()
import_bone.original_rotation = armature_bone.matrix_local.to_quaternion()
import_bone.post_rotation = import_bone.original_rotation.conjugated()
context.window_manager.progress_begin(0, len(sequences))
@ -172,6 +167,19 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
else:
action = bpy.data.actions.new(name=action_name)
# Calculate the target FPS.
target_fps = sequence.fps
if options.fps_source == 'CUSTOM':
target_fps = options.fps_custom
elif options.fps_source == 'SCENE':
target_fps = context.scene.render.fps
elif options.fps_source == 'SEQUENCE':
target_fps = sequence.fps
else:
raise ValueError(f'Unknown FPS source: {options.fps_source}')
keyframe_time_dilation = target_fps / sequence.fps
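# Worked example with hypothetical numbers: a 30 fps PSA sequence imported into a
# 60 fps scene gives a dilation of 60 / 30 = 2.0, so sample i is keyed on frame
# i * 2 and one second of source animation still plays back in one second.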
if options.should_write_keyframes:
# Remove existing f-curves (replace with action.fcurves.clear() in Blender 3.2)
while len(action.fcurves) > 0:
@ -208,7 +216,7 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
# Write the keyframes out.
fcurve_data = numpy.zeros(2 * sequence.frame_count, dtype=float)
fcurve_data[0::2] = range(sequence.frame_count)
fcurve_data[0::2] = [x * keyframe_time_dilation for x in range(sequence.frame_count)]
for bone_index, import_bone in enumerate(import_bones):
if import_bone is None:
continue
@ -216,6 +224,8 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
fcurve_data[1::2] = sequence_data_matrix[:, bone_index, fcurve_index]
fcurve.keyframe_points.add(sequence.frame_count)
fcurve.keyframe_points.foreach_set('co', fcurve_data)
for fcurve_keyframe in fcurve.keyframe_points:
fcurve_keyframe.interpolation = 'LINEAR'
if options.should_convert_to_samples:
# Bake the curve to samples.
@ -224,7 +234,7 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
# Write meta-data.
if options.should_write_metadata:
action['psa_sequence_fps'] = sequence.fps
action.psa_export.fps = target_fps
action.use_fake_user = options.should_use_fake_user

View File

@ -15,7 +15,7 @@ class PskInputObjects(object):
class PskBuildOptions(object):
def __init__(self):
self.bone_filter_mode = 'ALL'
self.bone_group_indices: List[int] = []
self.bone_collection_indices: List[int] = []
self.use_raw_mesh_data = True
self.material_names: List[str] = []
self.should_enforce_bone_name_restrictions = False
@ -58,11 +58,17 @@ def get_psk_input_objects(context) -> PskInputObjects:
return input_objects
def build_psk(context, options: PskBuildOptions) -> Psk:
input_objects = get_psk_input_objects(context)
class PskBuildResult(object):
def __init__(self):
self.psk = None
self.warnings = []
def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
input_objects = get_psk_input_objects(context)
armature_object: bpy.types.Object = input_objects.armature_object
result = PskBuildResult()
psk = Psk()
bones = []
@ -78,7 +84,7 @@ def build_psk(context, options: PskBuildOptions) -> Psk:
psk_bone.rotation = Quaternion.identity()
psk.bones.append(psk_bone)
else:
bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_group_indices)
bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_collection_indices)
armature_data = typing.cast(Armature, armature_object.data)
bones = [armature_data.bones[bone_name] for bone_name in bone_names]
@ -88,7 +94,11 @@ def build_psk(context, options: PskBuildOptions) -> Psk:
for bone in bones:
psk_bone = Psk.Bone()
psk_bone.name = bytes(bone.name, encoding='windows-1252')
try:
psk_bone.name = bytes(bone.name, encoding='windows-1252')
except UnicodeEncodeError:
raise RuntimeError(
f'Bone name "{bone.name}" contains characters that cannot be encoded in the Windows-1252 codepage')
psk_bone.flags = 0
psk_bone.children_count = 0
@ -129,14 +139,17 @@ def build_psk(context, options: PskBuildOptions) -> Psk:
for material_name in material_names:
psk_material = Psk.Material()
psk_material.name = bytes(material_name, encoding='windows-1252')
try:
psk_material.name = bytes(material_name, encoding='windows-1252')
except UnicodeEncodeError:
raise RuntimeError(f'Material name "{material_name}" contains characters that cannot be encoded in the Windows-1252 codepage')
psk_material.texture_index = len(psk.materials)
psk.materials.append(psk_material)
for input_mesh_object in input_objects.mesh_objects:
# MATERIALS
material_indices = [material_names.index(material.name) for material in input_mesh_object.data.materials]
material_indices = [material_names.index(material_slot.material.name) for material_slot in input_mesh_object.material_slots]
# MESH DATA
if options.use_raw_mesh_data:
@ -161,6 +174,10 @@ def build_psk(context, options: PskBuildOptions) -> Psk:
mesh_object = bpy.data.objects.new('', mesh_data)
mesh_object.matrix_world = input_mesh_object.matrix_world
scale = (input_mesh_object.scale.x, input_mesh_object.scale.y, input_mesh_object.scale.z)
if any(map(lambda x: x < 0, scale)):
result.warnings.append(f'Mesh "{input_mesh_object.name}" has negative scaling which may result in inverted normals.')
# Copy the vertex groups
for vertex_group in input_mesh_object.vertex_groups:
mesh_object.vertex_groups.new(name=vertex_group.name)
@ -271,4 +288,6 @@ def build_psk(context, options: PskBuildOptions) -> Psk:
bpy.data.meshes.remove(mesh_data)
del mesh_data
return psk
result.psk = psk
return result

View File

@ -4,14 +4,14 @@ from bpy_extras.io_utils import ExportHelper
from ..builder import build_psk, PskBuildOptions, get_psk_input_objects
from ..writer import write_psk
from ...helpers import populate_bone_group_list
from ...helpers import populate_bone_collection_list
def is_bone_filter_mode_item_available(context, identifier):
input_objects = get_psk_input_objects(context)
armature_object = input_objects.armature_object
if identifier == 'BONE_GROUPS':
if not armature_object or not armature_object.pose or not armature_object.pose.bone_groups:
if identifier == 'BONE_COLLECTIONS':
if armature_object is None or armature_object.data is None or len(armature_object.data.collections) == 0:
return False
# else if... you can set up other conditions if you add more options
return True
@ -22,10 +22,11 @@ def populate_material_list(mesh_objects, material_list):
material_names = []
for mesh_object in mesh_objects:
for i, material in enumerate(mesh_object.data.materials):
for i, material_slot in enumerate(mesh_object.material_slots):
material = material_slot.material
# TODO: put this in the poll arg?
if material is None:
raise RuntimeError('Material cannot be empty (index ' + str(i) + ')')
raise RuntimeError('Material slot cannot be empty (index ' + str(i) + ')')
if material.name not in material_names:
material_names.append(material.name)
@ -94,8 +95,7 @@ class PSK_OT_export(Operator, ExportHelper):
pg = getattr(context.scene, 'psk_export')
# Populate bone groups list.
populate_bone_group_list(input_objects.armature_object, pg.bone_group_list)
populate_bone_collection_list(input_objects.armature_object, pg.bone_collection_list)
try:
populate_material_list(input_objects.mesh_objects, pg.material_list)
@ -136,10 +136,10 @@ class PSK_OT_export(Operator, ExportHelper):
item_layout.prop_enum(pg, 'bone_filter_mode', item.identifier)
item_layout.enabled = is_bone_filter_mode_item_available(context, identifier)
if pg.bone_filter_mode == 'BONE_GROUPS':
if pg.bone_filter_mode == 'BONE_COLLECTIONS':
row = box.row()
rows = max(3, min(len(pg.bone_group_list), 10))
row.template_list('PSX_UL_bone_group_list', '', pg, 'bone_group_list', pg, 'bone_group_list_index', rows=rows)
rows = max(3, min(len(pg.bone_collection_list), 10))
row.template_list('PSX_UL_bone_collection_list', '', pg, 'bone_collection_list', pg, 'bone_collection_list_index', rows=rows)
box.prop(pg, 'should_enforce_bone_name_restrictions')
@ -147,7 +147,7 @@ class PSK_OT_export(Operator, ExportHelper):
box = layout.box()
box.label(text='Materials', icon='MATERIAL')
row = box.row()
rows = max(3, min(len(pg.bone_group_list), 10))
rows = max(3, min(len(pg.bone_collection_list), 10))
row.template_list('PSK_UL_materials', '', pg, 'material_list', pg, 'material_list_index', rows=rows)
col = row.column(align=True)
col.operator(PSK_OT_material_list_move_up.bl_idname, text='', icon='TRIA_UP')
@ -157,18 +157,24 @@ class PSK_OT_export(Operator, ExportHelper):
pg = context.scene.psk_export
options = PskBuildOptions()
options.bone_filter_mode = pg.bone_filter_mode
options.bone_group_indices = [x.index for x in pg.bone_group_list if x.is_selected]
options.bone_collection_indices = [x.index for x in pg.bone_collection_list if x.is_selected]
options.use_raw_mesh_data = pg.use_raw_mesh_data
options.material_names = [m.material_name for m in pg.material_list]
options.should_enforce_bone_name_restrictions = pg.should_enforce_bone_name_restrictions
try:
psk = build_psk(context, options)
write_psk(psk, self.filepath)
self.report({'INFO'}, f'PSK export successful')
result = build_psk(context, options)
for warning in result.warnings:
self.report({'WARNING'}, warning)
write_psk(result.psk, self.filepath)
if len(result.warnings) > 0:
self.report({'WARNING'}, f'PSK export successful with {len(result.warnings)} warnings')
else:
self.report({'INFO'}, f'PSK export successful')
except RuntimeError as e:
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
return {'CANCELLED'}
return {'FINISHED'}

View File

@ -1,7 +1,7 @@
from bpy.props import EnumProperty, CollectionProperty, IntProperty, BoolProperty, StringProperty
from bpy.types import PropertyGroup
from ...types import PSX_PG_bone_group_list_item
from ...types import PSX_PG_bone_collection_list_item
class PSK_PG_material_list_item(PropertyGroup):
@ -16,12 +16,12 @@ class PSK_PG_export(PropertyGroup):
description='',
items=(
('ALL', 'All', 'All bones will be exported'),
('BONE_GROUPS', 'Bone Groups',
'Only bones belonging to the selected bone groups and their ancestors will be exported')
('BONE_COLLECTIONS', 'Bone Collections',
'Only bones belonging to the selected bone collections and their ancestors will be exported')
)
)
bone_group_list: CollectionProperty(type=PSX_PG_bone_group_list_item)
bone_group_list_index: IntProperty(default=0)
bone_collection_list: CollectionProperty(type=PSX_PG_bone_collection_list_item)
bone_collection_list_index: IntProperty(default=0)
use_raw_mesh_data: BoolProperty(default=False, name='Raw Mesh Data', description='No modifiers will be evaluated as part of the exported mesh')
material_list: CollectionProperty(type=PSK_PG_material_list_item)
material_list_index: IntProperty(default=0)

View File

@ -39,9 +39,9 @@ class ImportBone:
self.world_rotation_matrix: Matrix = Matrix()
self.world_matrix: Matrix = Matrix()
self.vertex_group = None
self.orig_quat: Quaternion = Quaternion()
self.orig_loc: Vector = Vector()
self.post_quat: Quaternion = Quaternion()
self.original_rotation: Quaternion = Quaternion()
self.original_location: Vector = Vector()
self.post_rotation: Quaternion = Quaternion()
class PskImportResult:
@ -111,12 +111,6 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
edit_bone_matrix.translation = import_bone.world_matrix.translation
edit_bone.matrix = edit_bone_matrix
# Store bind pose information in the bone's custom properties.
# This information is used when importing animations from PSA files.
edit_bone['orig_quat'] = import_bone.local_rotation
edit_bone['orig_loc'] = import_bone.local_translation
edit_bone['post_quat'] = import_bone.local_rotation.conjugated()
# MESH
if options.should_import_mesh:
mesh_data = bpy.data.meshes.new(options.name)
@ -131,7 +125,7 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
# Material already exists, just re-use it.
material = bpy.data.materials[material_name]
elif is_bdk_addon_loaded() and psk.has_material_references:
# Material does not yet exist and we have the BDK addon installed.
# Material does not yet exist, and we have the BDK addon installed.
# Attempt to load it using BDK addon's operator.
material_reference = psk.material_references[material_index]
if material_reference and bpy.ops.bdk.link_material(reference=material_reference) == {'FINISHED'}:
@ -228,12 +222,13 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
# VERTEX NORMALS
if psk.has_vertex_normals and options.should_import_vertex_normals:
mesh_data.polygons.foreach_set("use_smooth", [True] * len(mesh_data.polygons))
mesh_data.polygons.foreach_set('use_smooth', [True] * len(mesh_data.polygons))
normals = []
for vertex_normal in psk.vertex_normals:
normals.append(tuple(vertex_normal))
mesh_data.normals_split_custom_set_from_vertices(normals)
mesh_data.use_auto_smooth = True
else:
mesh_data.shade_smooth()
bm.normal_update()
bm.free()

View File

@ -2,7 +2,7 @@ from bpy.props import StringProperty, IntProperty, BoolProperty, FloatProperty
from bpy.types import PropertyGroup, UIList, UILayout, Context, AnyType, Panel
class PSX_UL_bone_group_list(UIList):
class PSX_UL_bone_collection_list(UIList):
def draw_item(self, context: Context, layout: UILayout, data: AnyType, item: AnyType, icon: int,
active_data: AnyType, active_property: str, index: int = 0, flt_flag: int = 0):
@ -11,7 +11,7 @@ class PSX_UL_bone_group_list(UIList):
row.label(text=str(getattr(item, 'count')), icon='BONE_DATA')
class PSX_PG_bone_group_list_item(PropertyGroup):
class PSX_PG_bone_collection_list_item(PropertyGroup):
name: StringProperty()
index: IntProperty()
count: IntProperty()
@ -21,6 +21,7 @@ class PSX_PG_bone_group_list_item(PropertyGroup):
class PSX_PG_action_export(PropertyGroup):
compression_ratio: FloatProperty(name='Compression Ratio', default=1.0, min=0.0, max=1.0, subtype='FACTOR', description='The key sampling ratio of the exported sequence.\n\nA compression ratio of 1.0 will export all frames, while a compression ratio of 0.5 will export half of the frames')
key_quota: IntProperty(name='Key Quota', default=0, min=1, description='The minimum number of frames to be exported')
fps: FloatProperty(name='FPS', default=30.0, min=0.0, description='The frame rate of the exported sequence')
class PSX_PT_action(Panel):
@ -38,13 +39,17 @@ class PSX_PT_action(Panel):
def draw(self, context: 'Context'):
action = context.active_action
layout = self.layout
layout.prop(action.psa_export, 'compression_ratio')
layout.prop(action.psa_export, 'key_quota')
flow = layout.grid_flow(columns=1)
flow.use_property_split = True
flow.use_property_decorate = False
flow.prop(action.psa_export, 'compression_ratio')
flow.prop(action.psa_export, 'key_quota')
flow.prop(action.psa_export, 'fps')
classes = (
PSX_PG_action_export,
PSX_PG_bone_group_list_item,
PSX_UL_bone_group_list,
PSX_PG_bone_collection_list_item,
PSX_UL_bone_collection_list,
PSX_PT_action
)