mirror of https://github.com/DarklightGames/io_scene_psk_psa.git
synced 2025-02-17 10:08:31 +01:00
Merge branch 'blender-4.1' into scale_keys
# Conflicts:
#	io_scene_psk_psa/psa/import_/operators.py
#	io_scene_psk_psa/psa/importer.py
Commit 9d3ef50907
@@ -1,15 +1,15 @@
 from bpy.app.handlers import persistent

 bl_info = {
-    "name": "PSK/PSA Importer/Exporter",
-    "author": "Colin Basnett, Yurii Ti",
-    "version": (6, 1, 1),
-    "blender": (4, 0, 0),
-    "description": "PSK/PSA Import/Export (.psk/.psa)",
-    "warning": "",
-    "doc_url": "https://github.com/DarklightGames/io_scene_psk_psa",
-    "tracker_url": "https://github.com/DarklightGames/io_scene_psk_psa/issues",
-    "category": "Import-Export"
+    'name': 'PSK/PSA Importer/Exporter',
+    'author': 'Colin Basnett, Yurii Ti',
+    'version': (6, 2, 0),
+    'blender': (4, 0, 0),
+    'description': 'PSK/PSA Import/Export (.psk/.psa)',
+    'warning': '',
+    'doc_url': 'https://github.com/DarklightGames/io_scene_psk_psa',
+    'tracker_url': 'https://github.com/DarklightGames/io_scene_psk_psa/issues',
+    'category': 'Import-Export'
 }

 if 'bpy' in locals():

@@ -24,6 +24,8 @@ if 'bpy' in locals():
     importlib.reload(psk_writer)
     importlib.reload(psk_builder)
     importlib.reload(psk_importer)
+    importlib.reload(psk_properties)
+    importlib.reload(psk_ui)
     importlib.reload(psk_export_properties)
     importlib.reload(psk_export_operators)
     importlib.reload(psk_export_ui)

@@ -34,6 +36,7 @@ if 'bpy' in locals():
     importlib.reload(psa_reader)
     importlib.reload(psa_writer)
     importlib.reload(psa_builder)
     importlib.reload(psa_importer)
     importlib.reload(psa_export_properties)
     importlib.reload(psa_export_operators)
     importlib.reload(psa_export_ui)

@@ -50,6 +53,8 @@ else:
     from .psk import writer as psk_writer
     from .psk import builder as psk_builder
     from .psk import importer as psk_importer
+    from .psk import properties as psk_properties
+    from .psk import ui as psk_ui
     from .psk.export import properties as psk_export_properties
     from .psk.export import operators as psk_export_operators
     from .psk.export import ui as psk_export_ui

@@ -72,6 +77,8 @@ import bpy
 from bpy.props import PointerProperty

 classes = psx_types.classes +\
+          psk_properties.classes +\
+          psk_ui.classes +\
           psk_import_operators.classes +\
           psk_export_properties.classes +\
           psk_export_operators.classes +\

@@ -107,6 +114,7 @@ def register():
     bpy.types.TOPBAR_MT_file_import.append(psk_import_menu_func)
     bpy.types.TOPBAR_MT_file_export.append(psa_export_menu_func)
     bpy.types.TOPBAR_MT_file_import.append(psa_import_menu_func)
+    bpy.types.Material.psk = PointerProperty(type=psk_properties.PSX_PG_material)
     bpy.types.Scene.psa_import = PointerProperty(type=psa_import_properties.PSA_PG_import)
     bpy.types.Scene.psa_export = PointerProperty(type=psa_export_properties.PSA_PG_export)
     bpy.types.Scene.psk_export = PointerProperty(type=psk_export_properties.PSK_PG_export)

@@ -114,6 +122,7 @@ def register():

 def unregister():
+    del bpy.types.Material.psk
     del bpy.types.Scene.psa_import
     del bpy.types.Scene.psa_export
     del bpy.types.Scene.psk_export
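Note on the reload block above: this is the standard Blender add-on pattern for development reloads. When the add-on is re-registered in the same session, 'bpy' is already in locals(), so each submodule must be explicitly reloaded for code changes to take effect; the new properties and ui modules therefore have to be added to both the reload list and the import list. A minimal sketch of the pattern (the module name here is a placeholder, not part of this add-on):

    # Hypothetical minimal __init__.py illustrating the reload-on-re-register pattern.
    if 'bpy' in locals():
        import importlib
        importlib.reload(example_module)  # placeholder submodule name
    else:
        from . import example_module
    import bpy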
@@ -30,12 +30,12 @@ def get_nla_strips_in_frame_range(animation_data: AnimData, frame_min: float, fr


 def populate_bone_collection_list(armature_object: Object, bone_collection_list: bpy.props.CollectionProperty) -> None:
-    """
+    '''
     Updates the bone collections collection.

     Bone collection selections are preserved between updates unless none of the groups were previously selected;
     otherwise, all collections are selected by default.
-    """
+    '''
     has_selected_collections = any([g.is_selected for g in bone_collection_list])
     unassigned_collection_is_selected, selected_assigned_collection_names = True, []

@@ -84,7 +84,7 @@ def check_bone_names(bone_names: Iterable[str]):


 def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_collection_indices: List[int]) -> List[str]:
-    """
+    '''
     Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone collections.

     Note that the ancestors of bones within the bone collections will also be present in the returned list.

@@ -93,7 +93,7 @@ def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_c
     :param bone_filter_mode: One of ['ALL', 'BONE_COLLECTIONS']
     :param bone_collection_indices: List of bone collection indices to be exported.
     :return: A sorted list of bone indices that should be exported.
-    """
+    '''
     if armature_object is None or armature_object.type != 'ARMATURE':
         raise ValueError('An armature object must be supplied')
@@ -41,7 +41,7 @@ def _load_config_file(file_path: str) -> ConfigParser:
 def _get_bone_flags_from_value(value: str) -> int:
     match value:
         case 'all':
-            return (REMOVE_TRACK_LOCATION | REMOVE_TRACK_ROTATION)
+            return REMOVE_TRACK_LOCATION | REMOVE_TRACK_ROTATION
         case 'trans':
             return REMOVE_TRACK_LOCATION
         case 'rot':
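For context on the hunk above: REMOVE_TRACK_LOCATION and REMOVE_TRACK_ROTATION are independent bit flags, so 'all' is simply their bitwise OR and the parentheses were redundant. A sketch with assumed values (the actual constants are defined elsewhere in this module):

    REMOVE_TRACK_LOCATION = 1 << 0  # assumed value, for illustration only
    REMOVE_TRACK_ROTATION = 1 << 1  # assumed value, for illustration only

    flags = REMOVE_TRACK_LOCATION | REMOVE_TRACK_ROTATION  # 'all'
    assert flags & REMOVE_TRACK_LOCATION and flags & REMOVE_TRACK_ROTATION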
@@ -4,10 +4,10 @@ from typing import List

 from ..data import *

-"""
+'''
 Note that keys are not stored within the Psa object.
 Use the PsaReader::get_sequence_keys to get the keys for a sequence.
-"""
+'''


 class Psa:
@@ -47,7 +47,7 @@ def update_actions_and_timeline_markers(context: Context, armature: Armature):
         if not is_action_for_armature(armature, action):
             continue

-        if not action.name.startswith('#'):
+        if action.name != '' and not action.name.startswith('#'):
             for (name, frame_start, frame_end) in get_sequences_from_action(action):
                 item = pg.action_list.add()
                 item.action = action

@@ -60,7 +60,7 @@ def update_actions_and_timeline_markers(context: Context, armature: Armature):
         # Pose markers are not guaranteed to be in frame-order, so make sure that they are.
         pose_markers = sorted(action.pose_markers, key=lambda x: x.frame)
         for pose_marker_index, pose_marker in enumerate(pose_markers):
-            if pose_marker.name.startswith('#'):
+            if pose_marker.name.strip() == '' or pose_marker.name.startswith('#'):
                 continue
             for (name, frame_start, frame_end) in get_sequences_from_action_pose_marker(action, pose_markers, pose_marker, pose_marker_index):
                 item = pg.action_list.add()

@@ -78,7 +78,7 @@ def update_actions_and_timeline_markers(context: Context, armature: Armature):
     for marker_name in marker_names:
         if marker_name not in sequence_frame_ranges:
             continue
-        if marker_name.startswith('#'):
+        if marker_name.strip() == '' or marker_name.startswith('#'):
             continue
         frame_start, frame_end = sequence_frame_ranges[marker_name]
         sequences = get_sequences_from_name_and_frame_range(marker_name, frame_start, frame_end)

@@ -91,15 +91,16 @@ def update_actions_and_timeline_markers(context: Context, armature: Armature):


 def get_sequence_fps(context: Context, fps_source: str, fps_custom: float, actions: Iterable[Action]) -> float:
-    if fps_source == 'SCENE':
-        return context.scene.render.fps
-    elif fps_source == 'CUSTOM':
-        return fps_custom
-    elif fps_source == 'ACTION_METADATA':
-        # Get the minimum value of action metadata FPS values.
-        return min([action.psa_export.fps for action in actions])
-    else:
-        raise RuntimeError(f'Invalid FPS source "{fps_source}"')
+    match fps_source:
+        case 'SCENE':
+            return context.scene.render.fps
+        case 'CUSTOM':
+            return fps_custom
+        case 'ACTION_METADATA':
+            # Get the minimum value of action metadata FPS values.
+            return min([action.psa_export.fps for action in actions])
+        case _:
+            raise RuntimeError(f'Invalid FPS source "{fps_source}"')


 def get_animation_data_object(context: Context) -> Object:

@@ -110,7 +111,7 @@ def get_animation_data_object(context: Context) -> Object:
     if active_object.type != 'ARMATURE':
         raise RuntimeError('Selected object must be an Armature')

-    if pg.should_override_animation_data:
+    if pg.sequence_source != 'ACTIONS' and pg.should_override_animation_data:
         animation_data_object = pg.animation_data_override
     else:
         animation_data_object = active_object
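The three filter fixes above apply one rule in three places: a sequence name is skipped when it is empty (or blank) or starts with '#'. A hypothetical helper that captures the rule (not part of the add-on, shown only to make the convention explicit):

    def is_sequence_name_excluded(name: str) -> bool:
        # Blank names and names prefixed with '#' are excluded from the sequence lists.
        name = name.strip()
        return name == '' or name.startswith('#')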
@@ -30,21 +30,19 @@ class PSA_UL_export_sequences(UIList):
         pg = getattr(context.scene, 'psa_export')
         row = layout.row()
         subrow = row.row(align=True)
-        subrow.prop(pg, 'sequence_filter_name', text="")
-        subrow.prop(pg, 'sequence_use_filter_invert', text="", icon='ARROW_LEFTRIGHT')
-        # subrow.prop(pg, 'sequence_use_filter_sort_reverse', text='', icon='SORT_ASC')
+        subrow.prop(pg, 'sequence_filter_name', text='')
+        subrow.prop(pg, 'sequence_use_filter_invert', text='', icon='ARROW_LEFTRIGHT')

         if pg.sequence_source == 'ACTIONS':
             subrow = row.row(align=True)
             subrow.prop(pg, 'sequence_filter_asset', icon_only=True, icon='ASSET_MANAGER')
             subrow.prop(pg, 'sequence_filter_pose_marker', icon_only=True, icon='PMARKER')
-            subrow.prop(pg, 'sequence_filter_reversed', text="", icon='FRAME_PREV')
+            subrow.prop(pg, 'sequence_filter_reversed', text='', icon='FRAME_PREV')

     def filter_items(self, context, data, prop):
         pg = getattr(context.scene, 'psa_export')
         actions = getattr(data, prop)
         flt_flags = filter_sequences(pg, actions)
-        # flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(actions, 'name')
         flt_neworder = list(range(len(actions)))
         return flt_flags, flt_neworder
@@ -2,7 +2,7 @@ import os
 from pathlib import Path

 from bpy.props import StringProperty
-from bpy.types import Operator, Event, Context
+from bpy.types import Operator, Event, Context, FileHandler
 from bpy_extras.io_utils import ImportHelper

 from .properties import get_visible_sequences

@@ -89,23 +89,6 @@ class PSA_OT_import_sequences_deselect_all(Operator):
         return {'FINISHED'}


-class PSA_OT_import_select_file(Operator):
-    bl_idname = 'psa_import.select_file'
-    bl_label = 'Select'
-    bl_options = {'INTERNAL'}
-    bl_description = 'Select a PSA file from which to import animations'
-    filepath: StringProperty(subtype='FILE_PATH')
-    filter_glob: StringProperty(default="*.psa", options={'HIDDEN'})
-
-    def execute(self, context):
-        getattr(context.scene, 'psa_import').psa_file_path = self.filepath
-        return {"FINISHED"}
-
-    def invoke(self, context, event):
-        context.window_manager.fileselect_add(self)
-        return {"RUNNING_MODAL"}
-
-
 def load_psa_file(context, filepath: str):
     pg = context.scene.psa_import
     pg.sequence_list.clear()
@@ -208,69 +191,83 @@ class PSA_OT_import(Operator, ImportHelper):
         layout = self.layout
         pg = getattr(context.scene, 'psa_import')

-        if pg.psa_error:
-            row = layout.row()
-            row.label(text='Select a PSA file', icon='ERROR')
-        else:
-            box = layout.box()
-            box.label(text=f'Sequences ({len(pg.sequence_list)})', icon='ARMATURE_DATA')
-
-            # Select buttons.
-            rows = max(3, min(len(pg.sequence_list), 10))
-
-            row = box.row()
-            col = row.column()
-
-            row2 = col.row(align=True)
-            row2.label(text='Select')
-            row2.operator(PSA_OT_import_sequences_from_text.bl_idname, text='', icon='TEXT')
-            row2.operator(PSA_OT_import_sequences_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
-            row2.operator(PSA_OT_import_sequences_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
-
-            col = col.row()
-            col.template_list('PSA_UL_import_sequences', '', pg, 'sequence_list', pg, 'sequence_list_index', rows=rows)
-
-        col = layout.column(heading='')
-        col.use_property_split = True
-        col.use_property_decorate = False
-        col.prop(pg, 'should_overwrite')
-
-        col = layout.column(heading='Write')
-        col.use_property_split = True
-        col.use_property_decorate = False
-        col.prop(pg, 'should_write_keyframes')
-        col.prop(pg, 'should_write_metadata')
-
-        col = layout.column()
-        col.use_property_split = True
-        col.use_property_decorate = False
-        col.prop(pg, 'bone_mapping_mode')
-
-        if pg.should_write_keyframes:
-            col = layout.column(heading='Keyframes')
-            col.use_property_split = True
-            col.use_property_decorate = False
-            col.prop(pg, 'should_convert_to_samples')
-            col.separator()
-            # FPS
-            col.prop(pg, 'fps_source')
-            if pg.fps_source == 'CUSTOM':
-                col.prop(pg, 'fps_custom')
-
-        col = layout.column(heading='Options')
-        col.use_property_split = True
-        col.use_property_decorate = False
-        col.prop(pg, 'should_use_fake_user')
-        col.prop(pg, 'should_stash')
-        col.prop(pg, 'should_use_config_file')
-        col.prop(pg, 'should_use_action_name_prefix')
-        if pg.should_use_action_name_prefix:
-            col.prop(pg, 'action_name_prefix')
+        sequences_header, sequences_panel = layout.panel('sequences_panel_id', default_closed=False)
+        sequences_header.label(text='Sequences')
+
+        if sequences_panel:
+            if pg.psa_error:
+                row = sequences_panel.row()
+                row.label(text='Select a PSA file', icon='ERROR')
+            else:
+                # Select buttons.
+                rows = max(3, min(len(pg.sequence_list), 10))
+
+                row = sequences_panel.row()
+                col = row.column()
+
+                row2 = col.row(align=True)
+                row2.label(text='Select')
+                row2.operator(PSA_OT_import_sequences_from_text.bl_idname, text='', icon='TEXT')
+                row2.operator(PSA_OT_import_sequences_select_all.bl_idname, text='All', icon='CHECKBOX_HLT')
+                row2.operator(PSA_OT_import_sequences_deselect_all.bl_idname, text='None', icon='CHECKBOX_DEHLT')
+
+                col = col.row()
+                col.template_list('PSA_UL_import_sequences', '', pg, 'sequence_list', pg, 'sequence_list_index', rows=rows)
+
+            col = sequences_panel.column(heading='')
+            col.use_property_split = True
+            col.use_property_decorate = False
+            col.prop(pg, 'should_overwrite')
+            col.prop(pg, 'should_use_action_name_prefix')
+            if pg.should_use_action_name_prefix:
+                col.prop(pg, 'action_name_prefix')
+
+        data_header, data_panel = layout.panel('data_panel_id', default_closed=False)
+        data_header.label(text='Data')
+
+        if data_panel:
+            col = data_panel.column(heading='Write')
+            col.use_property_split = True
+            col.use_property_decorate = False
+            col.prop(pg, 'should_write_keyframes')
+            col.prop(pg, 'should_write_metadata')
+            # Scale Keys
+            col.prop(pg, 'should_write_scale_keys')
+
+            if pg.should_write_keyframes:
+                col = col.column(heading='Keyframes')
+                col.use_property_split = True
+                col.use_property_decorate = False
+                col.prop(pg, 'should_convert_to_samples')
+                col.separator()
+                # FPS
+                col.prop(pg, 'fps_source')
+                if pg.fps_source == 'CUSTOM':
+                    col.prop(pg, 'fps_custom')
+
+        advanced_header, advanced_panel = layout.panel('advanced_panel_id', default_closed=True)
+        advanced_header.label(text='Advanced')
+
+        if advanced_panel:
+            col = advanced_panel.column()
+            col.use_property_split = True
+            col.use_property_decorate = False
+            col.prop(pg, 'bone_mapping_mode')
+
+            col = advanced_panel.column(heading='Options')
+            col.use_property_split = True
+            col.use_property_decorate = False
+            col.prop(pg, 'should_use_fake_user')
+            col.prop(pg, 'should_stash')
+            col.prop(pg, 'should_use_config_file')


+class PSA_FH_import(FileHandler):
+    bl_idname = 'PSA_FH_import'
+    bl_label = 'File handler for Unreal PSA import'
+    bl_import_operator = 'psa_import.import'
+    bl_file_extensions = '.psa'
+
+    @classmethod
+    def poll_drop(cls, context: Context):
+        return context.area and context.area.type == 'VIEW_3D'
+
+
 classes = (

@@ -278,5 +275,5 @@ classes = (
     PSA_OT_import_sequences_deselect_all,
     PSA_OT_import_sequences_from_text,
     PSA_OT_import,
-    PSA_OT_import_select_file,
+    PSA_FH_import,
 )
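PSA_FH_import above uses the FileHandler API added in Blender 4.1: when a file with one of the listed extensions is dropped onto an area where poll_drop returns True, Blender invokes the operator named by bl_import_operator with the dropped file path; this is what makes the modal select-file operator removable. Like operators, the handler only takes effect once registered, which is why it replaces PSA_OT_import_select_file in the classes tuple. A sketch of the same mechanism for a hypothetical format (the idname and extension are placeholders):

    from bpy.types import Context, FileHandler

    class XYZ_FH_import(FileHandler):
        bl_idname = 'XYZ_FH_import'
        bl_label = 'File handler for XYZ import'
        bl_import_operator = 'import_scene.xyz'  # placeholder operator idname
        bl_file_extensions = '.xyz'              # placeholder extension

        @classmethod
        def poll_drop(cls, context: Context):
            # Accept drops over the 3D viewport only.
            return context.area and context.area.type == 'VIEW_3D'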
@@ -77,13 +77,13 @@ class PSA_PG_import(PropertyGroup):
     )
     fps_source: EnumProperty(name='FPS Source', items=(
         ('SEQUENCE', 'Sequence', 'The sequence frame rate matches the original frame rate', 'ACTION', 0),
-        ('SCENE', 'Scene', 'The sequence frame rate dilates to match that of the scene', 'SCENE_DATA', 1),
-        ('CUSTOM', 'Custom', 'The sequence frame rate dilates to match a custom frame rate', 2),
+        ('SCENE', 'Scene', 'The sequence is resampled to the frame rate of the scene', 'SCENE_DATA', 1),
+        ('CUSTOM', 'Custom', 'The sequence is resampled to a custom frame rate', 2),
     ))
     fps_custom: FloatProperty(
         default=30.0,
         name='Custom FPS',
-        description='The frame rate to which the imported actions will be converted',
+        description='The frame rate to which the imported sequences will be resampled to',
         options=empty_set,
         min=1.0,
         soft_min=1.0,
@@ -17,10 +17,10 @@ class PSA_UL_sequences(UIList):
         pg = getattr(context.scene, 'psa_import')
         row = layout.row()
         sub_row = row.row(align=True)
-        sub_row.prop(pg, 'sequence_filter_name', text="")
-        sub_row.prop(pg, 'sequence_use_filter_invert', text="", icon='ARROW_LEFTRIGHT')
-        sub_row.prop(pg, 'sequence_use_filter_regex', text="", icon='SORTBYEXT')
-        sub_row.prop(pg, 'sequence_filter_is_selected', text="", icon='CHECKBOX_HLT')
+        sub_row.prop(pg, 'sequence_filter_name', text='')
+        sub_row.prop(pg, 'sequence_use_filter_invert', text='', icon='ARROW_LEFTRIGHT')
+        sub_row.prop(pg, 'sequence_use_filter_regex', text='', icon='SORTBYEXT')
+        sub_row.prop(pg, 'sequence_filter_is_selected', text='', icon='CHECKBOX_HLT')

     def filter_items(self, context, data, property_):
         pg = getattr(context.scene, 'psa_import')
@@ -2,7 +2,7 @@ import typing
 from typing import List, Optional, Iterable

 import bpy
-import numpy
+import numpy as np
 from bpy.types import FCurve, Object, Context
 from mathutils import Vector, Quaternion

@@ -48,16 +48,16 @@ def _calculate_fcurve_data(import_bone: ImportBone, key_data: Iterable[float]):
     key_location = Vector(key_data[4:])
     q = import_bone.post_rotation.copy()
     q.rotate(import_bone.original_rotation)
-    quat = q
+    rotation = q
     q = import_bone.post_rotation.copy()
     if import_bone.parent is None:
         q.rotate(key_rotation.conjugated())
     else:
         q.rotate(key_rotation)
-    quat.rotate(q.conjugated())
-    loc = key_location - import_bone.original_location
-    loc.rotate(import_bone.post_rotation.conjugated())
-    return quat.w, quat.x, quat.y, quat.z, loc.x, loc.y, loc.z
+    rotation.rotate(q.conjugated())
+    location = key_location - import_bone.original_location
+    location.rotate(import_bone.post_rotation.conjugated())
+    return rotation.w, rotation.x, rotation.y, rotation.z, location.x, location.y, location.z


 class PsaImportResult:

@@ -81,6 +81,51 @@ def _get_armature_bone_index_for_psa_bone(psa_bone_name: str, armature_bone_name
         return armature_bone_index
     return None

+
+def _get_sample_frame_times(source_frame_count: int, frame_step: float) -> typing.Iterable[float]:
+    # TODO: for correctness, we should also emit the target frame time as well (because the last frame can be a
+    #  fractional frame).
+    time = 0.0
+    while time < source_frame_count - 1:
+        yield time
+        time += frame_step
+    yield source_frame_count - 1
+
+
+def _resample_sequence_data_matrix(sequence_data_matrix: np.ndarray, frame_step: float = 1.0) -> np.ndarray:
+    """
+    Resamples the sequence data matrix to the target frame count.
+
+    @param sequence_data_matrix: FxBx7 matrix where F is the number of frames, B is the number of bones, and X is the
+    number of data elements per bone.
+    @param frame_step: The step between frames in the resampled sequence.
+    @return: The resampled sequence data matrix, or sequence_data_matrix if no resampling is necessary.
+    """
+    if frame_step == 1.0:
+        # No resampling is necessary.
+        return sequence_data_matrix
+
+    source_frame_count, bone_count = sequence_data_matrix.shape[:2]
+    sample_frame_times = list(_get_sample_frame_times(source_frame_count, frame_step))
+    target_frame_count = len(sample_frame_times)
+    resampled_sequence_data_matrix = np.zeros((target_frame_count, bone_count, 7), dtype=float)
+
+    for sample_frame_index, sample_frame_time in enumerate(sample_frame_times):
+        frame_index = int(sample_frame_time)
+        if sample_frame_time % 1.0 == 0.0:
+            # Sample time has no fractional part, so just copy the frame.
+            resampled_sequence_data_matrix[sample_frame_index, :, :] = sequence_data_matrix[frame_index, :, :]
+        else:
+            # Sample time has a fractional part, so interpolate between two frames.
+            next_frame_index = frame_index + 1
+            for bone_index in range(bone_count):
+                source_frame_1_data = sequence_data_matrix[frame_index, bone_index, :]
+                source_frame_2_data = sequence_data_matrix[next_frame_index, bone_index, :]
+                factor = sample_frame_time - frame_index
+                q = Quaternion((source_frame_1_data[:4])).slerp(Quaternion((source_frame_2_data[:4])), factor)
+                q.normalize()
+                l = Vector(source_frame_1_data[4:]).lerp(Vector(source_frame_2_data[4:]), factor)
+                resampled_sequence_data_matrix[sample_frame_index, bone_index, :] = q.w, q.x, q.y, q.z, l.x, l.y, l.z
+
+    return resampled_sequence_data_matrix
+

 def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object, options: PsaImportOptions) -> PsaImportResult:
     result = PsaImportResult()
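To make the new resampling helpers concrete: _get_sample_frame_times steps through the source frame range by frame_step and always emits the final source frame, and _resample_sequence_data_matrix fills fractional sample times by slerping rotations and lerping locations between the two neighbouring source frames. For example, downsampling a 10-frame sequence (source frames 0..9) from 30 to 20 FPS gives frame_step = 30 / 20 = 1.5:

    >>> list(_get_sample_frame_times(source_frame_count=10, frame_step=1.5))
    [0.0, 1.5, 3.0, 4.5, 6.0, 7.5, 9]
    # 7 target frames; times 1.5, 4.5 and 7.5 are interpolated, the rest are copied.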
@@ -144,8 +189,10 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,

     for import_bone in filter(lambda x: x is not None, import_bones):
         armature_bone = import_bone.armature_bone
+
         if armature_bone.parent is not None and armature_bone.parent.name in psa_bone_names:
             import_bone.parent = import_bones_dict[armature_bone.parent.name]
+
         # Calculate the original location & rotation of each bone (in world-space maybe?)
         if import_bone.parent is not None:
             import_bone.original_location = armature_bone.matrix_local.translation - armature_bone.parent.matrix_local.translation

@@ -155,7 +202,8 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
             import_bone.original_rotation.conjugate()
         else:
             import_bone.original_location = armature_bone.matrix_local.translation.copy()
-            import_bone.original_rotation = armature_bone.matrix_local.to_quaternion()
+            import_bone.original_rotation = armature_bone.matrix_local.to_quaternion().conjugated()

         import_bone.post_rotation = import_bone.original_rotation.conjugated()

     context.window_manager.progress_begin(0, len(sequences))

@@ -178,21 +226,19 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
         action = bpy.data.actions.new(name=action_name)

         # Calculate the target FPS.
-        if options.fps_source == 'CUSTOM':
-            target_fps = options.fps_custom
-        elif options.fps_source == 'SCENE':
-            target_fps = context.scene.render.fps
-        elif options.fps_source == 'SEQUENCE':
-            target_fps = sequence.fps
-        else:
-            raise ValueError(f'Unknown FPS source: {options.fps_source}')
-
-        keyframe_time_dilation = target_fps / sequence.fps
+        match options.fps_source:
+            case 'CUSTOM':
+                target_fps = options.fps_custom
+            case 'SCENE':
+                target_fps = context.scene.render.fps
+            case 'SEQUENCE':
+                target_fps = sequence.fps
+            case _:
+                raise ValueError(f'Unknown FPS source: {options.fps_source}')

         if options.should_write_keyframes:
-            # Remove existing f-curves (replace with action.fcurves.clear() in Blender 3.2)
-            while len(action.fcurves) > 0:
-                action.fcurves.remove(action.fcurves[-1])
+            # Remove existing f-curves.
+            action.fcurves.clear()

             # Create f-curves for the rotation and location of each bone.
             for psa_bone_index, armature_bone_index in psa_to_armature_bone_indices.items():

@@ -234,19 +280,25 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
                     # Calculate the local-space key data for the bone.
                     sequence_data_matrix[frame_index, bone_index] = _calculate_fcurve_data(import_bone, key_data)

-            # Write the keyframes out.
-            fcurve_data = numpy.zeros(2 * sequence.frame_count, dtype=float)
-
-            # Populate the keyframe time data.
-            fcurve_data[0::2] = [x * keyframe_time_dilation for x in range(sequence.frame_count)]
+            # Resample the sequence data to the target FPS.
+            # If the target frame count is the same as the source frame count, this will be a no-op.
+            resampled_sequence_data_matrix = _resample_sequence_data_matrix(sequence_data_matrix,
+                                                                            frame_step=sequence.fps / target_fps)
+
+            # Write the keyframes out.
+            # Note that the f-curve data consists of alternating time and value data.
+            target_frame_count = resampled_sequence_data_matrix.shape[0]
+            fcurve_data = np.zeros(2 * target_frame_count, dtype=float)
+            fcurve_data[0::2] = range(0, target_frame_count)

             for bone_index, import_bone in enumerate(import_bones):
                 if import_bone is None:
                     continue
                 for fcurve_index, fcurve in enumerate(import_bone.fcurves):
                     if fcurve is None:
                         continue
-                    fcurve_data[1::2] = sequence_data_matrix[:, bone_index, fcurve_index]
-                    fcurve.keyframe_points.add(sequence.frame_count)
+                    fcurve_data[1::2] = resampled_sequence_data_matrix[:, bone_index, fcurve_index]
+                    fcurve.keyframe_points.add(target_frame_count)
                     fcurve.keyframe_points.foreach_set('co', fcurve_data)
                     for fcurve_keyframe in fcurve.keyframe_points:
                         fcurve_keyframe.interpolation = 'LINEAR'
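The write-out above leans on keyframe_points.foreach_set('co', ...), which accepts a flat sequence of alternating (frame, value) pairs; that is why fcurve_data puts times at even indices and channel values at odd indices. A standalone sketch of the layout (values are illustrative):

    import numpy as np

    target_frame_count = 3
    fcurve_data = np.zeros(2 * target_frame_count, dtype=float)
    fcurve_data[0::2] = range(target_frame_count)  # frame times [0, 1, 2]
    fcurve_data[1::2] = [0.5, 0.7, 0.9]            # one channel's values
    # fcurve_data == [0.0, 0.5, 1.0, 0.7, 2.0, 0.9], i.e. (frame, value) pairs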
@@ -12,15 +12,14 @@ def _try_fix_cue4parse_issue_103(sequences) -> bool:
     # The issue was that the frame_start_index was not being set correctly, and was always being set to the same value
     # as the frame_count.
     # This fix will eventually be deprecated as it is only necessary for files exported prior to the fix.
-    if len(sequences) > 0:
-        if sequences[0].frame_start_index == sequences[0].frame_count:
-            # Manually set the frame_start_index for each sequence. This assumes that the sequences are in order with
-            # no shared frames between sequences (all exporters that I know of do this, so it's a safe assumption).
-            frame_start_index = 0
-            for i, sequence in enumerate(sequences):
-                sequence.frame_start_index = frame_start_index
-                frame_start_index += sequence.frame_count
-            return True
+    if len(sequences) > 0 and sequences[0].frame_start_index == sequences[0].frame_count:
+        # Manually set the frame_start_index for each sequence. This assumes that the sequences are in order with
+        # no shared frames between sequences (all exporters that I know of do this, so it's a safe assumption).
+        frame_start_index = 0
+        for i, sequence in enumerate(sequences):
+            sequence.frame_start_index = frame_start_index
+            frame_start_index += sequence.frame_count
+        return True
     return False
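Concretely, the workaround above rebuilds each frame_start_index as a running sum of the preceding frame counts. For sequences with frame counts [10, 24, 16], the recomputed start indices are [0, 10, 34]:

    frame_counts = [10, 24, 16]  # illustrative sequence lengths
    start_indices = []
    frame_start_index = 0
    for frame_count in frame_counts:
        start_indices.append(frame_start_index)
        frame_start_index += frame_count
    assert start_indices == [0, 10, 34]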
@@ -1,15 +1,19 @@
+from typing import Optional
+
 import bmesh
 import bpy
-from bpy.types import Armature
+import numpy as np
+from bpy.types import Armature, Material

 from .data import *
+from .properties import triangle_type_and_bit_flags_to_poly_flags
 from ..helpers import *


 class PskInputObjects(object):
     def __init__(self):
         self.mesh_objects = []
-        self.armature_object = None
+        self.armature_object: Optional[Object] = None


 class PskBuildOptions(object):

@@ -17,7 +21,7 @@ class PskBuildOptions(object):
         self.bone_filter_mode = 'ALL'
         self.bone_collection_indices: List[int] = []
         self.use_raw_mesh_data = True
-        self.material_names: List[str] = []
+        self.materials: List[Material] = []
         self.should_enforce_bone_name_restrictions = False

@@ -61,7 +65,7 @@ def get_psk_input_objects(context) -> PskInputObjects:
 class PskBuildResult(object):
     def __init__(self):
         self.psk = None
-        self.warnings = []
+        self.warnings: List[str] = []


 def build_psk(context, options: PskBuildOptions) -> PskBuildResult:

@@ -72,9 +76,9 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
     psk = Psk()
     bones = []

-    if armature_object is None:
-        # If the mesh has no armature object, simply assign it a dummy bone at the root to satisfy the requirement
-        # that a PSK file must have at least one bone.
+    if armature_object is None or len(armature_object.data.bones) == 0:
+        # If the mesh has no armature object or no bones, simply assign it a dummy bone at the root to satisfy the
+        # requirement that a PSK file must have at least one bone.
         psk_bone = Psk.Bone()
         psk_bone.name = bytes('root', encoding='windows-1252')
         psk_bone.flags = 0

@@ -135,21 +139,25 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
         psk.bones.append(psk_bone)

     # MATERIALS
-    material_names = options.material_names
-
-    for material_name in material_names:
+    for material in options.materials:
         psk_material = Psk.Material()
         try:
-            psk_material.name = bytes(material_name, encoding='windows-1252')
+            psk_material.name = bytes(material.name, encoding='windows-1252')
         except UnicodeEncodeError:
-            raise RuntimeError(f'Material name "{material_name}" contains characters that cannot be encoded in the Windows-1252 codepage')
+            raise RuntimeError(f'Material name "{material.name}" contains characters that cannot be encoded in the Windows-1252 codepage')
         psk_material.texture_index = len(psk.materials)
+        psk_material.poly_flags = triangle_type_and_bit_flags_to_poly_flags(material.psk.mesh_triangle_type,
+                                                                            material.psk.mesh_triangle_bit_flags)
         psk.materials.append(psk_material)

     context.window_manager.progress_begin(0, len(input_objects.mesh_objects))

+    material_names = [m.name for m in options.materials]
+
     for object_index, input_mesh_object in enumerate(input_objects.mesh_objects):

+        should_flip_normals = False
+
         # MATERIALS
         material_indices = [material_names.index(material_slot.material.name) for material_slot in input_mesh_object.material_slots]

@@ -177,8 +185,16 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
         mesh_object.matrix_world = input_mesh_object.matrix_world

         scale = (input_mesh_object.scale.x, input_mesh_object.scale.y, input_mesh_object.scale.z)
         if any(map(lambda x: x < 0, scale)):
             result.warnings.append(f'Mesh "{input_mesh_object.name}" has negative scaling which may result in inverted normals.')

+            # Negative scaling in Blender results in inverted normals after the scale is applied. However, if the scale
+            # is not applied, the normals will appear unaffected in the viewport. The evaluated mesh data used in the
+            # export will have the scale applied, but this behavior is not obvious to the user.
+            #
+            # In order to have the exporter be as WYSIWYG as possible, we need to check for negative scaling and invert
+            # the normals if necessary. If two axes have negative scaling and the third has positive scaling, the
+            # normals will be correct. We can detect this by checking if the number of negative scaling axes is odd. If
+            # it is, we need to invert the normals of the mesh by swapping the order of the vertices in each face.
+            should_flip_normals = sum(1 for x in scale if x < 0) % 2 == 1
+
         # Copy the vertex groups
         for vertex_group in input_mesh_object.vertex_groups:
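The parity rule introduced above can be checked in isolation: an odd number of negative scale axes gives the scale matrix a negative determinant and therefore inverted winding, while two negatives cancel out:

    def needs_normal_flip(scale) -> bool:
        # Odd count of negative axes -> negative determinant -> inverted winding.
        return sum(1 for x in scale if x < 0) % 2 == 1

    assert needs_normal_flip((-1.0, 1.0, 1.0)) is True
    assert needs_normal_flip((-1.0, -1.0, 1.0)) is False
    assert needs_normal_flip((-1.0, -1.0, -1.0)) is True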
@@ -207,11 +223,11 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
         # Build a list of non-unique wedges.
         wedges = []
         for loop_index, loop in enumerate(mesh_data.loops):
-            wedge = Psk.Wedge()
-            wedge.point_index = loop.vertex_index + vertex_offset
-            wedge.u, wedge.v = uv_layer[loop_index].uv
-            wedge.v = 1.0 - wedge.v
-            wedges.append(wedge)
+            wedges.append(Psk.Wedge(
+                point_index=loop.vertex_index + vertex_offset,
+                u=uv_layer[loop_index].uv[0],
+                v=1.0 - uv_layer[loop_index].uv[1]
+            ))

         # Assign material indices to the wedges.
         for triangle in mesh_data.loop_triangles:

@@ -219,8 +235,8 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
             wedges[loop_index].material_index = material_indices[triangle.material_index]

         # Populate the list of wedges with unique wedges & build a look-up table of loop indices to wedge indices
-        wedge_indices = {}
-        loop_wedge_indices = [-1] * len(mesh_data.loops)
+        wedge_indices = dict()
+        loop_wedge_indices = np.full(len(mesh_data.loops), -1)
         for loop_index, wedge in enumerate(wedges):
             wedge_hash = hash(wedge)
             if wedge_hash in wedge_indices:

@@ -233,6 +249,7 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:

         # FACES
         poly_groups, groups = mesh_data.calc_smooth_groups(use_bitflags=True)
+        psk_face_start_index = len(psk.faces)
         for f in mesh_data.loop_triangles:
             face = Psk.Face()
             face.material_index = material_indices[f.material_index]

@@ -242,6 +259,11 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
             face.smoothing_groups = poly_groups[f.polygon_index]
             psk.faces.append(face)

+        if should_flip_normals:
+            # Invert the normals of the faces.
+            for face in psk.faces[psk_face_start_index:]:
+                face.wedge_indices[0], face.wedge_indices[2] = face.wedge_indices[2], face.wedge_indices[0]
+
         # WEIGHTS
         if armature_object is not None:
             armature_data = typing.cast(Armature, armature_object.data)
@@ -5,11 +5,11 @@ from ..data import *

 class Psk(object):
     class Wedge(object):
-        def __init__(self):
-            self.point_index: int = 0
-            self.u: float = 0.0
-            self.v: float = 0.0
-            self.material_index: int = 0
+        def __init__(self, point_index: int, u: float, v: float, material_index: int = 0):
+            self.point_index: int = point_index
+            self.u: float = u
+            self.v: float = v
+            self.material_index = material_index

         def __hash__(self):
             return hash(f'{self.point_index}-{self.u}-{self.v}-{self.material_index}')
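Wedge.__hash__ above derives the hash from all four fields, so wedges with identical point, UV and material hash alike; the builder relies on this to deduplicate wedges through a hash-to-index dict. The same pattern with plain tuples standing in for Psk.Wedge:

    wedge_indices = {}
    unique_wedges = []
    for wedge in [(0, 0.0, 0.0, 0), (1, 0.5, 0.5, 0), (0, 0.0, 0.0, 0)]:
        key = hash(wedge)
        if key not in wedge_indices:
            wedge_indices[key] = len(unique_wedges)
            unique_wedges.append(wedge)
    assert len(unique_wedges) == 2  # the duplicate wedge collapses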
@@ -20,19 +20,19 @@ def is_bone_filter_mode_item_available(context, identifier):
 def populate_material_list(mesh_objects, material_list):
     material_list.clear()

-    material_names = []
+    materials = []
     for mesh_object in mesh_objects:
         for i, material_slot in enumerate(mesh_object.material_slots):
             material = material_slot.material
             # TODO: put this in the poll arg?
             if material is None:
                 raise RuntimeError('Material slot cannot be empty (index ' + str(i) + ')')
-            if material.name not in material_names:
-                material_names.append(material.name)
+            if material not in materials:
+                materials.append(material)

-    for index, material_name in enumerate(material_names):
+    for index, material in enumerate(materials):
         m = material_list.add()
-        m.material_name = material_name
+        m.material = material
         m.index = index

@@ -51,7 +51,7 @@ class PSK_OT_material_list_move_up(Operator):
         pg = getattr(context.scene, 'psk_export')
         pg.material_list.move(pg.material_list_index, pg.material_list_index - 1)
         pg.material_list_index -= 1
-        return {"FINISHED"}
+        return {'FINISHED'}


 class PSK_OT_material_list_move_down(Operator):

@@ -69,7 +69,7 @@ class PSK_OT_material_list_move_down(Operator):
         pg = getattr(context.scene, 'psk_export')
         pg.material_list.move(pg.material_list_index, pg.material_list_index + 1)
         pg.material_list_index += 1
-        return {"FINISHED"}
+        return {'FINISHED'}


 class PSK_OT_export(Operator, ExportHelper):

@@ -159,9 +159,9 @@ class PSK_OT_export(Operator, ExportHelper):
         options.bone_filter_mode = pg.bone_filter_mode
         options.bone_collection_indices = [x.index for x in pg.bone_collection_list if x.is_selected]
         options.use_raw_mesh_data = pg.use_raw_mesh_data
-        options.material_names = [m.material_name for m in pg.material_list]
+        options.materials = [m.material for m in pg.material_list]
         options.should_enforce_bone_name_restrictions = pg.should_enforce_bone_name_restrictions

         try:
             result = build_psk(context, options)
             for warning in result.warnings:
@@ -1,18 +1,19 @@
-from bpy.props import EnumProperty, CollectionProperty, IntProperty, BoolProperty, StringProperty
-from bpy.types import PropertyGroup
+from bpy.props import EnumProperty, CollectionProperty, IntProperty, BoolProperty, PointerProperty
+from bpy.types import PropertyGroup, Material

 from ...types import PSX_PG_bone_collection_list_item

+empty_set = set()

 class PSK_PG_material_list_item(PropertyGroup):
-    material_name: StringProperty()
+    material: PointerProperty(type=Material)
     index: IntProperty()


 class PSK_PG_export(PropertyGroup):
     bone_filter_mode: EnumProperty(
         name='Bone Filter',
-        options=set(),
+        options=empty_set,
         description='',
         items=(
             ('ALL', 'All', 'All bones will be exported'),
@@ -4,7 +4,7 @@ from bpy.types import UIList
 class PSK_UL_materials(UIList):
     def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
         row = layout.row()
-        row.label(text=str(getattr(item, 'material_name')), icon='MATERIAL')
+        row.prop(item.material, 'name', text='', emboss=False, icon_value=layout.icon(item.material))


 classes = (
@@ -2,7 +2,7 @@ import os
 import sys

 from bpy.props import StringProperty, BoolProperty, EnumProperty, FloatProperty
-from bpy.types import Operator
+from bpy.types import Operator, FileHandler, Context
 from bpy_extras.io_utils import ImportHelper

 from ..importer import PskImportOptions, import_psk

@@ -11,6 +11,17 @@ from ..reader import read_psk
 empty_set = set()


+class PSK_FH_import(FileHandler):
+    bl_idname = 'PSK_FH_import'
+    bl_label = 'File handler for Unreal PSK/PSKX import'
+    bl_import_operator = 'import_scene.psk'
+    bl_file_extensions = '.psk;.pskx'
+
+    @classmethod
+    def poll_drop(cls, context: Context):
+        return context.area and context.area.type == 'VIEW_3D'
+
+
 class PSK_OT_import(Operator, ImportHelper):
     bl_idname = 'import_scene.psk'
     bl_label = 'Import'

@@ -27,8 +38,8 @@ class PSK_OT_import(Operator, ImportHelper):
     should_import_vertex_colors: BoolProperty(
         default=True,
         options=empty_set,
-        name='Vertex Colors',
-        description='Import vertex colors from PSKX files, if available'
+        name='Import Vertex Colors',
+        description='Import vertex colors, if available'
     )
     vertex_color_space: EnumProperty(
         name='Vertex Color Space',

@@ -42,13 +53,13 @@ class PSK_OT_import(Operator, ImportHelper):
     )
     should_import_vertex_normals: BoolProperty(
         default=True,
-        name='Vertex Normals',
+        name='Import Vertex Normals',
         options=empty_set,
         description='Import vertex normals, if available'
     )
     should_import_extra_uvs: BoolProperty(
         default=True,
-        name='Extra UVs',
+        name='Import Extra UVs',
         options=empty_set,
         description='Import extra UV maps, if available'
     )

@@ -63,12 +74,6 @@ class PSK_OT_import(Operator, ImportHelper):
         name='Import Materials',
         options=empty_set,
     )
-    should_reuse_materials: BoolProperty(
-        default=True,
-        name='Reuse Materials',
-        options=empty_set,
-        description='Existing materials with matching names will be reused when available'
-    )
     should_import_skeleton: BoolProperty(
         default=True,
         name='Import Skeleton',

@@ -82,14 +87,20 @@ class PSK_OT_import(Operator, ImportHelper):
         soft_min=1.0,
         name='Bone Length',
         options=empty_set,
         subtype='DISTANCE',
         description='Length of the bones'
     )
     should_import_shape_keys: BoolProperty(
         default=True,
-        name='Shape Keys',
+        name='Import Shape Keys',
         options=empty_set,
         description='Import shape keys, if available'
     )
+    scale: FloatProperty(
+        name='Scale',
+        default=1.0,
+        soft_min=0.0,
+    )

     def execute(self, context):
         psk = read_psk(self.filepath)

@@ -105,6 +116,11 @@ class PSK_OT_import(Operator, ImportHelper):
         options.bone_length = self.bone_length
         options.should_import_materials = self.should_import_materials
         options.should_import_shape_keys = self.should_import_shape_keys
+        options.scale = self.scale
+
+        if not options.should_import_mesh and not options.should_import_skeleton:
+            self.report({'ERROR'}, 'Nothing to import')
+            return {'CANCELLED'}

         result = import_psk(psk, context, options)

@@ -119,26 +135,42 @@ class PSK_OT_import(Operator, ImportHelper):

     def draw(self, context):
         layout = self.layout
-        layout.prop(self, 'should_import_materials')
-        layout.prop(self, 'should_import_mesh')
-        row = layout.column()
-        row.use_property_split = True
-        row.use_property_decorate = False
-        if self.should_import_mesh:
-            row.prop(self, 'should_import_vertex_normals')
-            row.prop(self, 'should_import_extra_uvs')
-            row.prop(self, 'should_import_vertex_colors')
-            if self.should_import_vertex_colors:
-                row.prop(self, 'vertex_color_space')
-            row.prop(self, 'should_import_shape_keys')
-        layout.prop(self, 'should_import_skeleton')
-        row = layout.column()
-        row.use_property_split = True
-        row.use_property_decorate = False
-        if self.should_import_skeleton:
-            row.prop(self, 'bone_length')
+
+        row = layout.row()
+
+        col = row.column()
+        col.use_property_split = True
+        col.use_property_decorate = False
+        col.prop(self, 'scale')
+
+        mesh_header, mesh_panel = layout.panel('mesh_panel_id', default_closed=False)
+        mesh_header.prop(self, 'should_import_mesh')
+
+        if mesh_panel and self.should_import_mesh:
+            row = mesh_panel.row()
+            col = row.column()
+            col.use_property_split = True
+            col.use_property_decorate = False
+            col.prop(self, 'should_import_materials', text='Materials')
+            col.prop(self, 'should_import_vertex_normals', text='Vertex Normals')
+            col.prop(self, 'should_import_extra_uvs', text='Extra UVs')
+            col.prop(self, 'should_import_vertex_colors', text='Vertex Colors')
+            if self.should_import_vertex_colors:
+                col.prop(self, 'vertex_color_space')
+            col.prop(self, 'should_import_shape_keys', text='Shape Keys')
+
+        skeleton_header, skeleton_panel = layout.panel('skeleton_panel_id', default_closed=False)
+        skeleton_header.prop(self, 'should_import_skeleton')
+
+        if skeleton_panel and self.should_import_skeleton:
+            row = skeleton_panel.row()
+            col = row.column()
+            col.use_property_split = True
+            col.use_property_decorate = False
+            col.prop(self, 'bone_length')


 classes = (
     PSK_OT_import,
+    PSK_FH_import,
 )
@@ -1,4 +1,3 @@
-from math import inf
 from typing import Optional, List

 import bmesh

@@ -8,6 +7,7 @@ from bpy.types import VertexGroup
 from mathutils import Quaternion, Vector, Matrix

 from .data import Psk
+from .properties import poly_flags_to_triangle_type_and_bit_flags
 from ..helpers import rgb_to_srgb, is_bdk_addon_loaded


@@ -17,19 +17,20 @@ class PskImportOptions:
         self.should_import_mesh = True
         self.should_reuse_materials = True
         self.should_import_vertex_colors = True
-        self.vertex_color_space = 'sRGB'
+        self.vertex_color_space = 'SRGB'
         self.should_import_vertex_normals = True
         self.should_import_extra_uvs = True
         self.should_import_skeleton = True
         self.should_import_shape_keys = True
         self.bone_length = 1.0
         self.should_import_materials = True
+        self.scale = 1.0


 class ImportBone:
-    """
+    '''
     Intermediate bone type for the purpose of construction.
-    """
+    '''
     def __init__(self, index: int, psk_bone: Psk.Bone):
         self.index: int = index
         self.psk_bone: Psk.Bone = psk_bone

@@ -52,6 +53,7 @@ class PskImportResult:
 def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
     result = PskImportResult()
     armature_object = None
+    mesh_object = None

     if options.should_import_skeleton:
         # ARMATURE

@@ -133,6 +135,9 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
             else:
                 # Just create a blank material.
                 material = bpy.data.materials.new(material_name)
+            mesh_triangle_type, mesh_triangle_bit_flags = poly_flags_to_triangle_type_and_bit_flags(psk_material.poly_flags)
+            material.psk.mesh_triangle_type = mesh_triangle_type
+            material.psk.mesh_triangle_bit_flags = mesh_triangle_bit_flags
             material.use_nodes = True
             mesh_data.materials.append(material)

@@ -144,6 +149,7 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:

         bm.verts.ensure_lookup_table()

+        # FACES
         invalid_face_indices = set()
         for face_index, face in enumerate(psk.faces):
             point_indices = map(lambda i: psk.wedges[i].point_index, reversed(face.wedge_indices))

@@ -164,61 +170,59 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
         bm.to_mesh(mesh_data)

         # TEXTURE COORDINATES
-        data_index = 0
+        uv_layer_data_index = 0
         uv_layer = mesh_data.uv_layers.new(name='VTXW0000')
         for face_index, face in enumerate(psk.faces):
             if face_index in invalid_face_indices:
                 continue
             face_wedges = [psk.wedges[i] for i in reversed(face.wedge_indices)]
             for wedge in face_wedges:
-                uv_layer.data[data_index].uv = wedge.u, 1.0 - wedge.v
-                data_index += 1
+                uv_layer.data[uv_layer_data_index].uv = wedge.u, 1.0 - wedge.v
+                uv_layer_data_index += 1

         # EXTRA UVS
         if psk.has_extra_uvs and options.should_import_extra_uvs:
             extra_uv_channel_count = int(len(psk.extra_uvs) / len(psk.wedges))
             wedge_index_offset = 0
             for extra_uv_index in range(extra_uv_channel_count):
-                data_index = 0
+                uv_layer_data_index = 0
                 uv_layer = mesh_data.uv_layers.new(name=f'EXTRAUV{extra_uv_index}')
                 for face_index, face in enumerate(psk.faces):
                     if face_index in invalid_face_indices:
                         continue
                     for wedge_index in reversed(face.wedge_indices):
                         u, v = psk.extra_uvs[wedge_index_offset + wedge_index]
-                        uv_layer.data[data_index].uv = u, 1.0 - v
-                        data_index += 1
+                        uv_layer.data[uv_layer_data_index].uv = u, 1.0 - v
+                        uv_layer_data_index += 1
                 wedge_index_offset += len(psk.wedges)

         # VERTEX COLORS
         if psk.has_vertex_colors and options.should_import_vertex_colors:
-            size = (len(psk.points), 4)
-            vertex_colors = np.full(size, inf)
-            vertex_color_data = mesh_data.vertex_colors.new(name='VERTEXCOLOR')
-            ambiguous_vertex_color_point_indices = []
-
-            for wedge_index, wedge in enumerate(psk.wedges):
-                point_index = wedge.point_index
-                psk_vertex_color = psk.vertex_colors[wedge_index].normalized()
-                if vertex_colors[point_index, 0] != inf and tuple(vertex_colors[point_index]) != psk_vertex_color:
-                    ambiguous_vertex_color_point_indices.append(point_index)
-                else:
-                    vertex_colors[point_index] = psk_vertex_color
-
-            if options.vertex_color_space == 'SRGBA':
-                for i in range(vertex_colors.shape[0]):
-                    vertex_colors[i, :3] = tuple(map(lambda x: rgb_to_srgb(x), vertex_colors[i, :3]))
-
-            for loop_index, loop in enumerate(mesh_data.loops):
-                vertex_color = vertex_colors[loop.vertex_index]
-                if vertex_color is not None:
-                    vertex_color_data.data[loop_index].color = vertex_color
-                else:
-                    vertex_color_data.data[loop_index].color = 1.0, 1.0, 1.0, 1.0
-
-            if len(ambiguous_vertex_color_point_indices) > 0:
-                result.warnings.append(
-                    f'{len(ambiguous_vertex_color_point_indices)} vertex(es) with ambiguous vertex colors.')
+            # Convert vertex colors to sRGB if necessary.
+            psk_vertex_colors = np.zeros((len(psk.vertex_colors), 4))
+            for vertex_color_index in range(len(psk.vertex_colors)):
+                psk_vertex_colors[vertex_color_index,:] = psk.vertex_colors[vertex_color_index].normalized()
+            match options.vertex_color_space:
+                case 'SRGBA':
+                    for i in range(psk_vertex_colors.shape[0]):
+                        psk_vertex_colors[i, :3] = tuple(map(lambda x: rgb_to_srgb(x), psk_vertex_colors[i, :3]))
+                case _:
+                    pass
+
+            # Map the PSK vertex colors to the face corners.
+            face_count = len(psk.faces) - len(invalid_face_indices)
+            face_corner_colors = np.full((face_count * 3, 4), 1.0)
+            face_corner_color_index = 0
+            for face_index, face in enumerate(psk.faces):
+                if face_index in invalid_face_indices:
+                    continue
+                for wedge_index in reversed(face.wedge_indices):
+                    face_corner_colors[face_corner_color_index] = psk_vertex_colors[wedge_index]
+                    face_corner_color_index += 1
+
+            # Create the vertex color attribute.
+            face_corner_color_attribute = mesh_data.attributes.new(name='VERTEXCOLOR', type='FLOAT_COLOR', domain='CORNER')
+            face_corner_color_attribute.data.foreach_set('color', face_corner_colors.flatten())

         # VERTEX NORMALS
         if psk.has_vertex_normals and options.should_import_vertex_normals:

@@ -267,6 +271,9 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
             armature_modifier.object = armature_object
             mesh_object.parent = armature_object

+    root_object = armature_object if options.should_import_skeleton else mesh_object
+    root_object.scale = (options.scale, options.scale, options.scale)
+
     try:
         bpy.ops.object.mode_set(mode='OBJECT')
     except:
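The rewritten vertex-colour path above applies rgb_to_srgb per channel when the colour space is 'SRGBA'. The add-on's helper lives in helpers.py; for reference, the standard linear-to-sRGB transfer that such a helper typically implements (this standalone version is an assumption, not the add-on's exact code):

    def rgb_to_srgb(x: float) -> float:
        # Standard sRGB encoding of a linear channel value in [0, 1].
        if x <= 0.0031308:
            return 12.92 * x
        return 1.055 * (x ** (1.0 / 2.4)) - 0.055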
io_scene_psk_psa/psk/properties.py (new file, 48 lines)
@@ -0,0 +1,48 @@
+from bpy.props import EnumProperty
+from bpy.types import PropertyGroup
+
+mesh_triangle_types_items = (
+    ('NORMAL', 'Normal', 'Normal one-sided', 0),
+    ('NORMAL_TWO_SIDED', 'Normal Two-Sided', 'Normal but two-sided', 1),
+    ('TRANSLUCENT', 'Translucent', 'Translucent two-sided', 2),
+    ('MASKED', 'Masked', 'Masked two-sided', 3),
+    ('MODULATE', 'Modulate', 'Modulation blended two-sided', 4),
+    ('PLACEHOLDER', 'Placeholder', 'Placeholder triangle for positioning weapon. Invisible', 8),
+)
+
+mesh_triangle_bit_flags_items = (
+    ('UNLIT', 'Unlit', 'Full brightness, no lighting', 16),
+    ('FLAT', 'Flat', 'Flat surface, don\'t do bMeshCurvy thing', 32),
+    ('ENVIRONMENT', 'Environment', 'Environment mapped', 64),
+    ('NO_SMOOTH', 'No Smooth', 'No bilinear filtering on this poly\'s texture', 128),
+)
+
+
+class PSX_PG_material(PropertyGroup):
+    mesh_triangle_type: EnumProperty(items=mesh_triangle_types_items, name='Triangle Type')
+    mesh_triangle_bit_flags: EnumProperty(items=mesh_triangle_bit_flags_items, name='Triangle Bit Flags',
+                                          options={'ENUM_FLAG'})
+
+
+mesh_triangle_types_items_dict = {item[0]: item[3] for item in mesh_triangle_types_items}
+mesh_triangle_bit_flags_items_dict = {item[0]: item[3] for item in mesh_triangle_bit_flags_items}
+
+
+def triangle_type_and_bit_flags_to_poly_flags(mesh_triangle_type: str, mesh_triangle_bit_flags: set[str]) -> int:
+    poly_flags = 0
+    poly_flags |= mesh_triangle_types_items_dict.get(mesh_triangle_type, 0)
+    for flag in mesh_triangle_bit_flags:
+        poly_flags |= mesh_triangle_bit_flags_items_dict.get(flag, 0)
+    return poly_flags
+
+
+def poly_flags_to_triangle_type_and_bit_flags(poly_flags: int) -> (str, set[str]):
+    try:
+        triangle_type = next(item[0] for item in mesh_triangle_types_items if item[3] == (poly_flags & 15))
+    except StopIteration:
+        triangle_type = 'NORMAL'
+    triangle_bit_flags = {item[0] for item in mesh_triangle_bit_flags_items if item[3] & poly_flags}
+    return triangle_type, triangle_bit_flags
+
+
+classes = (
+    PSX_PG_material,
+)
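The two helpers in the new file round-trip cleanly: the values 0..8 in the low bits encode the mutually exclusive triangle type, and 16/32/64/128 are independent bit flags. Using the item values defined above:

    poly_flags = triangle_type_and_bit_flags_to_poly_flags('MASKED', {'UNLIT', 'NO_SMOOTH'})
    assert poly_flags == 3 | 16 | 128  # == 147
    triangle_type, bit_flags = poly_flags_to_triangle_type_and_bit_flags(poly_flags)
    assert triangle_type == 'MASKED' and bit_flags == {'UNLIT', 'NO_SMOOTH'}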
@@ -23,7 +23,7 @@ def _read_material_references(path: str) -> List[str]:
         return []
     # Do a crude regex match to find the Material list entries.
     contents = property_file_path.read_text()
-    pattern = r"Material\s*=\s*([^\s^,]+)"
+    pattern = r'Material\s*=\s*([^\s^,]+)'
     return re.findall(pattern, contents)
io_scene_psk_psa/psk/ui.py (new file, 28 lines)
@@ -0,0 +1,28 @@
+from bpy.types import Panel
+
+
+class PSK_PT_material(Panel):
+    bl_label = 'PSK Material'
+    bl_idname = 'PSK_PT_material'
+    bl_space_type = 'PROPERTIES'
+    bl_region_type = 'WINDOW'
+    bl_context = 'material'
+    bl_options = {'DEFAULT_CLOSED'}
+
+    @classmethod
+    def poll(cls, context):
+        return context.material is not None
+
+    def draw(self, context):
+        layout = self.layout
+        layout.use_property_split = True
+        layout.use_property_decorate = False
+        material = context.material
+        layout.prop(material.psk, 'mesh_triangle_type')
+        col = layout.column()
+        col.prop(material.psk, 'mesh_triangle_bit_flags', expand=True, text='Flags')
+
+
+classes = (
+    PSK_PT_material,
+)
@@ -6,7 +6,7 @@ from ..data import Section, Vector3

 MAX_WEDGE_COUNT = 65536
 MAX_POINT_COUNT = 4294967296
-MAX_BONE_COUNT = 256
+MAX_BONE_COUNT = 2147483647
 MAX_MATERIAL_COUNT = 256
@@ -51,5 +51,5 @@ classes = (
     PSX_PG_action_export,
     PSX_PG_bone_collection_list_item,
     PSX_UL_bone_collection_list,
-    PSX_PT_action
+    PSX_PT_action,
 )