Mirror of https://github.com/DarklightGames/io_scene_psk_psa.git (synced 2024-11-27 16:10:48 +01:00)

Commit 9e18e40387: Merge branch 'bdk'

# Conflicts:
#   README.md
#   io_scene_psk_psa/__init__.py
#   io_scene_psk_psa/psk/data.py
#   io_scene_psk_psa/psk/importer.py
#   io_scene_psk_psa/psk/reader.py

README.md (42 changed lines)
@@ -1,4 +1,9 @@
This Blender 2.90+ add-on allows you to import and export meshes and animations to and from the [PSK and PSA file formats](https://wiki.beyondunreal.com/PSK_%26_PSA_file_formats) used in many versions of the Unreal Engine.
[![Blender](https://img.shields.io/badge/Blender->=3.4-blue?logo=blender&logoColor=white)](https://www.blender.org/download/ "Download Blender")
[![GitHub release](https://img.shields.io/github/release/DarklightGames/io_scene_psk_psa?include_prereleases=&sort=semver&color=blue)](https://github.com/DarklightGames/io_scene_psk_psa/releases/)

This Blender add-on allows you to import and export meshes and animations to and from the [PSK and PSA file formats](https://wiki.beyondunreal.com/PSK_%26_PSA_file_formats) used in many versions of the Unreal Engine.

> **NOTE**: This addon requires Blender 3.4+. If this is not available to you, install version [4.3.0](https://github.com/DarklightGames/io_scene_psk_psa/releases/tag/4.3.0), as it has a minimum Blender version of 2.9.

# Features

* Full PSK/PSA import and export capabilities.

@@ -11,36 +16,33 @@ This Blender 2.90+ add-on allows you to import and export meshes and animations

# Installation

1. Download the zip file for the latest version from the [releases](https://github.com/DarklightGames/io_export_psk_psa/releases) page.
2. Open Blender 2.90 or later.
3. Navigate to the Blender Preferences (Edit > Preferences).
4. Select the "Add-ons" tab.
5. Click the "Install..." button.
6. Select the .zip file that you downloaded earlier and click "Install Add-on".
7. Enable the newly added "Import-Export: PSK/PSA Importer/Exporter" addon.
2. Open Blender 3.4.0 or later.
3. Navigate to the Blender Preferences (`Edit` > `Preferences`).
4. Select the `Add-ons` tab.
5. Click the `Install...` button.
6. Select the .zip file that you downloaded earlier and click `Install Add-on`.
7. Enable the newly added `Import-Export: PSK/PSA Importer/Exporter` addon.

# Usage

## Exporting a PSK

1. Select the mesh objects you wish to export.
3. Navigate to File > Export > Unreal PSK (.psk)
4. Enter the file name and click "Export".
2. Navigate to `File` > `Export` > `Unreal PSK (.psk)`.
3. Enter the file name and click `Export`.

## Importing a PSK/PSKX

1. Navigate to File > Import > Unreal PSK (.psk/.pskx)
2. Select the PSK file you want to import and click "Import".
1. Navigate to `File` > `Import` > `Unreal PSK (.psk/.pskx)`.
2. Select the PSK file you want to import and click `Import`.

## Exporting a PSA

1. Select the armature objects you wish to export.
2. Navigate to File > Export > Unreal PSA (.psa)
3. Enter the file name and click "Export".
2. Navigate to `File` > `Export` > `Unreal PSA (.psa)`.
3. Enter the file name and click `Export`.

## Importing a PSA

1. Select the armature object that you wish to import actions to.
2. Navigate to the Object Data Properties tab of the Properties editor.
3. Navigate to the PSA Import panel.
4. Click "Select PSA File".
5. Select the PSA file that you want to import animations from and click "Select".
6. In the Actions box, select which animations you want to import.
7. Click "Import".
1. Select an armature that you want to import animations for.
2. Navigate to `File` > `Import` > `Unreal PSA (.psa)`.
3. Select the PSA file you want to import.
4. Select the sequences that you want to import and click `Import`.

# FAQ

## Why are the mesh normals not accurate when importing a PSK extracted from [UE Viewer](https://www.gildor.org/en/projects/umodel)?
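For users who prefer scripting, the same PSA import and export steps can in principle be driven from Blender's Python console. This is only a hedged sketch: the operator idnames ('psa_export.operator' and 'psa_import.import') are the bl_idname values visible later in this commit, the `filepath` argument comes from the standard ExportHelper/ImportHelper mixins, the paths are placeholders, and calling the operators directly skips the sequence-list population normally done in their invoke step, so the menu entries remain the supported route.

```python
import bpy

# An armature must be the active object, just as in the manual steps above.
armature_object = bpy.context.view_layer.objects.active
assert armature_object is not None and armature_object.type == 'ARMATURE'

# Export: 'psa_export.operator' is PsaExportOperator.bl_idname in this commit.
bpy.ops.psa_export.operator(filepath=r'C:\temp\character.psa')

# Import: the idname is 'psa_import.import'; 'import' is a Python keyword, so
# the operator has to be looked up with getattr instead of attribute access.
psa_import = getattr(bpy.ops.psa_import, 'import')
psa_import(filepath=r'C:\temp\character.psa')
```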
@@ -1,9 +1,8 @@
bl_info = {
    "name": "PSK/PSA Importer/Exporter",
    "author": "Colin Basnett, Yurii Ti",
    "version": (4, 3, 0),
    "blender": (2, 90, 0),
    # "location": "File > Export > PSK Export (.psk)",
    "version": (5, 0, 0),
    "blender": (3, 4, 0),
    "description": "PSK/PSA Import/Export (.psk/.psa)",
    "warning": "",
    "doc_url": "https://github.com/DarklightGames/io_scene_psk_psa",
@@ -44,9 +43,34 @@ else:
    from .psa import importer as psa_importer

import bpy
from bpy.props import PointerProperty
from bpy.props import CollectionProperty, PointerProperty, StringProperty, IntProperty
from bpy.types import AddonPreferences, PropertyGroup

classes = (psx_types.classes +


class MaterialPathPropertyGroup(PropertyGroup):
    path: StringProperty(name='Path', subtype='DIR_PATH')


class PskPsaAddonPreferences(AddonPreferences):
    bl_idname = __name__

    material_path_list: CollectionProperty(type=MaterialPathPropertyGroup)
    material_path_index: IntProperty()

    def draw_filter(self, context, layout):
        pass

    def draw(self, context: bpy.types.Context):
        self.layout.label(text='Material Paths')
        row = self.layout.row()
        row.template_list('PSX_UL_MaterialPathList', '', self, 'material_path_list', self, 'material_path_index')
        column = row.column()
        column.operator(psx_types.PSX_OT_MaterialPathAdd.bl_idname, icon='ADD', text='')
        column.operator(psx_types.PSX_OT_MaterialPathRemove.bl_idname, icon='REMOVE', text='')


classes = ((MaterialPathPropertyGroup, PskPsaAddonPreferences) +
           psx_types.classes +
           psk_importer.classes +
           psk_exporter.classes +
           psa_exporter.classes +
@@ -65,26 +89,30 @@ def psa_export_menu_func(self, context):
    self.layout.operator(psa_exporter.PsaExportOperator.bl_idname, text='Unreal PSA (.psa)')


def psa_import_menu_func(self, context):
    self.layout.operator(psa_importer.PsaImportOperator.bl_idname, text='Unreal PSA (.psa)')


def register():
    for cls in classes:
        bpy.utils.register_class(cls)
    bpy.types.TOPBAR_MT_file_export.append(psk_export_menu_func)
    bpy.types.TOPBAR_MT_file_import.append(psk_import_menu_func)
    bpy.types.TOPBAR_MT_file_export.append(psa_export_menu_func)
    bpy.types.TOPBAR_MT_file_import.append(psa_import_menu_func)
    bpy.types.Scene.psa_import = PointerProperty(type=psa_importer.PsaImportPropertyGroup)
    bpy.types.Scene.psk_import = PointerProperty(type=psk_importer.PskImportPropertyGroup)
    bpy.types.Scene.psa_export = PointerProperty(type=psa_exporter.PsaExportPropertyGroup)
    bpy.types.Scene.psk_export = PointerProperty(type=psk_exporter.PskExportPropertyGroup)


def unregister():
    del bpy.types.Scene.psa_import
    del bpy.types.Scene.psk_import
    del bpy.types.Scene.psa_export
    del bpy.types.Scene.psk_export
    bpy.types.TOPBAR_MT_file_export.remove(psk_export_menu_func)
    bpy.types.TOPBAR_MT_file_import.remove(psk_import_menu_func)
    bpy.types.TOPBAR_MT_file_export.remove(psa_export_menu_func)
    bpy.types.TOPBAR_MT_file_import.remove(psa_import_menu_func)
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
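For context on how the new PskPsaAddonPreferences data might be consumed at runtime, here is a hedged sketch of reading the configured material paths; the add-on key used for the lookup is an assumption based on bl_idname being set to __name__ in the package's __init__.py.

```python
import bpy

addon = bpy.context.preferences.addons.get('io_scene_psk_psa')  # assumed add-on key
if addon is not None:
    preferences = addon.preferences
    # material_path_list holds MaterialPathPropertyGroup items, each exposing a
    # directory path chosen in the add-on preferences UI.
    material_search_paths = [item.path for item in preferences.material_path_list]
    print(material_search_paths)
```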
io_scene_psk_psa/bdk.py (new file, 39 lines)

@@ -0,0 +1,39 @@
import re
from typing import Optional


class UReference:
    type_name: str
    package_name: str
    group_name: Optional[str]
    object_name: str

    def __init__(self, type_name: str, package_name: str, object_name: str, group_name: Optional[str] = None):
        self.type_name = type_name
        self.package_name = package_name
        self.object_name = object_name
        self.group_name = group_name

    @staticmethod
    def from_string(string: str) -> Optional['UReference']:
        if string == 'None':
            return None
        pattern = r'(\w+)\'([\w\.\d\-\_]+)\''
        match = re.match(pattern, string)
        if match is None:
            print(f'BAD REFERENCE STRING: {string}')
            return None
        type_name = match.group(1)
        object_name = match.group(2)
        pattern = r'([\w\d\-\_]+)'
        values = re.findall(pattern, object_name)
        package_name = values[0]
        object_name = values[-1]
        return UReference(type_name, package_name, object_name, group_name=None)

    def __repr__(self):
        s = f'{self.type_name}\'{self.package_name}'
        if self.group_name:
            s += f'.{self.group_name}'
        s += f'.{self.object_name}'
        return s
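A quick sketch of how UReference.from_string behaves on a typical Unreal-style reference string (the reference value is made up for illustration):

```python
ref = UReference.from_string("Texture'MyPackage.Detail.MyTexture'")

# from_string keeps only the first and last dotted components.
assert ref.type_name == 'Texture'
assert ref.package_name == 'MyPackage'
assert ref.object_name == 'MyTexture'
# The middle component ('Detail') is discarded: group_name is always None here.
assert ref.group_name is None

# __repr__ rebuilds the reference but does not add a closing quote.
print(repr(ref))  # Texture'MyPackage.MyTexture
```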
@@ -1,10 +1,12 @@
import datetime
import re
import typing
from collections import Counter
from typing import List, Iterable

import addon_utils
import bpy.types
from bpy.types import NlaStrip, Object
from bpy.types import NlaStrip, Object, AnimData


class Timer:
@@ -25,14 +27,14 @@ class Timer:
        return datetime.datetime.now() - self.start


def rgb_to_srgb(c):
def rgb_to_srgb(c: float):
    if c > 0.0031308:
        return 1.055 * (pow(c, (1.0 / 2.4))) - 0.055
    else:
        return 12.92 * c


def get_nla_strips_in_timeframe(animation_data, frame_min, frame_max) -> List[NlaStrip]:
def get_nla_strips_in_timeframe(animation_data: AnimData, frame_min: float, frame_max: float) -> List[NlaStrip]:
    if animation_data is None:
        return []
    strips = []
@@ -86,13 +88,6 @@ def populate_bone_group_list(armature_object: Object, bone_group_list: bpy.props
        item.is_selected = bone_group.name in selected_assigned_group_names if has_selected_groups else True


def get_psa_sequence_name(action, should_use_original_sequence_name):
    if should_use_original_sequence_name and 'psa_sequence_name' in action:
        return action['psa_sequence_name']
    else:
        return action.name


def check_bone_names(bone_names: Iterable[str]):
    pattern = re.compile(r'^[a-zA-Z\d_\- ]+$')
    invalid_bone_names = [x for x in bone_names if pattern.match(x) is None]
@@ -101,7 +96,7 @@ def check_bone_names(bone_names: Iterable[str]):
                           f'Bone names must only contain letters, numbers, spaces, hyphens and underscores.')


def get_export_bone_names(armature_object, bone_filter_mode, bone_group_indices: List[int]) -> List[str]:
def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_group_indices: List[int]) -> List[str]:
    """
    Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone groups.

@@ -115,7 +110,8 @@ def get_export_bone_names(armature_object, bone_filter_mode, bone_group_indices:
    if armature_object is None or armature_object.type != 'ARMATURE':
        raise ValueError('An armature object must be supplied')

    bones = armature_object.data.bones
    armature_data = typing.cast(bpy.types.Armature, armature_object.data)
    bones = armature_data.bones
    pose_bones = armature_object.pose.bones
    bone_names = [x.name for x in bones]

@@ -174,3 +170,7 @@ def get_export_bone_names(armature_object, bone_filter_mode, bone_group_indices:
                           f'Additional debugging information has been written to the console.')

    return bone_names


def is_bdk_addon_loaded():
    return addon_utils.check('bdk_addon')[1]
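To make the naming rule in check_bone_names concrete, here is a small hedged illustration (the bone names are invented, and the exception type is inferred from the error-message line visible in the hunk above):

```python
# Letters, digits, spaces, hyphens and underscores are accepted.
check_bone_names(['root', 'upper_arm-L', 'Bone 01'])

# A dot or any other character outside that set is rejected; the function is
# expected to raise with a message listing the offending names.
try:
    check_bone_names(['spine.001', 'Kopf.ä'])
except Exception as error:  # assumed to be a RuntimeError in the real module
    print(error)
```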
@@ -1,115 +1,39 @@
from typing import Dict
from typing import Optional

from bpy.types import Action, Armature, Bone
from bpy.types import Armature, Bone, Action

from .data import *
from ..helpers import *


class PsaBuildOptions(object):
class PsaExportSequence:
    class NlaState:
        def __init__(self):
            self.action: Optional[Action] = None
            self.frame_start: int = 0
            self.frame_end: int = 0

    def __init__(self):
        self.should_override_animation_data = False
        self.animation_data_override = None
        self.fps_source = 'SCENE'
        self.fps_custom = 30.0
        self.sequence_source = 'ACTIONS'
        self.actions = []
        self.marker_names = []
        self.bone_filter_mode = 'ALL'
        self.bone_group_indices = []
        self.should_use_original_sequence_names = False
        self.should_trim_timeline_marker_sequences = True
        self.should_ignore_bone_name_restrictions = False
        self.sequence_name_prefix = ''
        self.sequence_name_suffix = ''
        self.root_motion = False
        self.name: str = ''
        self.nla_state: PsaExportSequence.NlaState = PsaExportSequence.NlaState()
        self.fps: float = 30.0


def get_sequence_fps(context, options: PsaBuildOptions, actions: Iterable[Action]) -> float:
    if options.fps_source == 'SCENE':
        return context.scene.render.fps
    if options.fps_source == 'CUSTOM':
        return options.fps_custom
    elif options.fps_source == 'ACTION_METADATA':
        # Get the minimum value of action metadata FPS values.
        fps_list = []
        for action in filter(lambda x: 'psa_sequence_fps' in x, actions):
            fps = action['psa_sequence_fps']
            if type(fps) == int or type(fps) == float:
                fps_list.append(fps)
        if len(fps_list) > 0:
            return min(fps_list)
        else:
            # No valid action metadata to use, fallback to scene FPS
            return context.scene.render.fps
    else:
        raise RuntimeError(f'Invalid FPS source "{options.fps_source}"')


def get_timeline_marker_sequence_frame_ranges(animation_data, context, options: PsaBuildOptions) -> Dict:
    # Timeline markers need to be sorted so that we can determine the sequence start and end positions.
    sequence_frame_ranges = dict()
    sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame))
    sorted_timeline_marker_names = list(map(lambda x: x.name, sorted_timeline_markers))

    for marker_name in options.marker_names:
        marker = context.scene.timeline_markers[marker_name]
        frame_min = marker.frame
        # Determine the final frame of the sequence based on the next marker.
        # If no subsequent marker exists, use the maximum frame_end from all NLA strips.
        marker_index = sorted_timeline_marker_names.index(marker_name)
        next_marker_index = marker_index + 1
        frame_max = 0
        if next_marker_index < len(sorted_timeline_markers):
            # There is a next marker. Use that next marker's frame position as the last frame of this sequence.
            frame_max = sorted_timeline_markers[next_marker_index].frame
            if options.should_trim_timeline_marker_sequences:
                nla_strips = get_nla_strips_in_timeframe(animation_data, marker.frame, frame_max)
                if len(nla_strips) > 0:
                    frame_max = min(frame_max, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips)))
                    frame_min = max(frame_min, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips)))
                else:
                    # No strips in between this marker and the next, just export this as a one-frame animation.
                    frame_max = frame_min
        else:
            # There is no next marker.
            # Find the final frame of all the NLA strips and use that as the last frame of this sequence.
            for nla_track in animation_data.nla_tracks:
                if nla_track.mute:
                    continue
                for strip in nla_track.strips:
                    frame_max = max(frame_max, strip.frame_end)

        if frame_min > frame_max:
            continue

        sequence_frame_ranges[marker_name] = int(frame_min), int(frame_max)

    return sequence_frame_ranges

class PsaBuildOptions:
    def __init__(self):
        self.animation_data: Optional[AnimData] = None
        self.sequences: List[PsaExportSequence] = []
        self.bone_filter_mode: str = 'ALL'
        self.bone_group_indices: List[int] = []
        self.should_ignore_bone_name_restrictions: bool = False
        self.sequence_name_prefix: str = ''
        self.sequence_name_suffix: str = ''
        self.root_motion: bool = False


def build_psa(context: bpy.types.Context, options: PsaBuildOptions) -> Psa:
    active_object = context.view_layer.objects.active

    if active_object.type != 'ARMATURE':
        raise RuntimeError('Selected object must be an Armature')

    if options.should_override_animation_data:
        animation_data_object = options.animation_data_override
    else:
        animation_data_object = active_object

    animation_data = animation_data_object.animation_data

    if animation_data is None:
        raise RuntimeError(f'No animation data for object \'{animation_data_object.name}\'')

    # Ensure that we actually have items that we are going to be exporting.
    if options.sequence_source == 'ACTIONS' and len(options.actions) == 0:
        raise RuntimeError('No actions were selected for export')
    elif options.sequence_source == 'TIMELINE_MARKERS' and len(options.marker_names) == 0:
        raise RuntimeError('No timeline markers were selected for export')

    psa = Psa()

    armature_object = active_object
@@ -177,72 +101,28 @@ def build_psa(context: bpy.types.Context, options: PsaBuildOptions) -> Psa:

        psa.bones.append(psa_bone)

    # Populate the export sequence list.
    class NlaState:
        def __init__(self):
            self.frame_min = 0
            self.frame_max = 0
            self.action = None

    class ExportSequence:
        def __init__(self):
            self.name = ''
            self.nla_state = NlaState()
            self.fps = 30.0

    export_sequences = []

    if options.sequence_source == 'ACTIONS':
        for action in options.actions:
            if len(action.fcurves) == 0:
                continue
            export_sequence = ExportSequence()
            export_sequence.nla_state.action = action
            export_sequence.name = get_psa_sequence_name(action, options.should_use_original_sequence_names)
            frame_min, frame_max = [int(x) for x in action.frame_range]
            export_sequence.nla_state.frame_min = frame_min
            export_sequence.nla_state.frame_max = frame_max
            export_sequence.fps = get_sequence_fps(context, options, [action])
            export_sequences.append(export_sequence)
        pass
    elif options.sequence_source == 'TIMELINE_MARKERS':
        sequence_frame_ranges = get_timeline_marker_sequence_frame_ranges(animation_data, context, options)

        for name, (frame_min, frame_max) in sequence_frame_ranges.items():
            export_sequence = ExportSequence()
            export_sequence.name = name
            export_sequence.nla_state.action = None
            export_sequence.nla_state.frame_min = frame_min
            export_sequence.nla_state.frame_max = frame_max

            nla_strips_actions = set(
                map(lambda x: x.action, get_nla_strips_in_timeframe(animation_data, frame_min, frame_max)))
            export_sequence.fps = get_sequence_fps(context, options, nla_strips_actions)
            export_sequences.append(export_sequence)
    else:
        raise ValueError(f'Unhandled sequence source: {options.sequence_source}')

    # Add prefixes and suffices to the names of the export sequences and strip whitespace.
    for export_sequence in export_sequences:
    for export_sequence in options.sequences:
        export_sequence.name = f'{options.sequence_name_prefix}{export_sequence.name}{options.sequence_name_suffix}'
        export_sequence.name = export_sequence.name.strip()

    # Save the current action and frame so that we can restore the state once we are done.
    saved_frame_current = context.scene.frame_current
    saved_action = animation_data.action
    saved_action = options.animation_data.action

    # Now build the PSA sequences.
    # We actually alter the timeline frame and simply record the resultant pose bone matrices.
    frame_start_index = 0

    for export_sequence in export_sequences:
    for export_sequence in options.sequences:
        # Link the action to the animation data and update view layer.
        animation_data.action = export_sequence.nla_state.action
        options.animation_data.action = export_sequence.nla_state.action
        context.view_layer.update()

        frame_min = export_sequence.nla_state.frame_min
        frame_max = export_sequence.nla_state.frame_max
        frame_count = frame_max - frame_min + 1
        frame_start = export_sequence.nla_state.frame_start
        frame_end = export_sequence.nla_state.frame_end
        frame_count = abs(frame_end - frame_start) + 1
        frame_step = 1 if frame_start < frame_end else -1

        psa_sequence = Psa.Sequence()
        psa_sequence.name = bytes(export_sequence.name, encoding='windows-1252')
@@ -250,8 +130,11 @@ def build_psa(context: bpy.types.Context, options: PsaBuildOptions) -> Psa:
        psa_sequence.frame_start_index = frame_start_index
        psa_sequence.fps = export_sequence.fps

        for frame in range(frame_count):
            context.scene.frame_set(frame_min + frame)
        frame = frame_start
        for _ in range(frame_count):
            context.scene.frame_set(frame)

            frame += frame_step

            for pose_bone in pose_bones:
                key = Psa.Key()
@@ -262,10 +145,11 @@ def build_psa(context: bpy.types.Context, options: PsaBuildOptions) -> Psa:
                    pose_bone_matrix = pose_bone_parent_matrix.inverted() @ pose_bone_matrix
                else:
                    if options.root_motion:
                        # Export root motion
                        # Get the bone's pose matrix, taking the armature object's world matrix into account.
                        pose_bone_matrix = armature_object.matrix_world @ pose_bone.matrix
                    else:
                        pose_bone_matrix = pose_bone.matrix
                        # Use the bind pose matrix for the root bone.
                        pose_bone_matrix = armature_data.bones[pose_bone.name].matrix_local

                location = pose_bone_matrix.to_translation()
                rotation = pose_bone_matrix.to_quaternion().normalized()
@@ -292,7 +176,7 @@ def build_psa(context: bpy.types.Context, options: PsaBuildOptions) -> Psa:
        psa.sequences[export_sequence.name] = psa_sequence

    # Restore the previous action & frame.
    animation_data.action = saved_action
    options.animation_data.action = saved_action
    context.scene.frame_set(saved_frame_current)

    return psa
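The frame-stepping change above is what enables reversed sequences: when frame_start is greater than frame_end, the exporter walks the timeline backwards. A standalone sketch of just that arithmetic, with no Blender dependency:

```python
def sequence_frames(frame_start: int, frame_end: int):
    # Mirrors the new loop in build_psa: an inclusive range walked in either direction.
    frame_count = abs(frame_end - frame_start) + 1
    frame_step = 1 if frame_start < frame_end else -1
    frame = frame_start
    for _ in range(frame_count):
        yield frame
        frame += frame_step

print(list(sequence_frames(1, 5)))  # [1, 2, 3, 4, 5]
print(list(sequence_frames(5, 1)))  # [5, 4, 3, 2, 1], a reversed sequence
```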
@@ -10,7 +10,7 @@ Use the PsaReader::get_sequence_keys to get the keys for a sequence.
"""


class Psa(object):
class Psa:
    class Bone(Structure):
        _fields_ = [
            ('name', c_char * 64),
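Since the docstring above points callers at PsaReader, here is a hedged sketch of reading a PSA's sequence table with it. The module path and constructor argument are taken from how the importer uses PsaReader elsewhere in this commit (it is constructed from an absolute file path); the key type of the sequences mapping and the get_sequence_keys argument are assumptions, and the sample path is a placeholder.

```python
from io_scene_psk_psa.psa.reader import PsaReader  # module path assumed from the package layout

psa_reader = PsaReader('C:/temp/character.psa')  # placeholder path

# The importer iterates psa_reader.sequences.values() to list sequences and
# reads sequence.fps when writing metadata, so the table can be dumped like so
# (whether the mapping keys are str or bytes depends on the reader).
for name, sequence in psa_reader.sequences.items():
    print(name, sequence.fps)

# Per the docstring, the raw keys for one sequence are fetched on demand.
first_sequence_name = next(iter(psa_reader.sequences))
keys = psa_reader.get_sequence_keys(first_sequence_name)
```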
@ -1,14 +1,14 @@
|
||||
import fnmatch
|
||||
import sys
|
||||
from typing import Type
|
||||
from typing import Type, Dict
|
||||
|
||||
import bpy
|
||||
from bpy.props import BoolProperty, CollectionProperty, EnumProperty, FloatProperty, IntProperty, PointerProperty, \
|
||||
StringProperty
|
||||
from bpy.types import Action, Operator, PropertyGroup, UIList
|
||||
from bpy.types import Action, Operator, PropertyGroup, UIList, Context, Armature, TimelineMarker
|
||||
from bpy_extras.io_utils import ExportHelper
|
||||
|
||||
from .builder import PsaBuildOptions, build_psa
|
||||
from .builder import PsaBuildOptions, PsaExportSequence, build_psa
|
||||
from .data import *
|
||||
from ..helpers import *
|
||||
from ..types import BoneGroupListItem
|
||||
@ -38,23 +38,17 @@ class PsaExportActionListItem(PropertyGroup):
|
||||
action: PointerProperty(type=Action)
|
||||
name: StringProperty()
|
||||
is_selected: BoolProperty(default=False)
|
||||
frame_start: IntProperty(options={'HIDDEN'})
|
||||
frame_end: IntProperty(options={'HIDDEN'})
|
||||
is_pose_marker: BoolProperty(options={'HIDDEN'})
|
||||
|
||||
|
||||
class PsaExportTimelineMarkerListItem(PropertyGroup):
|
||||
marker_index: IntProperty()
|
||||
name: StringProperty()
|
||||
is_selected: BoolProperty(default=True)
|
||||
|
||||
|
||||
def update_action_names(context):
|
||||
pg = context.scene.psa_export
|
||||
for item in pg.action_list:
|
||||
action = item.action
|
||||
item.action_name = get_psa_sequence_name(action, pg.should_use_original_sequence_names)
|
||||
|
||||
|
||||
def should_use_original_sequence_names_updated(_, context):
|
||||
update_action_names(context)
|
||||
frame_start: IntProperty(options={'HIDDEN'})
|
||||
frame_end: IntProperty(options={'HIDDEN'})
|
||||
|
||||
|
||||
def psa_export_property_group_animation_data_override_poll(_context, obj):
|
||||
@ -69,7 +63,9 @@ class PsaExportPropertyGroup(PropertyGroup):
|
||||
name='Root Motion',
|
||||
options=empty_set,
|
||||
default=False,
|
||||
description='The root bone will be transformed as it appears in the scene',
|
||||
description='When enabled, the root bone will be transformed as it appears in the scene.\n\n'
|
||||
'You might want to disable this if you are exporting an animation for an armature that is '
|
||||
'attached to another object, such as a weapon or a shield',
|
||||
)
|
||||
should_override_animation_data: BoolProperty(
|
||||
name='Override Animation Data',
|
||||
@ -121,26 +117,11 @@ class PsaExportPropertyGroup(PropertyGroup):
|
||||
)
|
||||
bone_group_list: CollectionProperty(type=BoneGroupListItem)
|
||||
bone_group_list_index: IntProperty(default=0, name='', description='')
|
||||
should_use_original_sequence_names: BoolProperty(
|
||||
default=False,
|
||||
name='Original Names',
|
||||
options=empty_set,
|
||||
update=should_use_original_sequence_names_updated,
|
||||
description='If the action was imported from the PSA Import panel, the original name of the sequence will be '
|
||||
'used instead of the Blender action name',
|
||||
)
|
||||
should_trim_timeline_marker_sequences: BoolProperty(
|
||||
default=True,
|
||||
name='Trim Sequences',
|
||||
options=empty_set,
|
||||
description='Frames without NLA track information at the boundaries of timeline markers will be excluded from '
|
||||
'the exported sequences '
|
||||
)
|
||||
should_ignore_bone_name_restrictions: BoolProperty(
|
||||
default=False,
|
||||
name='Ignore Bone Name Restrictions',
|
||||
description='Bone names restrictions will be ignored. Note that bone names without properly formatted names '
|
||||
'cannot be referenced in scripts.'
|
||||
'cannot be referenced in scripts'
|
||||
)
|
||||
sequence_name_prefix: StringProperty(name='Prefix', options=empty_set)
|
||||
sequence_name_suffix: StringProperty(name='Suffix', options=empty_set)
|
||||
@ -159,6 +140,10 @@ class PsaExportPropertyGroup(PropertyGroup):
|
||||
name='Show assets',
|
||||
options=empty_set,
|
||||
description='Show actions that belong to an asset library')
|
||||
sequence_filter_pose_marker: BoolProperty(
|
||||
default=False,
|
||||
name='Show pose markers',
|
||||
options=empty_set)
|
||||
sequence_use_filter_sort_reverse: BoolProperty(default=True, options=empty_set)
|
||||
|
||||
|
||||
@ -170,6 +155,193 @@ def is_bone_filter_mode_item_available(context, identifier):
|
||||
return True
|
||||
|
||||
|
||||
def get_timeline_marker_sequence_frame_ranges(animation_data: AnimData, context: Context, marker_names: List[str]) -> Dict:
|
||||
# Timeline markers need to be sorted so that we can determine the sequence start and end positions.
|
||||
sequence_frame_ranges = dict()
|
||||
sorted_timeline_markers = list(sorted(context.scene.timeline_markers, key=lambda x: x.frame))
|
||||
sorted_timeline_marker_names = list(map(lambda x: x.name, sorted_timeline_markers))
|
||||
|
||||
for marker_name in marker_names:
|
||||
marker = context.scene.timeline_markers[marker_name]
|
||||
frame_start = marker.frame
|
||||
# Determine the final frame of the sequence based on the next marker.
|
||||
# If no subsequent marker exists, use the maximum frame_end from all NLA strips.
|
||||
marker_index = sorted_timeline_marker_names.index(marker_name)
|
||||
next_marker_index = marker_index + 1
|
||||
frame_end = 0
|
||||
if next_marker_index < len(sorted_timeline_markers):
|
||||
# There is a next marker. Use that next marker's frame position as the last frame of this sequence.
|
||||
frame_end = sorted_timeline_markers[next_marker_index].frame
|
||||
nla_strips = get_nla_strips_in_timeframe(animation_data, marker.frame, frame_end)
|
||||
if len(nla_strips) > 0:
|
||||
frame_end = min(frame_end, max(map(lambda nla_strip: nla_strip.frame_end, nla_strips)))
|
||||
frame_start = max(frame_start, min(map(lambda nla_strip: nla_strip.frame_start, nla_strips)))
|
||||
else:
|
||||
# No strips in between this marker and the next, just export this as a one-frame animation.
|
||||
frame_end = frame_start
|
||||
else:
|
||||
# There is no next marker.
|
||||
# Find the final frame of all the NLA strips and use that as the last frame of this sequence.
|
||||
for nla_track in animation_data.nla_tracks:
|
||||
if nla_track.mute:
|
||||
continue
|
||||
for strip in nla_track.strips:
|
||||
frame_end = max(frame_end, strip.frame_end)
|
||||
|
||||
if frame_start > frame_end:
|
||||
continue
|
||||
|
||||
sequence_frame_ranges[marker_name] = int(frame_start), int(frame_end)
|
||||
|
||||
return sequence_frame_ranges
|
||||
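A hedged illustration of the mapping this function produces (marker names, frame numbers and the armature object are invented for the example):

```python
import bpy

# Hypothetical scene: markers Idle at frame 0, Walk at frame 30, Run at frame 60,
# with the longest unmuted NLA strip ending at frame 90. The expected result is
# roughly {'Idle': (0, 30), 'Walk': (30, 60), 'Run': (60, 90)}: each marker runs
# to the next one, the last marker runs to the latest NLA strip end, ranges are
# tightened to the strips that overlap them, and a marker whose start ends up
# past its end is skipped.
armature_object = bpy.context.view_layer.objects.active  # assumed to be an animated armature
frame_ranges = get_timeline_marker_sequence_frame_ranges(
    animation_data=armature_object.animation_data,
    context=bpy.context,
    marker_names=['Idle', 'Walk', 'Run'])
print(frame_ranges)
```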
|
||||
|
||||
def get_sequence_fps(context: Context, fps_source: str, fps_custom: float, actions: Iterable[Action]) -> float:
|
||||
if fps_source == 'SCENE':
|
||||
return context.scene.render.fps
|
||||
elif fps_source == 'CUSTOM':
|
||||
return fps_custom
|
||||
elif fps_source == 'ACTION_METADATA':
|
||||
# Get the minimum value of action metadata FPS values.
|
||||
fps_list = []
|
||||
for action in filter(lambda x: 'psa_sequence_fps' in x, actions):
|
||||
fps = action['psa_sequence_fps']
|
||||
if type(fps) == int or type(fps) == float:
|
||||
fps_list.append(fps)
|
||||
if len(fps_list) > 0:
|
||||
return min(fps_list)
|
||||
else:
|
||||
# No valid action metadata to use, fallback to scene FPS
|
||||
return context.scene.render.fps
|
||||
else:
|
||||
raise RuntimeError(f'Invalid FPS source "{fps_source}"')
|
||||
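A short sketch of how the three FPS sources resolve through the function above; the action, its metadata value and the custom FPS value are invented:

```python
import bpy

action = bpy.data.actions.new('Walk')
action['psa_sequence_fps'] = 24  # metadata the PSA importer writes onto actions

get_sequence_fps(bpy.context, 'SCENE', 15.0, [action])            # scene render FPS
get_sequence_fps(bpy.context, 'CUSTOM', 15.0, [action])           # 15.0
get_sequence_fps(bpy.context, 'ACTION_METADATA', 15.0, [action])  # 24, the minimum metadata value
```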
|
||||
|
||||
def is_action_for_armature(armature: Armature, action: Action):
|
||||
if len(action.fcurves) == 0:
|
||||
return False
|
||||
bone_names = set([x.name for x in armature.bones])
|
||||
for fcurve in action.fcurves:
|
||||
match = re.match(r'pose\.bones\[\"([^\"]+)\"](\[\"([^\"]+)\"])?', fcurve.data_path)
|
||||
if not match:
|
||||
continue
|
||||
bone_name = match.group(1)
|
||||
if bone_name in bone_names:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def get_animation_data_object(context: Context) -> Object:
|
||||
pg: PsaExportPropertyGroup = getattr(context.scene, 'psa_export')
|
||||
|
||||
active_object = context.view_layer.objects.active
|
||||
|
||||
if active_object.type != 'ARMATURE':
|
||||
raise RuntimeError('Selected object must be an Armature')
|
||||
|
||||
if pg.should_override_animation_data:
|
||||
animation_data_object = pg.animation_data_override
|
||||
else:
|
||||
animation_data_object = active_object
|
||||
|
||||
return animation_data_object
|
||||
|
||||
|
||||
def get_sequences_from_action(action: Action) -> List[Tuple[str, int, int]]:
|
||||
frame_start = int(action.frame_range[0])
|
||||
frame_end = int(action.frame_range[1])
|
||||
reversed_pattern = r'(.+)/(.+)'
|
||||
reversed_match = re.match(reversed_pattern, action.name)
|
||||
if reversed_match:
|
||||
forward_name = reversed_match.group(1)
|
||||
backwards_name = reversed_match.group(2)
|
||||
return [
|
||||
(forward_name, frame_start, frame_end),
|
||||
(backwards_name, frame_end, frame_start)
|
||||
]
|
||||
else:
|
||||
return [(action.name, frame_start, frame_end)]
|
||||
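The slash convention handled above lets a single Blender action drive both a forward and a reversed PSA sequence. A hedged sketch of the expected result (the action name and frame range are invented; an action only reports this frame range once it actually has keyframes):

```python
import bpy

# Suppose this action has keyframes spanning frames 1..20.
action = bpy.data.actions.new('DoorOpen/DoorClose')

# get_sequences_from_action(action) would then be expected to return
#   [('DoorOpen', 1, 20), ('DoorClose', 20, 1)]
# i.e. the name after the slash becomes a second sequence whose start frame is
# greater than its end frame, which the PSA builder walks backwards.
print(get_sequences_from_action(action))
```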
|
||||
|
||||
def get_sequences_from_action_pose_marker(action: Action, pose_markers: List[TimelineMarker], pose_marker: TimelineMarker, pose_marker_index: int) -> List[Tuple[str, int, int]]:
|
||||
frame_start = pose_marker.frame
|
||||
if pose_marker_index + 1 < len(pose_markers):
|
||||
frame_end = pose_markers[pose_marker_index + 1].frame
|
||||
else:
|
||||
frame_end = int(action.frame_range[1])
|
||||
reversed_pattern = r'(.+)/(.+)'
|
||||
reversed_match = re.match(reversed_pattern, pose_marker.name)
|
||||
if reversed_match:
|
||||
forward_name = reversed_match.group(1)
|
||||
backwards_name = reversed_match.group(2)
|
||||
return [
|
||||
(forward_name, frame_start, frame_end),
|
||||
(backwards_name, frame_end, frame_start)
|
||||
]
|
||||
else:
|
||||
return [(pose_marker.name, frame_start, frame_end)]
|
||||
|
||||
|
||||
def update_actions_and_timeline_markers(context: Context, armature: Armature):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
|
||||
# Clear actions and markers.
|
||||
pg.action_list.clear()
|
||||
pg.marker_list.clear()
|
||||
|
||||
# Get animation data.
|
||||
animation_data_object = get_animation_data_object(context)
|
||||
animation_data = animation_data_object.animation_data if animation_data_object else None
|
||||
|
||||
if animation_data is None:
|
||||
return
|
||||
|
||||
# Populate actions list.
|
||||
for action in bpy.data.actions:
|
||||
if not is_action_for_armature(armature, action):
|
||||
continue
|
||||
|
||||
if not action.name.startswith('#'):
|
||||
for (name, frame_start, frame_end) in get_sequences_from_action(action):
|
||||
item = pg.action_list.add()
|
||||
item.action = action
|
||||
item.name = name
|
||||
item.is_selected = False
|
||||
item.is_pose_marker = False
|
||||
item.frame_start = frame_start
|
||||
item.frame_end = frame_end
|
||||
|
||||
# Pose markers are not guaranteed to be in frame-order, so make sure that they are.
|
||||
pose_markers = sorted(action.pose_markers, key=lambda x: x.frame)
|
||||
for pose_marker_index, pose_marker in enumerate(pose_markers):
|
||||
if pose_marker.name.startswith('#'):
|
||||
continue
|
||||
for (name, frame_start, frame_end) in get_sequences_from_action_pose_marker(action, pose_markers, pose_marker, pose_marker_index):
|
||||
item = pg.action_list.add()
|
||||
item.action = action
|
||||
item.name = name
|
||||
item.is_selected = False
|
||||
item.is_pose_marker = True
|
||||
item.frame_start = frame_start
|
||||
item.frame_end = frame_end
|
||||
|
||||
# Populate timeline markers list.
|
||||
marker_names = [x.name for x in context.scene.timeline_markers]
|
||||
sequence_frame_ranges = get_timeline_marker_sequence_frame_ranges(animation_data, context, marker_names)
|
||||
|
||||
for marker_name in marker_names:
|
||||
if marker_name not in sequence_frame_ranges:
|
||||
continue
|
||||
if marker_name.startswith('#'):
|
||||
continue
|
||||
item = pg.marker_list.add()
|
||||
item.name = marker_name
|
||||
item.is_selected = False
|
||||
frame_start, frame_end = sequence_frame_ranges[marker_name]
|
||||
item.frame_start = frame_start
|
||||
item.frame_end = frame_end
|
||||
|
||||
|
||||
class PsaExportOperator(Operator, ExportHelper):
|
||||
bl_idname = 'psa_export.operator'
|
||||
bl_label = 'Export'
|
||||
@ -184,7 +356,7 @@ class PsaExportOperator(Operator, ExportHelper):
|
||||
default='')
|
||||
|
||||
def __init__(self):
|
||||
self.armature = None
|
||||
self.armature_object = None
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
@ -228,7 +400,6 @@ class PsaExportOperator(Operator, ExportHelper):
|
||||
col = layout.column()
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'should_use_original_sequence_names')
|
||||
col.prop(pg, 'sequence_name_prefix')
|
||||
col.prop(pg, 'sequence_name_suffix')
|
||||
|
||||
@ -240,7 +411,6 @@ class PsaExportOperator(Operator, ExportHelper):
|
||||
col = layout.column()
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'should_trim_timeline_marker_sequences')
|
||||
col.prop(pg, 'sequence_name_prefix')
|
||||
col.prop(pg, 'sequence_name_suffix')
|
||||
|
||||
@ -275,19 +445,6 @@ class PsaExportOperator(Operator, ExportHelper):
|
||||
# ROOT MOTION
|
||||
layout.prop(pg, 'root_motion', text='Root Motion')
|
||||
|
||||
def is_action_for_armature(self, action):
|
||||
if len(action.fcurves) == 0:
|
||||
return False
|
||||
bone_names = set([x.name for x in self.armature.data.bones])
|
||||
for fcurve in action.fcurves:
|
||||
match = re.match(r'pose\.bones\[\"([^\"]+)\"](\[\"([^\"]+)\"])?', fcurve.data_path)
|
||||
if not match:
|
||||
continue
|
||||
bone_name = match.group(1)
|
||||
if bone_name in bone_names:
|
||||
return True
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def _check_context(cls, context):
|
||||
if context.view_layer.objects.active is None:
|
||||
@ -302,35 +459,14 @@ class PsaExportOperator(Operator, ExportHelper):
|
||||
except RuntimeError as e:
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
self.armature = context.view_layer.objects.active
|
||||
pg: PsaExportPropertyGroup = getattr(context.scene, 'psa_export')
|
||||
|
||||
# Populate actions list.
|
||||
pg.action_list.clear()
|
||||
for action in bpy.data.actions:
|
||||
if not self.is_action_for_armature(action):
|
||||
continue
|
||||
item = pg.action_list.add()
|
||||
item.action = action
|
||||
item.name = action.name
|
||||
item.is_selected = False
|
||||
self.armature_object = context.view_layer.objects.active
|
||||
|
||||
update_action_names(context)
|
||||
|
||||
# Populate timeline markers list.
|
||||
pg.marker_list.clear()
|
||||
for marker in context.scene.timeline_markers:
|
||||
item = pg.marker_list.add()
|
||||
item.name = marker.name
|
||||
item.is_selected = False
|
||||
|
||||
if len(pg.action_list) == 0 and len(pg.marker_list) == 0:
|
||||
# If there are no actions at all, we have nothing to export, so just cancel the operation.
|
||||
self.report({'ERROR_INVALID_CONTEXT'}, 'There are no actions or timeline markers to export.')
|
||||
return {'CANCELLED'}
|
||||
update_actions_and_timeline_markers(context, self.armature_object.data)
|
||||
|
||||
# Populate bone groups list.
|
||||
populate_bone_group_list(self.armature, pg.bone_group_list)
|
||||
populate_bone_group_list(self.armature_object, pg.bone_group_list)
|
||||
|
||||
context.window_manager.fileselect_add(self)
|
||||
|
||||
@ -339,21 +475,51 @@ class PsaExportOperator(Operator, ExportHelper):
|
||||
def execute(self, context):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
|
||||
actions = [x.action for x in pg.action_list if x.is_selected]
|
||||
marker_names = [x.name for x in pg.marker_list if x.is_selected]
|
||||
# Ensure that we actually have items that we are going to be exporting.
|
||||
if pg.sequence_source == 'ACTIONS' and len(pg.action_list) == 0:
|
||||
raise RuntimeError('No actions were selected for export')
|
||||
elif pg.sequence_source == 'TIMELINE_MARKERS' and len(pg.marker_names) == 0:
|
||||
raise RuntimeError('No timeline markers were selected for export')
|
||||
|
||||
# Populate the export sequence list.
|
||||
animation_data_object = get_animation_data_object(context)
|
||||
animation_data = animation_data_object.animation_data
|
||||
|
||||
if animation_data is None:
|
||||
raise RuntimeError(f'No animation data for object \'{animation_data_object.name}\'')
|
||||
|
||||
export_sequences: List[PsaExportSequence] = []
|
||||
|
||||
if pg.sequence_source == 'ACTIONS':
|
||||
for action in filter(lambda x: x.is_selected, pg.action_list):
|
||||
if len(action.action.fcurves) == 0:
|
||||
continue
|
||||
export_sequence = PsaExportSequence()
|
||||
export_sequence.nla_state.action = action.action
|
||||
export_sequence.name = action.name
|
||||
export_sequence.nla_state.frame_start = action.frame_start
|
||||
export_sequence.nla_state.frame_end = action.frame_end
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, [action.action])
|
||||
export_sequences.append(export_sequence)
|
||||
elif pg.sequence_source == 'TIMELINE_MARKERS':
|
||||
for marker in pg.marker_list:
|
||||
export_sequence = PsaExportSequence()
|
||||
export_sequence.name = marker.name
|
||||
export_sequence.nla_state.action = None
|
||||
export_sequence.nla_state.frame_start = marker.frame_start
|
||||
export_sequence.nla_state.frame_end = marker.frame_end
|
||||
nla_strips_actions = set(
|
||||
map(lambda x: x.action, get_nla_strips_in_timeframe(animation_data, marker.frame_start, marker.frame_end)))
|
||||
export_sequence.fps = get_sequence_fps(context, pg.fps_source, pg.fps_custom, nla_strips_actions)
|
||||
export_sequences.append(export_sequence)
|
||||
else:
|
||||
raise ValueError(f'Unhandled sequence source: {pg.sequence_source}')
|
||||
|
||||
options = PsaBuildOptions()
|
||||
options.should_override_animation_data = pg.should_override_animation_data
|
||||
options.animation_data_override = pg.animation_data_override
|
||||
options.fps_source = pg.fps_source
|
||||
options.fps_custom = pg.fps_custom
|
||||
options.sequence_source = pg.sequence_source
|
||||
options.actions = actions
|
||||
options.marker_names = marker_names
|
||||
options.animation_data = animation_data
|
||||
options.sequences = export_sequences
|
||||
options.bone_filter_mode = pg.bone_filter_mode
|
||||
options.bone_group_indices = [x.index for x in pg.bone_group_list if x.is_selected]
|
||||
options.should_use_original_sequence_names = pg.should_use_original_sequence_names
|
||||
options.should_trim_timeline_marker_sequences = pg.should_trim_timeline_marker_sequences
|
||||
options.should_ignore_bone_name_restrictions = pg.should_ignore_bone_name_restrictions
|
||||
options.sequence_name_prefix = pg.sequence_name_prefix
|
||||
options.sequence_name_suffix = pg.sequence_name_suffix
|
||||
@ -391,6 +557,11 @@ def filter_sequences(pg: PsaExportPropertyGroup, sequences) -> List[int]:
|
||||
if hasattr(sequence, 'action') and sequence.action.asset_data is not None:
|
||||
flt_flags[i] &= ~bitflag_filter_item
|
||||
|
||||
if not pg.sequence_filter_pose_marker:
|
||||
for i, sequence in enumerate(sequences):
|
||||
if hasattr(sequence, 'is_pose_marker') and sequence.is_pose_marker:
|
||||
flt_flags[i] &= ~bitflag_filter_item
|
||||
|
||||
return flt_flags
|
||||
|
||||
|
||||
@ -410,10 +581,19 @@ class PSA_UL_ExportSequenceList(UIList):
|
||||
self.use_filter_show = True
|
||||
|
||||
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
|
||||
item = typing.cast(PsaExportActionListItem, item)
|
||||
is_pose_marker = hasattr(item, 'is_pose_marker') and item.is_pose_marker
|
||||
layout.prop(item, 'is_selected', icon_only=True, text=item.name)
|
||||
if hasattr(item, 'action') and item.action.asset_data is not None:
|
||||
layout.label(text='', icon='ASSET_MANAGER')
|
||||
|
||||
row = layout.row(align=True)
|
||||
row.alignment = 'RIGHT'
|
||||
if item.frame_end < item.frame_start:
|
||||
row.label(text='', icon='FRAME_PREV')
|
||||
if is_pose_marker:
|
||||
row.label(text=item.action.name, icon='PMARKER')
|
||||
|
||||
def draw_filter(self, context, layout):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
row = layout.row()
|
||||
@ -425,12 +605,14 @@ class PSA_UL_ExportSequenceList(UIList):
|
||||
if pg.sequence_source == 'ACTIONS':
|
||||
subrow = row.row(align=True)
|
||||
subrow.prop(pg, 'sequence_filter_asset', icon_only=True, icon='ASSET_MANAGER')
|
||||
subrow.prop(pg, 'sequence_filter_pose_marker', icon_only=True, icon='PMARKER')
|
||||
|
||||
def filter_items(self, context, data, prop):
|
||||
pg = getattr(context.scene, 'psa_export')
|
||||
actions = getattr(data, prop)
|
||||
flt_flags = filter_sequences(pg, actions)
|
||||
flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(actions, 'name')
|
||||
# flt_neworder = bpy.types.UI_UL_list.sort_items_by_name(actions, 'name')
|
||||
flt_neworder = list(range(len(actions)))
|
||||
return flt_flags, flt_neworder
|
||||
|
||||
|
||||
|
@ -8,7 +8,7 @@ from typing import List, Optional
|
||||
import bpy
|
||||
import numpy
|
||||
from bpy.props import StringProperty, BoolProperty, CollectionProperty, PointerProperty, IntProperty, EnumProperty
|
||||
from bpy.types import Operator, UIList, PropertyGroup, Panel, FCurve
|
||||
from bpy.types import Operator, UIList, PropertyGroup, FCurve
|
||||
from bpy_extras.io_utils import ImportHelper
|
||||
from mathutils import Vector, Quaternion
|
||||
|
||||
@ -215,7 +215,6 @@ def import_psa(psa_reader: PsaReader, armature_object: bpy.types.Object, options
|
||||
|
||||
# Write meta-data.
|
||||
if options.should_write_metadata:
|
||||
action['psa_sequence_name'] = sequence_name
|
||||
action['psa_sequence_fps'] = sequence.fps
|
||||
|
||||
action.use_fake_user = options.should_use_fake_user
|
||||
@ -243,14 +242,14 @@ class PsaImportActionListItem(PropertyGroup):
|
||||
is_selected: BoolProperty(default=False, options=empty_set)
|
||||
|
||||
|
||||
def load_psa_file(context):
|
||||
def load_psa_file(context, filepath: str):
|
||||
pg = context.scene.psa_import
|
||||
pg.sequence_list.clear()
|
||||
pg.psa.bones.clear()
|
||||
pg.psa_error = ''
|
||||
try:
|
||||
# Read the file and populate the action list.
|
||||
p = os.path.abspath(pg.psa_file_path)
|
||||
p = os.path.abspath(filepath)
|
||||
psa_reader = PsaReader(p)
|
||||
for sequence in psa_reader.sequences.values():
|
||||
item = pg.sequence_list.add()
|
||||
@ -262,8 +261,8 @@ def load_psa_file(context):
|
||||
pg.psa_error = str(e)
|
||||
|
||||
|
||||
def on_psa_file_path_updated(property_, context):
|
||||
load_psa_file(context)
|
||||
def on_psa_file_path_updated(cls, context):
|
||||
load_psa_file(context, cls.filepath)
|
||||
|
||||
|
||||
class PsaBonePropertyGroup(PropertyGroup):
|
||||
@ -276,7 +275,6 @@ class PsaDataPropertyGroup(PropertyGroup):
|
||||
|
||||
|
||||
class PsaImportPropertyGroup(PropertyGroup):
|
||||
psa_file_path: StringProperty(default='', options=empty_set, update=on_psa_file_path_updated, name='PSA File Path')
|
||||
psa_error: StringProperty(default='')
|
||||
psa: PointerProperty(type=PsaDataPropertyGroup)
|
||||
sequence_list: CollectionProperty(type=PsaImportActionListItem)
|
||||
@ -290,7 +288,7 @@ class PsaImportPropertyGroup(PropertyGroup):
|
||||
options=empty_set)
|
||||
should_use_action_name_prefix: BoolProperty(default=False, name='Prefix Action Name', options=empty_set)
|
||||
action_name_prefix: StringProperty(default='', name='Prefix', options=empty_set)
|
||||
should_overwrite: BoolProperty(default=False, name='Reuse Existing Actions', options=empty_set,
|
||||
should_overwrite: BoolProperty(default=False, name='Overwrite', options=empty_set,
|
||||
description='If an action with a matching name already exists, the existing action '
|
||||
'will have it\'s data overwritten instead of a new action being created')
|
||||
should_write_keyframes: BoolProperty(default=True, name='Keyframes', options=empty_set)
|
||||
@ -477,117 +475,6 @@ class PsaImportSequencesDeselectAll(Operator):
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class PSA_PT_ImportPanel_Advanced(Panel):
|
||||
bl_space_type = 'PROPERTIES'
|
||||
bl_region_type = 'WINDOW'
|
||||
bl_label = 'Advanced'
|
||||
bl_options = {'DEFAULT_CLOSED'}
|
||||
bl_parent_id = 'PSA_PT_ImportPanel'
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
pg = getattr(context.scene, 'psa_import')
|
||||
|
||||
col = layout.column()
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'bone_mapping_mode')
|
||||
|
||||
if pg.should_write_keyframes:
|
||||
col = layout.column(heading='Keyframes')
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'should_convert_to_samples')
|
||||
col.separator()
|
||||
|
||||
col = layout.column(heading='Options')
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'should_use_fake_user')
|
||||
col.prop(pg, 'should_stash')
|
||||
col.prop(pg, 'should_use_action_name_prefix')
|
||||
|
||||
if pg.should_use_action_name_prefix:
|
||||
col.prop(pg, 'action_name_prefix')
|
||||
|
||||
|
||||
class PSA_PT_ImportPanel(Panel):
|
||||
bl_space_type = 'PROPERTIES'
|
||||
bl_region_type = 'WINDOW'
|
||||
bl_label = 'PSA Import'
|
||||
bl_context = 'data'
|
||||
bl_category = 'PSA Import'
|
||||
bl_options = {'DEFAULT_CLOSED'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return context.view_layer.objects.active.type == 'ARMATURE'
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
pg = getattr(context.scene, 'psa_import')
|
||||
|
||||
row = layout.row(align=True)
|
||||
row.operator(PsaImportSelectFile.bl_idname, text='', icon='FILEBROWSER')
|
||||
row.prop(pg, 'psa_file_path', text='')
|
||||
row.operator(PsaImportFileReload.bl_idname, text='', icon='FILE_REFRESH')
|
||||
|
||||
if pg.psa_error != '':
|
||||
row = layout.row()
|
||||
row.label(text='File could not be read', icon='ERROR')
|
||||
|
||||
box = layout.box()
|
||||
|
||||
box.label(text=f'Sequences ({len(pg.sequence_list)})', icon='ARMATURE_DATA')
|
||||
|
||||
# select
|
||||
rows = max(3, min(len(pg.sequence_list), 10))
|
||||
|
||||
row = box.row()
|
||||
col = row.column()
|
||||
|
||||
row2 = col.row(align=True)
|
||||
row2.label(text='Select')
|
||||
row2.operator(PsaImportSequencesFromText.bl_idname, text='', icon='TEXT')
|
||||
row2.operator(PsaImportSequencesSelectAll.bl_idname, text='All', icon='CHECKBOX_HLT')
|
||||
row2.operator(PsaImportSequencesDeselectAll.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
||||
|
||||
col = col.row()
|
||||
col.template_list('PSA_UL_ImportSequenceList', '', pg, 'sequence_list', pg, 'sequence_list_index', rows=rows)
|
||||
|
||||
col = layout.column(heading='')
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'should_overwrite')
|
||||
|
||||
col = layout.column(heading='Write')
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'should_write_keyframes')
|
||||
col.prop(pg, 'should_write_metadata')
|
||||
|
||||
selected_sequence_count = sum(map(lambda x: x.is_selected, pg.sequence_list))
|
||||
|
||||
row = layout.row()
|
||||
|
||||
import_button_text = 'Import'
|
||||
if selected_sequence_count > 0:
|
||||
import_button_text = f'Import ({selected_sequence_count})'
|
||||
|
||||
row.operator(PsaImportOperator.bl_idname, text=import_button_text)
|
||||
|
||||
|
||||
class PsaImportFileReload(Operator):
|
||||
bl_idname = 'psa_import.file_reload'
|
||||
bl_label = 'Refresh'
|
||||
bl_options = {'INTERNAL'}
|
||||
bl_description = 'Refresh the PSA file'
|
||||
|
||||
def execute(self, context):
|
||||
load_psa_file(context)
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class PsaImportSelectFile(Operator):
|
||||
bl_idname = 'psa_import.select_file'
|
||||
bl_label = 'Select'
|
||||
@ -605,23 +492,32 @@ class PsaImportSelectFile(Operator):
|
||||
return {"RUNNING_MODAL"}
|
||||
|
||||
|
||||
class PsaImportOperator(Operator):
|
||||
class PsaImportOperator(Operator, ImportHelper):
|
||||
bl_idname = 'psa_import.import'
|
||||
bl_label = 'Import'
|
||||
bl_description = 'Import the selected animations into the scene as actions'
|
||||
bl_options = {'INTERNAL', 'UNDO'}
|
||||
|
||||
filename_ext = '.psa'
|
||||
filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
|
||||
filepath: StringProperty(
|
||||
name='File Path',
|
||||
description='File path used for importing the PSA file',
|
||||
maxlen=1024,
|
||||
default='',
|
||||
update=on_psa_file_path_updated)
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
pg = getattr(context.scene, 'psa_import')
|
||||
active_object = context.view_layer.objects.active
|
||||
if active_object is None or active_object.type != 'ARMATURE':
|
||||
cls.poll_message_set('The active object must be an armature')
|
||||
return False
|
||||
return any(map(lambda x: x.is_selected, pg.sequence_list))
|
||||
return True
|
||||
|
||||
def execute(self, context):
|
||||
pg = getattr(context.scene, 'psa_import')
|
||||
psa_reader = PsaReader(pg.psa_file_path)
|
||||
psa_reader = PsaReader(self.filepath)
|
||||
sequence_names = [x.action_name for x in pg.sequence_list if x.is_selected]
|
||||
|
||||
options = PsaImportOptions()
|
||||
@ -646,27 +542,72 @@ class PsaImportOperator(Operator):
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
|
||||
# Attempt to load the PSA file for the pre-selected file.
|
||||
load_psa_file(context, self.filepath)
|
||||
|
||||
class PsaImportFileSelectOperator(Operator, ImportHelper):
|
||||
bl_idname = 'psa_import.file_select'
|
||||
bl_label = 'File Select'
|
||||
bl_options = {'INTERNAL'}
|
||||
filename_ext = '.psa'
|
||||
filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
|
||||
filepath: StringProperty(
|
||||
name='File Path',
|
||||
description='File path used for importing the PSA file',
|
||||
maxlen=1024,
|
||||
default='')
|
||||
|
||||
def invoke(self, context, event):
|
||||
context.window_manager.fileselect_add(self)
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
def execute(self, context):
|
||||
def draw(self, context: bpy.types.Context):
|
||||
layout = self.layout
|
||||
pg = getattr(context.scene, 'psa_import')
|
||||
pg.psa_file_path = self.filepath
|
||||
return {'FINISHED'}
|
||||
|
||||
if pg.psa_error:
|
||||
row = layout.row()
|
||||
row.label(text='Select a PSA file', icon='ERROR')
|
||||
else:
|
||||
box = layout.box()
|
||||
|
||||
box.label(text=f'Sequences ({len(pg.sequence_list)})', icon='ARMATURE_DATA')
|
||||
|
||||
# Select buttons.
|
||||
rows = max(3, min(len(pg.sequence_list), 10))
|
||||
|
||||
row = box.row()
|
||||
col = row.column()
|
||||
|
||||
row2 = col.row(align=True)
|
||||
row2.label(text='Select')
|
||||
row2.operator(PsaImportSequencesFromText.bl_idname, text='', icon='TEXT')
|
||||
row2.operator(PsaImportSequencesSelectAll.bl_idname, text='All', icon='CHECKBOX_HLT')
|
||||
row2.operator(PsaImportSequencesDeselectAll.bl_idname, text='None', icon='CHECKBOX_DEHLT')
|
||||
|
||||
col = col.row()
|
||||
col.template_list('PSA_UL_ImportSequenceList', '', pg, 'sequence_list', pg, 'sequence_list_index', rows=rows)
|
||||
|
||||
col = layout.column(heading='')
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'should_overwrite')
|
||||
|
||||
col = layout.column(heading='Write')
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'should_write_keyframes')
|
||||
col.prop(pg, 'should_write_metadata')
|
||||
|
||||
col = layout.column()
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'bone_mapping_mode')
|
||||
|
||||
if pg.should_write_keyframes:
|
||||
col = layout.column(heading='Keyframes')
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'should_convert_to_samples')
|
||||
col.separator()
|
||||
|
||||
col = layout.column(heading='Options')
|
||||
col.use_property_split = True
|
||||
col.use_property_decorate = False
|
||||
col.prop(pg, 'should_use_fake_user')
|
||||
col.prop(pg, 'should_stash')
|
||||
col.prop(pg, 'should_use_action_name_prefix')
|
||||
|
||||
if pg.should_use_action_name_prefix:
|
||||
col.prop(pg, 'action_name_prefix')
|
||||
|
||||
|
||||
classes = (
|
||||
@ -680,10 +621,6 @@ classes = (
|
||||
PsaImportSequencesSelectAll,
|
||||
PsaImportSequencesDeselectAll,
|
||||
PsaImportSequencesFromText,
|
||||
PsaImportFileReload,
|
||||
PSA_PT_ImportPanel,
|
||||
PSA_PT_ImportPanel_Advanced,
|
||||
PsaImportOperator,
|
||||
PsaImportFileSelectOperator,
|
||||
PsaImportSelectFile,
|
||||
)
|
||||
|
@@ -105,10 +105,14 @@ class Psk(object):
    def has_vertex_normals(self):
        return len(self.vertex_normals) > 0

    @property
    def has_material_references(self):
        return len(self.material_references) > 0

    @property
    def has_morph_data(self):
        return len(self.morph_infos) > 0

    def __init__(self):
        self.points: List[Vector3] = []
        self.wedges: List[Psk.Wedge] = []
@@ -121,3 +125,4 @@ class Psk(object):
        self.vertex_normals: List[Vector3] = []
        self.morph_infos: List[Psk.MorphInfo] = []
        self.morph_data: List[Psk.MorphData] = []
        self.material_references: List[str] = []
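A short usage sketch of the new convenience properties, assuming a PSK has already been loaded with `read_psk` from `psk/reader.py` (shown further down); the file path is made up.

```python
# Assumes read_psk is in scope; the path is illustrative only.
psk = read_psk('C:/extracted/SomeMesh.psk')

if psk.has_morph_data:
    print(f'{len(psk.morph_infos)} morph target(s) present')
if psk.has_material_references:
    # Populated from the UEViewer *.props.txt sidecar, when present (see the reader changes below).
    print(psk.material_references)
```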
@@ -258,7 +258,7 @@ class PskExportPropertyGroup(PropertyGroup):
        default=False,
        name='Ignore Bone Name Restrictions',
        description='Bone names restrictions will be ignored. Note that bone names without properly formatted names '
                    'cannot be referenced in scripts.'
                    'cannot be referenced in scripts'
    )
@@ -7,19 +7,20 @@ import bmesh
import bpy
import numpy as np
from bpy.props import BoolProperty, EnumProperty, FloatProperty, StringProperty
from bpy.types import Operator, PropertyGroup, VertexGroup
from bpy.types import Operator, VertexGroup
from bpy_extras.io_utils import ImportHelper
from mathutils import Quaternion, Vector, Matrix

from .data import Psk
from .reader import read_psk
from ..helpers import rgb_to_srgb
from ..helpers import rgb_to_srgb, is_bdk_addon_loaded


class PskImportOptions(object):
class PskImportOptions:
    def __init__(self):
        self.name = ''
        self.should_import_mesh = True
        self.should_reuse_materials = True
        self.should_import_vertex_colors = True
        self.vertex_color_space = 'sRGB'
        self.should_import_vertex_normals = True
@@ -27,9 +28,10 @@ class PskImportOptions(object):
        self.should_import_skeleton = True
        self.should_import_shape_keys = True
        self.bone_length = 1.0
        self.should_import_materials = True


class ImportBone(object):
class ImportBone:
    """
    Intermediate bone type for the purpose of construction.
    """
@@ -126,10 +128,24 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
    mesh_object = bpy.data.objects.new(options.name, mesh_data)

    # MATERIALS
    for material in psk.materials:
        # TODO: re-use of materials should be an option
        bpy_material = bpy.data.materials.new(material.name.decode('utf-8'))
        mesh_data.materials.append(bpy_material)
    if options.should_import_materials:
        for material_index, psk_material in enumerate(psk.materials):
            material_name = psk_material.name.decode('utf-8')
            material = None
            if options.should_reuse_materials and material_name in bpy.data.materials:
                # Material already exists, just re-use it.
                material = bpy.data.materials[material_name]
            elif is_bdk_addon_loaded() and psk.has_material_references:
                # Material does not yet exist and we have the BDK addon installed.
                # Attempt to load it using BDK addon's operator.
                material_reference = psk.material_references[material_index]
                if material_reference and bpy.ops.bdk.link_material(reference=material_reference) == {'FINISHED'}:
                    material = bpy.data.materials[material_name]
            else:
                # Just create a blank material.
                material = bpy.data.materials.new(material_name)
                material.use_nodes = True
            mesh_data.materials.append(material)

    bm = bmesh.new()
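`is_bdk_addon_loaded` is imported from `..helpers`, but its body is not part of this diff. A check along the lines below would satisfy the call site above; this is a hypothetical sketch, and the module name `'bdk'` is an assumption — the real helper may test for the add-on differently.

```python
import bpy


def is_bdk_addon_loaded() -> bool:
    # Hypothetical sketch: consider the BDK add-on loaded if its module name
    # appears among the enabled add-ons. The actual helper in ..helpers may differ.
    return 'bdk' in bpy.context.preferences.addons
```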
@@ -266,7 +282,19 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
empty_set = set()


class PskImportPropertyGroup(PropertyGroup):
class PskImportOperator(Operator, ImportHelper):
    bl_idname = 'import_scene.psk'
    bl_label = 'Import'
    bl_options = {'INTERNAL', 'UNDO', 'PRESET'}
    __doc__ = 'Load a PSK file'
    filename_ext = '.psk'
    filter_glob: StringProperty(default='*.psk;*.pskx', options={'HIDDEN'})
    filepath: StringProperty(
        name='File Path',
        description='File path used for exporting the PSK file',
        maxlen=1024,
        default='')

    should_import_vertex_colors: BoolProperty(
        default=True,
        options=empty_set,
@@ -301,6 +329,17 @@ class PskImportPropertyGroup(PropertyGroup):
        options=empty_set,
        description='Import mesh'
    )
    should_import_materials: BoolProperty(
        default=True,
        name='Import Materials',
        options=empty_set,
    )
    should_reuse_materials: BoolProperty(
        default=True,
        name='Reuse Materials',
        options=empty_set,
        description='Existing materials with matching names will be reused when available'
    )
    should_import_skeleton: BoolProperty(
        default=True,
        name='Import Skeleton',
@@ -318,40 +357,25 @@ class PskImportPropertyGroup(PropertyGroup):
    )
    should_import_shape_keys: BoolProperty(
        default=True,
        name='Import Shape Keys',
        name='Shape Keys',
        options=empty_set,
        description='Import shape keys, if available'
    )


class PskImportOperator(Operator, ImportHelper):
    bl_idname = 'import.psk'
    bl_label = 'Import'
    bl_options = {'INTERNAL', 'UNDO'}
    __doc__ = 'Load a PSK file'
    filename_ext = '.psk'
    filter_glob: StringProperty(default='*.psk;*.pskx', options={'HIDDEN'})
    filepath: StringProperty(
        name='File Path',
        description='File path used for exporting the PSK file',
        maxlen=1024,
        default='')

    def execute(self, context):
        pg = getattr(context.scene, 'psk_import')

        psk = read_psk(self.filepath)

        options = PskImportOptions()
        options.name = os.path.splitext(os.path.basename(self.filepath))[0]
        options.should_import_mesh = pg.should_import_mesh
        options.should_import_extra_uvs = pg.should_import_extra_uvs
        options.should_import_vertex_colors = pg.should_import_vertex_colors
        options.should_import_vertex_normals = pg.should_import_vertex_normals
        options.vertex_color_space = pg.vertex_color_space
        options.should_import_skeleton = pg.should_import_skeleton
        options.should_import_shape_keys = pg.should_import_shape_keys
        options.bone_length = pg.bone_length
        options.should_import_mesh = self.should_import_mesh
        options.should_import_extra_uvs = self.should_import_extra_uvs
        options.should_import_vertex_colors = self.should_import_vertex_colors
        options.should_import_vertex_normals = self.should_import_vertex_normals
        options.vertex_color_space = self.vertex_color_space
        options.should_import_skeleton = self.should_import_skeleton
        options.bone_length = self.bone_length
        options.should_import_materials = self.should_import_materials
        options.should_import_shape_keys = self.should_import_shape_keys

        result = import_psk(psk, context, options)

@@ -365,28 +389,27 @@ class PskImportOperator(Operator, ImportHelper):
        return {'FINISHED'}

    def draw(self, context):
        pg = getattr(context.scene, 'psk_import')
        layout = self.layout
        layout.prop(pg, 'should_import_mesh')
        layout.prop(self, 'should_import_materials')
        layout.prop(self, 'should_import_mesh')
        row = layout.column()
        row.use_property_split = True
        row.use_property_decorate = False
        if pg.should_import_mesh:
            row.prop(pg, 'should_import_vertex_normals')
            row.prop(pg, 'should_import_extra_uvs')
            row.prop(pg, 'should_import_vertex_colors')
            if pg.should_import_vertex_colors:
                row.prop(pg, 'vertex_color_space')
        layout.prop(pg, 'should_import_skeleton')
        if self.should_import_mesh:
            row.prop(self, 'should_import_vertex_normals')
            row.prop(self, 'should_import_extra_uvs')
            row.prop(self, 'should_import_vertex_colors')
            if self.should_import_vertex_colors:
                row.prop(self, 'vertex_color_space')
            row.prop(self, 'should_import_shape_keys')
        layout.prop(self, 'should_import_skeleton')
        row = layout.column()
        row.use_property_split = True
        row.use_property_decorate = False
        if pg.should_import_skeleton:
            row.prop(pg, 'bone_length')
        layout.prop(pg, 'should_import_shape_keys')
        if self.should_import_skeleton:
            row.prop(self, 'bone_length')


classes = (
    PskImportOperator,
    PskImportPropertyGroup,
)
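Because the operator is now registered as `import_scene.psk` with its options declared directly on the operator rather than on a scene property group, an import can also be driven from a script via `bpy.ops`. A hedged sketch is shown below; the file path and option values are illustrative, while the operator name and property names come from the diff above.

```python
import bpy

# Property names match the operator annotations above; the path is made up.
bpy.ops.import_scene.psk(
    filepath='C:/extracted/SomeMesh.psk',
    should_import_materials=True,
    should_reuse_materials=True,
    should_import_shape_keys=True,
)
```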
@@ -1,5 +1,8 @@
import ctypes
import os
import re
import warnings
from pathlib import Path

from .data import *

@@ -13,8 +16,22 @@ def _read_types(fp, data_class, section: Section, data):
        offset += section.data_size


def _read_material_references(path: str) -> List[str]:
    property_file_path = Path(path).with_suffix('.props.txt')
    if not property_file_path.is_file():
        # Property file does not exist.
        return []
    # Do a crude regex match to find the Material list entries.
    contents = property_file_path.read_text()
    pattern = r"Material\s*=\s*([^\s^,]+)"
    return re.findall(pattern, contents)


def read_psk(path: str) -> Psk:

    psk = Psk()

    # Read the PSK file sections.
    with open(path, 'rb') as fp:
        while fp.read(1):
            fp.seek(-1, 1)
@@ -53,5 +70,12 @@ def read_psk(path: str) -> Psk:
            else:
                # Section is not handled, skip it.
                fp.seek(section.data_size * section.data_count, os.SEEK_CUR)
                print(f'Unrecognized section "{section.name} at position {fp.tell():15}"')
                warnings.warn(f'Unrecognized section "{section.name} at position {fp.tell():15}"')

    '''
    UEViewer exports a sidecar file (*.props.txt) with fully-qualified reference paths for each material
    (e.g., Texture'Package.Group.Object').
    '''
    psk.material_references = _read_material_references(path)

    return psk
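To illustrate `_read_material_references`, the snippet below applies the regex from the diff to a made-up `.props.txt` excerpt in the UEViewer style the docstring describes; the package and object names are invented.

```python
import re

sample = """
Materials[0] =
{
    Material = Texture'MyPackage.Skins.Body'
}
Materials[1] =
{
    Material = Shader'MyPackage.Shaders.Visor'
}
"""

# Prints: ["Texture'MyPackage.Skins.Body'", "Shader'MyPackage.Shaders.Visor'"]
print(re.findall(r"Material\s*=\s*([^\s^,]+)", sample))
```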
@@ -1,5 +1,6 @@
import bpy.props
from bpy.props import StringProperty, IntProperty, BoolProperty
from bpy.types import PropertyGroup, UIList, UILayout, Context, AnyType
from bpy.types import PropertyGroup, UIList, UILayout, Context, AnyType, Operator


class PSX_UL_BoneGroupList(UIList):
@@ -11,6 +12,56 @@ class PSX_UL_BoneGroupList(UIList):
        row.label(text=str(getattr(item, 'count')), icon='BONE_DATA')


class PSX_OT_MaterialPathAdd(Operator):
    bl_idname = 'psx.material_paths_add'
    bl_label = 'Add Material Path'
    bl_options = {'INTERNAL'}

    directory: bpy.props.StringProperty(subtype='DIR_PATH', options={'HIDDEN'})
    filter_folder: bpy.props.BoolProperty(default=True, options={'HIDDEN'})

    def invoke(self, context: 'Context', event: 'Event'):
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def execute(self, context: 'Context'):
        m = context.preferences.addons[__package__].preferences.material_path_list.add()
        m.path = self.directory
        return {'FINISHED'}


class PSX_OT_MaterialPathRemove(Operator):
    bl_idname = 'psx.material_paths_remove'
    bl_label = 'Remove Material Path'
    bl_options = {'INTERNAL'}

    @classmethod
    def poll(cls, context: 'Context'):
        preferences = context.preferences.addons[__package__].preferences
        return preferences.material_path_index >= 0

    def execute(self, context: 'Context'):
        preferences = context.preferences.addons[__package__].preferences
        preferences.material_path_list.remove(preferences.material_path_index)
        return {'FINISHED'}


class PSX_UL_MaterialPathList(UIList):

    def draw_item(self,
                  context: 'Context',
                  layout: 'UILayout',
                  data: 'AnyType',
                  item: 'AnyType',
                  icon: int,
                  active_data: 'AnyType',
                  active_property: str,
                  index: int = 0,
                  flt_flag: int = 0):
        row = layout.row()
        row.label(text=getattr(item, 'path'))


class BoneGroupListItem(PropertyGroup):
    name: StringProperty()
    index: IntProperty()
@@ -21,4 +72,7 @@ class BoneGroupListItem(PropertyGroup):
classes = (
    BoneGroupListItem,
    PSX_UL_BoneGroupList,
    PSX_UL_MaterialPathList,
    PSX_OT_MaterialPathAdd,
    PSX_OT_MaterialPathRemove,
)
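The two material-path operators above assume the add-on preferences expose a `material_path_list` collection (items carrying a `path` string, as drawn by `PSX_UL_MaterialPathList`) and a `material_path_index` integer; those declarations are not part of this diff. A hypothetical sketch of what they could look like, with names other than the two properties being assumptions:

```python
import bpy
from bpy.props import CollectionProperty, IntProperty, StringProperty
from bpy.types import AddonPreferences, PropertyGroup


class MaterialPathListItem(PropertyGroup):
    # Each entry only needs the directory path that the UIList displays.
    path: StringProperty(subtype='DIR_PATH')


class PsxAddonPreferences(AddonPreferences):
    bl_idname = __package__  # must match the add-on's package name

    material_path_list: CollectionProperty(type=MaterialPathListItem)
    material_path_index: IntProperty(default=-1)

# Note: MaterialPathListItem must be registered before PsxAddonPreferences so the
# CollectionProperty can reference it.
```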