Mirror of https://github.com/DarklightGames/io_scene_psk_psa.git (synced 2025-02-08 06:48:19 +01:00)

Merge branch 'feature-importers'

Commit: dba6983ae0

README.md (25 lines changed)
@@ -1,4 +1,4 @@
-This Blender add-on allows you to export meshes and animations to the [PSK and PSA file formats](https://wiki.beyondunreal.com/PSK_%26_PSA_file_formats).
+This Blender add-on allows you to import and export meshes and animations to the [PSK and PSA file formats](https://wiki.beyondunreal.com/PSK_%26_PSA_file_formats).
 
 # Installation
 
 1. Download the zip file for the latest version from the [releases](https://github.com/DarklightGames/io_export_psk_psa/releases) page.
@@ -7,7 +7,7 @@ This Blender add-on allows you to export meshes and animations to the [PSK and P
 4. Select the "Add-ons" tab.
 5. Click the "Install..." button.
 6. Select the .zip file that you downloaded earlier and click "Install Add-on".
-7. Enable the newly added "Import-Export: PSK/PSA Exporter" addon.
+7. Enable the newly added "Import-Export: PSK/PSA Importer/Exporter" addon.
 
 # Usage
 
 ## Exporting a PSK
@@ -15,15 +15,26 @@ This Blender add-on allows you to export meshes and animations to the [PSK and P
 3. Navigate to File > Export > Unreal PSK (.psk)
 4. Enter the file name and click "Export".
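The export steps above can also be driven from a script. The following is a minimal sketch only and is not part of the README diff; it assumes a valid export selection (the mesh and its armature) already exists in the scene, and the operator id `export.psk` is the one registered by the exporter module in this repository. The output path is hypothetical.

```python
import bpy

# Hypothetical output path; adjust for your own machine.
output_path = 'C:/exports/my_model.psk'

# Invoke the PSK export operator (bl_idname 'export.psk') directly.
# The current selection is used as the export input, so select the mesh first.
bpy.ops.export.psk(filepath=output_path)
```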
+
+## Importing a PSK
+
+1. Navigate to File > Import > Unreal PSK (.psk)
+2. Select the PSK file you want to import and click "Import".
 
 ## Exporting a PSA
 
 1. Select the armature objects you wish to export.
 2. Navigate to File > Export > Unreal PSA (.psa)
 3. Enter the file name and click "Export".
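Scripted PSA export is possible too, but note that the action list shown in the export dialog is populated in the operator's invoke() step, which scans bpy.data.actions and pre-selects the actions whose f-curves target bones of the active armature. A minimal sketch, not part of the README, assuming an animated armature is the active object:

```python
import bpy

# Open the PSA export dialog; running it with 'INVOKE_DEFAULT' makes invoke()
# populate and pre-select the action list before the file browser appears.
bpy.ops.export.psa('INVOKE_DEFAULT')
```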
+
+## Importing a PSA
+
+1. Select the armature object that you wish to import actions to.
+2. Navigate to the Object Data Properties tab of the Properties editor.
+3. Navigate to the PSA Import panel.
+4. Click "Select PSA File".
+5. Select the PSA file that you want to import animations from and click "Select".
+6. In the Actions box, select which animations you want to import.
+7. Click "Import".
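The same panel workflow can be sketched in Python. This is an illustration only and not part of the README: the `psa_import` scene property group and the `psa_import.import` operator id come from the importer added in this commit, the file path is hypothetical, and an armature whose bone names match the PSA must be the active object.

```python
import bpy

scene = bpy.context.scene

# Assigning the path fires the update callback that reads the PSA file and
# fills the action list (every sequence is selected after loading).
scene.psa_import.psa_file_path = 'C:/exports/my_animations.psa'

# Optionally narrow the selection down before importing.
for item in scene.psa_import.action_list:
    item.is_selected = item.action_name.startswith('Run')

# 'import' is a Python keyword, so the operator is looked up with getattr.
getattr(bpy.ops.psa_import, 'import')()
```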
 
 # FAQ
 
-## Can I use this addon to import PSK and PSA files?
+## Why are the mesh normals not accurate when importing a PSK extracted from [UE Viewer](https://www.gildor.org/en/projects/umodel)?
 
-Currently, no.
+If preserving the mesh normals of models is important for your workflow, it is *not recommended* to export PSK files from UE Viewer. This is because UE Viewer makes no attempt to reconstruct the original [smoothing groups](https://en.wikipedia.org/wiki/Smoothing_group). As a result, the normals of imported PSK files will be incorrect when imported into Blender and will need to be manually fixed.
 
-Presumably you are using this in concert with the [UE Viewer](https://www.gildor.org/en/projects/umodel) program to import extracted meshes. It is *not recommended* to export PSK/PSA from UE Viewer since it [does not preserve smoothing groups](https://github.com/gildor2/UEViewer/issues/235). As a result, the shading of imported models will be incorrect and will need to be manually fixed. Instead, it is recommended to export meshes to the glTF format for import into Blender since it preserves the correct mesh shading.
+As a workaround, it is recommended to export [glTF](https://en.wikipedia.org/wiki/GlTF) meshes out of UE Viewer instead, since the glTF format has support for explicit normals and UE Viewer can correctly preserve the mesh normals on export. Note, however, that the imported glTF armature may have its bones oriented incorrectly when imported into Blender. To mitigate this, you can combine the armature of the PSK and the mesh of the glTF for best results.
-
-Regardless, if you are dead set on using a PSK/PSA importer, use [this one](https://github.com/Befzz/blender3d_import_psk_psa).
@@ -1,73 +0,0 @@ (deleted file)
bl_info = {
    "name": "PSK/PSA Exporter",
    "author": "Colin Basnett",
    "version": (1, 1, 1),
    "blender": (2, 80, 0),
    "location": "File > Export > PSK Export (.psk)",
    "description": "PSK/PSA Export (.psk)",
    "warning": "",
    "wiki_url": "https://github.com/DarklightGames/io_export_psk_psa",
    "tracker_url": "https://github.com/DarklightGames/io_export_psk_psa/issues",
    "category": "Import-Export"
}

if 'bpy' in locals():
    import importlib
    importlib.reload(psk_data)
    importlib.reload(psk_builder)
    importlib.reload(psk_exporter)
    importlib.reload(psk_operator)
    importlib.reload(psa_data)
    importlib.reload(psa_builder)
    importlib.reload(psa_exporter)
    importlib.reload(psa_operator)
else:
    # if i remove this line, it can be enabled just fine
    from .psk import data as psk_data
    from .psk import builder as psk_builder
    from .psk import exporter as psk_exporter
    from .psk import operator as psk_operator
    from .psa import data as psa_data
    from .psa import builder as psa_builder
    from .psa import exporter as psa_exporter
    from .psa import operator as psa_operator

import bpy
from bpy.props import IntProperty, CollectionProperty

classes = [
    psk_operator.PskExportOperator,
    psa_operator.PsaExportOperator,
    psa_operator.PSA_UL_ActionList,
    psa_operator.ActionListItem
]


def psk_menu_func(self, context):
    self.layout.operator(psk_operator.PskExportOperator.bl_idname, text ='Unreal PSK (.psk)')


def psa_menu_func(self, context):
    self.layout.operator(psa_operator.PsaExportOperator.bl_idname, text='Unreal PSA (.psa)')


def register():
    for cls in classes:
        bpy.utils.register_class(cls)
    bpy.types.TOPBAR_MT_file_export.append(psk_menu_func)
    bpy.types.TOPBAR_MT_file_export.append(psa_menu_func)
    bpy.types.Scene.psa_action_list = CollectionProperty(type=psa_operator.ActionListItem)
    bpy.types.Scene.psa_action_list_index = IntProperty(name='index for list??', default=0)


def unregister():
    del bpy.types.Scene.psa_action_list_index
    del bpy.types.Scene.psa_action_list
    bpy.types.TOPBAR_MT_file_export.remove(psa_menu_func)
    bpy.types.TOPBAR_MT_file_export.remove(psk_menu_func)
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)


if __name__ == '__main__':
    register()

@@ -1,27 +0,0 @@ (deleted file)
from typing import Type
from .data import *


class PsaExporter(object):
    def __init__(self, psa: Psa):
        self.psa: Psa = psa

    # This method is shared by both PSA/K file formats, move this?
    @staticmethod
    def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
        section = Section()
        section.name = name
        if data_type is not None and data is not None:
            section.data_size = sizeof(data_type)
            section.data_count = len(data)
        fp.write(section)
        if data is not None:
            for datum in data:
                fp.write(datum)

    def export(self, path: str):
        with open(path, 'wb') as fp:
            self.write_section(fp, b'ANIMHEAD')
            self.write_section(fp, b'BONENAMES', Psa.Bone, self.psa.bones)
            self.write_section(fp, b'ANIMINFO', Psa.Sequence, self.psa.sequences)
            self.write_section(fp, b'ANIMKEYS', Psa.Key, self.psa.keys)

@@ -1,105 +0,0 @@ (deleted file)
from bpy.types import Operator, Action, UIList, PropertyGroup
from bpy_extras.io_utils import ExportHelper
from bpy.props import StringProperty, BoolProperty, CollectionProperty, PointerProperty
from .builder import PsaBuilder, PsaBuilderOptions
from .exporter import PsaExporter
import bpy
import re


class ActionListItem(PropertyGroup):
    action: PointerProperty(type=Action)
    is_selected: BoolProperty(default=False)

    @property
    def name(self):
        return self.action.name


class PSA_UL_ActionList(UIList):
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
        layout.alignment = 'LEFT'
        layout.prop(item, 'is_selected', icon_only=True)
        layout.label(text=item.action.name)

    def filter_items(self, context, data, property):
        # TODO: returns two lists, apparently
        actions = getattr(data, property)
        flt_flags = []
        flt_neworder = []
        if self.filter_name:
            flt_flags = bpy.types.UI_UL_list.filter_items_by_name(self.filter_name, self.bitflag_filter_item, actions, 'name', reverse=self.use_filter_invert)
        return flt_flags, flt_neworder


class PsaExportOperator(Operator, ExportHelper):
    bl_idname = 'export.psa'
    bl_label = 'Export'
    __doc__ = 'PSA Exporter (.psa)'
    filename_ext = '.psa'
    filter_glob : StringProperty(default='*.psa', options={'HIDDEN'})
    filepath : StringProperty(
        name='File Path',
        description='File path used for exporting the PSA file',
        maxlen=1024,
        default='')

    def __init__(self):
        self.armature = None

    def draw(self, context):
        layout = self.layout
        scene = context.scene
        box = layout.box()
        box.label(text='Actions', icon='ACTION')
        row = box.row()
        row.template_list('PSA_UL_ActionList', 'asd', scene, 'psa_action_list', scene, 'psa_action_list_index', rows=len(context.scene.psa_action_list))

    def is_action_for_armature(self, action):
        if len(action.fcurves) == 0:
            return False
        bone_names = set([x.name for x in self.armature.data.bones])
        for fcurve in action.fcurves:
            match = re.match(r'pose\.bones\["(.+)"\].\w+', fcurve.data_path)
            if not match:
                continue
            bone_name = match.group(1)
            if bone_name in bone_names:
                return True
        return False

    def invoke(self, context, event):
        if context.view_layer.objects.active.type != 'ARMATURE':
            self.report({'ERROR_INVALID_CONTEXT'}, 'The selected object must be an armature.')
            return {'CANCELLED'}

        self.armature = context.view_layer.objects.active

        context.scene.psa_action_list.clear()
        for action in bpy.data.actions:
            item = context.scene.psa_action_list.add()
            item.action = action
            if self.is_action_for_armature(action):
                item.is_selected = True

        if len(context.scene.psa_action_list) == 0:
            self.report({'ERROR_INVALID_CONTEXT'}, 'There are no actions to export.')
            return {'CANCELLED'}

        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def execute(self, context):
        actions = [x.action for x in context.scene.psa_action_list if x.is_selected]

        if len(actions) == 0:
            self.report({'ERROR_INVALID_CONTEXT'}, 'No actions were selected for export.')
            return {'CANCELLED'}

        options = PsaBuilderOptions()
        options.actions = actions
        builder = PsaBuilder()
        psk = builder.build(context, options)
        exporter = PsaExporter(psk)
        exporter.export(self.filepath)
        return {'FINISHED'}

@@ -1,45 +0,0 @@ (deleted file)
from typing import Type
from .data import *


class PskExporter(object):
    def __init__(self, psk: Psk):
        self.psk: Psk = psk

    @staticmethod
    def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
        section = Section()
        section.name = name
        if data_type is not None and data is not None:
            section.data_size = sizeof(data_type)
            section.data_count = len(data)
        fp.write(section)
        if data is not None:
            for datum in data:
                fp.write(datum)

    def export(self, path: str):
        with open(path, 'wb') as fp:
            self.write_section(fp, b'ACTRHEAD')
            self.write_section(fp, b'PNTS0000', Vector3, self.psk.points)

            # WEDGES
            if len(self.psk.wedges) <= 65536:
                wedge_type = Psk.Wedge16
            else:
                wedge_type = Psk.Wedge32

            wedges = []
            for index, w in enumerate(self.psk.wedges):
                wedge = wedge_type()
                wedge.material_index = w.material_index
                wedge.u = w.u
                wedge.v = w.v
                wedge.point_index = w.point_index
                wedges.append(wedge)

            self.write_section(fp, b'VTXW0000', wedge_type, wedges)
            self.write_section(fp, b'FACE0000', Psk.Face, self.psk.faces)
            self.write_section(fp, b'MATT0000', Psk.Material, self.psk.materials)
            self.write_section(fp, b'REFSKELT', Psk.Bone, self.psk.bones)
            self.write_section(fp, b'RAWWEIGHTS', Psk.Weight, self.psk.weights)

@@ -1,36 +0,0 @@ (deleted file)
from bpy.types import Operator
from bpy_extras.io_utils import ExportHelper
from bpy.props import StringProperty, BoolProperty, FloatProperty
from .builder import PskBuilder
from .exporter import PskExporter


class PskExportOperator(Operator, ExportHelper):
    bl_idname = 'export.psk'
    bl_label = 'Export'
    __doc__ = 'PSK Exporter (.psk)'
    filename_ext = '.psk'
    filter_glob: StringProperty(default='*.psk', options={'HIDDEN'})

    filepath: StringProperty(
        name='File Path',
        description='File path used for exporting the PSK file',
        maxlen=1024,
        default='')

    def invoke(self, context, event):
        try:
            PskBuilder.get_input_objects(context)
        except RuntimeError as e:
            self.report({'ERROR_INVALID_CONTEXT'}, str(e))
            return {'CANCELLED'}

        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def execute(self, context):
        builder = PskBuilder()
        psk = builder.build(context)
        exporter = PskExporter(psk)
        exporter.export(self.filepath)
        return {'FINISHED'}
io_scene_psk_psa/__init__.py (new file, 90 lines)
@@ -0,0 +1,90 @@
bl_info = {
    "name": "PSK/PSA Importer/Exporter",
    "author": "Colin Basnett",
    "version": (1, 2, 0),
    "blender": (2, 80, 0),
    # "location": "File > Export > PSK Export (.psk)",
    "description": "PSK/PSA Import/Export (.psk/.psa)",
    "warning": "",
    "doc_url": "https://github.com/DarklightGames/io_scene_psk_psa",
    "tracker_url": "https://github.com/DarklightGames/io_scene_psk_psa/issues",
    "category": "Import-Export"
}

if 'bpy' in locals():
    import importlib
    importlib.reload(psx_data)
    importlib.reload(psx_helpers)
    importlib.reload(psx_types)
    importlib.reload(psk_data)
    importlib.reload(psk_builder)
    importlib.reload(psk_exporter)
    importlib.reload(psk_importer)
    importlib.reload(psk_reader)
    importlib.reload(psa_data)
    importlib.reload(psa_builder)
    importlib.reload(psa_exporter)
    importlib.reload(psa_reader)
    importlib.reload(psa_importer)
else:
    # if i remove this line, it can be enabled just fine
    from . import data as psx_data
    from . import helpers as psx_helpers
    from . import types as psx_types
    from .psk import data as psk_data
    from .psk import builder as psk_builder
    from .psk import exporter as psk_exporter
    from .psk import reader as psk_reader
    from .psk import importer as psk_importer
    from .psa import data as psa_data
    from .psa import builder as psa_builder
    from .psa import exporter as psa_exporter
    from .psa import reader as psa_reader
    from .psa import importer as psa_importer


import bpy
from bpy.props import PointerProperty

classes = psx_types.__classes__ + \
          psk_importer.__classes__ + \
          psk_exporter.__classes__ + \
          psa_exporter.__classes__ + \
          psa_importer.__classes__


def psk_export_menu_func(self, context):
    self.layout.operator(psk_exporter.PskExportOperator.bl_idname, text='Unreal PSK (.psk)')


def psk_import_menu_func(self, context):
    self.layout.operator(psk_importer.PskImportOperator.bl_idname, text='Unreal PSK (.psk)')


def psa_export_menu_func(self, context):
    self.layout.operator(psa_exporter.PsaExportOperator.bl_idname, text='Unreal PSA (.psa)')


def register():
    for cls in classes:
        bpy.utils.register_class(cls)
    bpy.types.TOPBAR_MT_file_export.append(psk_export_menu_func)
    bpy.types.TOPBAR_MT_file_import.append(psk_import_menu_func)
    bpy.types.TOPBAR_MT_file_export.append(psa_export_menu_func)
    bpy.types.Scene.psa_import = PointerProperty(type=psa_importer.PsaImportPropertyGroup)
    bpy.types.Scene.psa_export = PointerProperty(type=psa_exporter.PsaExportPropertyGroup)
    bpy.types.Scene.psk_export = PointerProperty(type=psk_exporter.PskExportPropertyGroup)


def unregister():
    del bpy.types.Scene.psa_export
    del bpy.types.Scene.psa_import
    bpy.types.TOPBAR_MT_file_export.remove(psk_export_menu_func)
    bpy.types.TOPBAR_MT_file_import.remove(psk_import_menu_func)
    bpy.types.TOPBAR_MT_file_export.remove(psa_export_menu_func)
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)


if __name__ == '__main__':
    register()
@@ -8,6 +8,14 @@ class Vector3(Structure):
         ('z', c_float),
     ]
 
+    def __iter__(self):
+        yield self.x
+        yield self.y
+        yield self.z
+
+    def __repr__(self):
+        return repr(tuple(self))
+
 
 class Quaternion(Structure):
     _fields_ = [
@@ -17,6 +25,15 @@ class Quaternion(Structure):
         ('w', c_float),
     ]
 
+    def __iter__(self):
+        yield self.w
+        yield self.x
+        yield self.y
+        yield self.z
+
+    def __repr__(self):
+        return repr(tuple(self))
+
 
 class Section(Structure):
     _fields_ = [
io_scene_psk_psa/helpers.py (new file, 61 lines)
@@ -0,0 +1,61 @@
from typing import List


def populate_bone_group_list(armature_object, bone_group_list):
    bone_group_list.clear()

    item = bone_group_list.add()
    item.name = '(unassigned)'
    item.index = -1
    item.is_selected = True

    if armature_object and armature_object.pose:
        for bone_group_index, bone_group in enumerate(armature_object.pose.bone_groups):
            item = bone_group_list.add()
            item.name = bone_group.name
            item.index = bone_group_index
            item.is_selected = True


def add_bone_groups_to_layout(layout):
    pass


def get_export_bone_indices_for_bone_groups(armature_object, bone_group_indices: List[int]) -> List[int]:
    """
    Returns a sorted list of bone indices that should be exported for the given bone groups.

    Note that the ancestors of bones within the bone groups will also be present in the returned list.

    :param armature_object: Blender object with type 'ARMATURE'
    :param bone_group_indices: List of bone group indices to be exported.
    :return: A sorted list of bone indices that should be exported.
    """
    if armature_object is None or armature_object.type != 'ARMATURE':
        raise ValueError('An armature object must be supplied')

    bones = armature_object.data.bones
    pose_bones = armature_object.pose.bones
    bone_names = [x.name for x in bones]

    # Get a list of the bone indices that are explicitly part of the bone groups we are including.
    bone_index_stack = []
    is_exporting_none_bone_groups = -1 in bone_group_indices
    for bone_index, pose_bone in enumerate(pose_bones):
        if (pose_bone.bone_group is None and is_exporting_none_bone_groups) or \
                (pose_bone.bone_group is not None and pose_bone.bone_group_index in bone_group_indices):
            bone_index_stack.append(bone_index)

    # For each bone that is explicitly being added, recursively walk up the hierarchy and ensure that all of
    # those ancestor bone indices are also in the list.
    bone_indices = set()
    while len(bone_index_stack) > 0:
        bone_index = bone_index_stack.pop()
        bone = bones[bone_index]
        if bone.parent is not None:
            parent_bone_index = bone_names.index(bone.parent.name)
            if parent_bone_index not in bone_indices:
                bone_index_stack.append(parent_bone_index)
        bone_indices.add(bone_index)

    return list(sorted(list(bone_indices)))
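The docstring above describes the contract of the bone-group filter. The following is a small usage sketch that is not part of the commit: it assumes the add-on is installed under the io_scene_psk_psa package, that it is run inside Blender's Python console, and that the scene contains an armature object named 'Armature' (a hypothetical name).

```python
import bpy
from io_scene_psk_psa.helpers import get_export_bone_indices_for_bone_groups

armature_object = bpy.data.objects['Armature']  # hypothetical object name

# -1 stands for the synthetic '(unassigned)' group created by populate_bone_group_list,
# so this selects unassigned bones plus bone group 0, along with any ancestor bones.
bone_indices = get_export_bone_indices_for_bone_groups(armature_object, [-1, 0])
print(bone_indices)
```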
@@ -1,18 +1,19 @@
 from .data import *
+from ..helpers import *
 
 
 class PsaBuilderOptions(object):
     def __init__(self):
         self.actions = []
+        self.bone_filter_mode = 'ALL'
+        self.bone_group_indices = []
 
 
-# https://git.cth451.me/cth451/blender-addons/blob/master/io_export_unreal_psk_psa.py
 class PsaBuilder(object):
     def __init__(self):
-        # TODO: add options in here (selected anims, eg.)
         pass
 
-    def build(self, context, options) -> Psa:
+    def build(self, context, options: PsaBuilderOptions) -> Psa:
         object = context.view_layer.objects.active
 
         if object.type != 'ARMATURE':
@@ -32,31 +33,54 @@ class PsaBuilder(object):
         # armature bones.
         bone_names = [x.name for x in bones]
         pose_bones = [(bone_names.index(bone.name), bone) for bone in armature.pose.bones]
+        del bone_names
         pose_bones.sort(key=lambda x: x[0])
         pose_bones = [x[1] for x in pose_bones]
 
-        for bone in bones:
+        bone_indices = list(range(len(bones)))
+
+        # If bone groups are specified, get only the bones that are in that specified bone groups and their ancestors.
+        if options.bone_filter_mode == 'BONE_GROUPS':
+            bone_indices = get_export_bone_indices_for_bone_groups(armature, options.bone_group_indices)
+
+        # Make the bone lists contain only the bones that are going to be exported.
+        bones = [bones[bone_index] for bone_index in bone_indices]
+        pose_bones = [pose_bones[bone_index] for bone_index in bone_indices]
+
+        if len(bones) == 0:
+            # No bones are going to be exported.
+            raise RuntimeError('No bones available for export')
+
+        # Ensure that the exported hierarchy has a single root bone.
+        root_bones = [x for x in bones if x.parent is None]
+        if len(root_bones) > 1:
+            root_bone_names = [x.name for x in bones]
+            raise RuntimeError('Exported bone hierarchy must have a single root bone.'
+                               f'The bone hierarchy marked for export has {len(root_bones)} root bones: {root_bone_names}')
+
+        for pose_bone in bones:
             psa_bone = Psa.Bone()
-            psa_bone.name = bytes(bone.name, encoding='utf-8')
-            psa_bone.children_count = len(bone.children)
+            psa_bone.name = bytes(pose_bone.name, encoding='utf-8')
 
             try:
-                psa_bone.parent_index = bones.index(bone.parent)
+                parent_index = bones.index(pose_bone.parent)
+                psa_bone.parent_index = parent_index
+                psa.bones[parent_index].children_count += 1
             except ValueError:
                 psa_bone.parent_index = -1
 
-            if bone.parent is not None:
-                rotation = bone.matrix.to_quaternion()
+            if pose_bone.parent is not None:
+                rotation = pose_bone.matrix.to_quaternion()
                 rotation.x = -rotation.x
                 rotation.y = -rotation.y
                 rotation.z = -rotation.z
-                quat_parent = bone.parent.matrix.to_quaternion().inverted()
-                parent_head = quat_parent @ bone.parent.head
-                parent_tail = quat_parent @ bone.parent.tail
-                location = (parent_tail - parent_head) + bone.head
+                quat_parent = pose_bone.parent.matrix.to_quaternion().inverted()
+                parent_head = quat_parent @ pose_bone.parent.head
+                parent_tail = quat_parent @ pose_bone.parent.tail
+                location = (parent_tail - parent_head) + pose_bone.head
             else:
-                location = armature.matrix_local @ bone.head
-                rot_matrix = bone.matrix @ armature.matrix_local.to_3x3()
+                location = armature.matrix_local @ pose_bone.head
+                rot_matrix = pose_bone.matrix @ armature.matrix_local.to_3x3()
                 rotation = rot_matrix.to_quaternion()
 
             psa_bone.location.x = location.x
@@ -92,18 +116,18 @@ class PsaBuilder(object):
             for frame in range(frame_count):
                 context.scene.frame_set(frame_min + frame)
 
-                for bone in pose_bones:
+                for pose_bone in pose_bones:
                     key = Psa.Key()
-                    pose_bone_matrix = bone.matrix
+                    pose_bone_matrix = pose_bone.matrix
 
-                    if bone.parent is not None:
-                        pose_bone_parent_matrix = bone.parent.matrix
+                    if pose_bone.parent is not None:
+                        pose_bone_parent_matrix = pose_bone.parent.matrix
                         pose_bone_matrix = pose_bone_parent_matrix.inverted() @ pose_bone_matrix
 
                     location = pose_bone_matrix.to_translation()
                     rotation = pose_bone_matrix.to_quaternion().normalized()
 
-                    if bone.parent is not None:
+                    if pose_bone.parent is not None:
                         rotation.x = -rotation.x
                         rotation.y = -rotation.y
                         rotation.z = -rotation.z
@@ -124,6 +148,6 @@ class PsaBuilder(object):
             sequence.bone_count = len(pose_bones)
             sequence.track_time = frame_count
 
-            psa.sequences.append(sequence)
+            psa.sequences[action.name] = sequence
 
         return psa
@@ -1,9 +1,15 @@
+import typing
 from typing import List
+from collections import OrderedDict
 from ..data import *
 
+"""
+Note that keys are not stored within the Psa object.
+Use the PsaReader::get_sequence_keys to get the keys for a sequence.
+"""
 
 class Psa(object):
 
     class Bone(Structure):
         _fields_ = [
             ('name', c_char * 64),
@@ -22,7 +28,7 @@ class Psa(object):
             ('bone_count', c_int32),
             ('root_include', c_int32),
             ('compression_style', c_int32),
-            ('key_quotum', c_int32), # what the fuck is a quotum
+            ('key_quotum', c_int32),
             ('key_reduction', c_float),
             ('track_time', c_float),
             ('fps', c_float),
@@ -38,7 +44,20 @@ class Psa(object):
             ('time', c_float)
         ]
 
+        @property
+        def data(self):
+            yield self.rotation.w
+            yield self.rotation.x
+            yield self.rotation.y
+            yield self.rotation.z
+            yield self.location.x
+            yield self.location.y
+            yield self.location.z
+
+        def __repr__(self) -> str:
+            return repr((self.location, self.rotation, self.time))
+
     def __init__(self):
         self.bones: List[Psa.Bone] = []
-        self.sequences: List[Psa.Sequence] = []
+        self.sequences: typing.OrderedDict[Psa.Sequence] = OrderedDict()
        self.keys: List[Psa.Key] = []
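The note at the top of this file points readers at PsaReader for sequence keys. As an illustration only (the reader module itself is not shown in this diff), here is a sketch of how the importer added in this commit drives it, using the calls that appear later in importer.py: PsaReader(path), the sequences mapping, sequence.frame_count and read_sequence_data_matrix(). The module path and file path are assumptions, and it must be run inside Blender with the add-on installed.

```python
from io_scene_psk_psa.psa.reader import PsaReader  # module path assumed from the package layout

psa_reader = PsaReader('C:/exports/my_animations.psa')  # hypothetical path

# Sequences are exposed as an ordered mapping; the per-frame keys are read on demand
# rather than being stored on the Psa object itself.
for sequence_name, sequence in psa_reader.sequences.items():
    # One row per frame and per bone: rotation (w, x, y, z) followed by location (x, y, z).
    data = psa_reader.read_sequence_data_matrix(sequence_name)
    print(sequence_name, sequence.frame_count, data.shape)
```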
io_scene_psk_psa/psa/exporter.py (new file, 253 lines)
@@ -0,0 +1,253 @@
|
import bpy
|
||||||
|
from bpy.types import Operator, PropertyGroup, Action, UIList, BoneGroup
|
||||||
|
from bpy.props import CollectionProperty, IntProperty, PointerProperty, StringProperty, BoolProperty, EnumProperty
|
||||||
|
from bpy_extras.io_utils import ExportHelper
|
||||||
|
from typing import Type
|
||||||
|
from .builder import PsaBuilder, PsaBuilderOptions
|
||||||
|
from .data import *
|
||||||
|
from ..types import BoneGroupListItem
|
||||||
|
from ..helpers import *
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
class PsaExporter(object):
|
||||||
|
def __init__(self, psa: Psa):
|
||||||
|
self.psa: Psa = psa
|
||||||
|
|
||||||
|
# This method is shared by both PSA/K file formats, move this?
|
||||||
|
@staticmethod
|
||||||
|
def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
|
||||||
|
section = Section()
|
||||||
|
section.name = name
|
||||||
|
if data_type is not None and data is not None:
|
||||||
|
section.data_size = sizeof(data_type)
|
||||||
|
section.data_count = len(data)
|
||||||
|
fp.write(section)
|
||||||
|
if data is not None:
|
||||||
|
for datum in data:
|
||||||
|
fp.write(datum)
|
||||||
|
|
||||||
|
def export(self, path: str):
|
||||||
|
with open(path, 'wb') as fp:
|
||||||
|
self.write_section(fp, b'ANIMHEAD')
|
||||||
|
self.write_section(fp, b'BONENAMES', Psa.Bone, self.psa.bones)
|
||||||
|
self.write_section(fp, b'ANIMINFO', Psa.Sequence, list(self.psa.sequences.values()))
|
||||||
|
self.write_section(fp, b'ANIMKEYS', Psa.Key, self.psa.keys)
|
||||||
|
|
||||||
|
|
||||||
|
class PsaExportActionListItem(PropertyGroup):
|
||||||
|
action: PointerProperty(type=Action)
|
||||||
|
action_name: StringProperty()
|
||||||
|
is_selected: BoolProperty(default=False)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
return self.action.name
|
||||||
|
|
||||||
|
|
||||||
|
class PsaExportPropertyGroup(PropertyGroup):
|
||||||
|
action_list: CollectionProperty(type=PsaExportActionListItem)
|
||||||
|
action_list_index: IntProperty(default=0)
|
||||||
|
bone_filter_mode: EnumProperty(
|
||||||
|
name='Bone Filter',
|
||||||
|
description='',
|
||||||
|
items=(
|
||||||
|
('ALL', 'All', 'All bones will be exported.'),
|
||||||
|
('BONE_GROUPS', 'Bone Groups', 'Only bones belonging to the selected bone groups and their ancestors will be exported.')
|
||||||
|
)
|
||||||
|
)
|
||||||
|
bone_group_list: CollectionProperty(type=BoneGroupListItem)
|
||||||
|
bone_group_list_index: IntProperty(default=0)
|
||||||
|
|
||||||
|
|
||||||
|
def is_bone_filter_mode_item_available(context, identifier):
|
||||||
|
if identifier == "BONE_GROUPS":
|
||||||
|
obj = context.active_object
|
||||||
|
if not obj.pose or not obj.pose.bone_groups:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class PsaExportOperator(Operator, ExportHelper):
|
||||||
|
bl_idname = 'export.psa'
|
||||||
|
bl_label = 'Export'
|
||||||
|
__doc__ = 'Export actions to PSA'
|
||||||
|
filename_ext = '.psa'
|
||||||
|
filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
|
||||||
|
filepath: StringProperty(
|
||||||
|
name='File Path',
|
||||||
|
description='File path used for exporting the PSA file',
|
||||||
|
maxlen=1024,
|
||||||
|
default='')
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.armature = None
|
||||||
|
|
||||||
|
def draw(self, context):
|
||||||
|
layout = self.layout
|
||||||
|
property_group = context.scene.psa_export
|
||||||
|
|
||||||
|
# ACTIONS
|
||||||
|
box = layout.box()
|
||||||
|
box.label(text='Actions', icon='ACTION')
|
||||||
|
row = box.row()
|
||||||
|
row.template_list('PSA_UL_ExportActionList', 'asd', property_group, 'action_list', property_group, 'action_list_index', rows=10)
|
||||||
|
row = box.row(align=True)
|
||||||
|
row.label(text='Select')
|
||||||
|
row.operator('psa_export.actions_select_all', text='All')
|
||||||
|
row.operator('psa_export.actions_deselect_all', text='None')
|
||||||
|
|
||||||
|
# BONES
|
||||||
|
box = layout.box()
|
||||||
|
box.label(text='Bones', icon='BONE_DATA')
|
||||||
|
bone_filter_mode_items = property_group.bl_rna.properties['bone_filter_mode'].enum_items_static
|
||||||
|
row = box.row(align=True)
|
||||||
|
for item in bone_filter_mode_items:
|
||||||
|
identifier = item.identifier
|
||||||
|
item_layout = row.row(align=True)
|
||||||
|
item_layout.prop_enum(property_group, 'bone_filter_mode', item.identifier)
|
||||||
|
item_layout.enabled = is_bone_filter_mode_item_available(context, identifier)
|
||||||
|
|
||||||
|
if property_group.bone_filter_mode == 'BONE_GROUPS':
|
||||||
|
box = layout.box()
|
||||||
|
row = box.row()
|
||||||
|
rows = max(3, min(len(property_group.bone_group_list), 10))
|
||||||
|
row.template_list('PSX_UL_BoneGroupList', '', property_group, 'bone_group_list', property_group, 'bone_group_list_index', rows=rows)
|
||||||
|
|
||||||
|
def is_action_for_armature(self, action):
|
||||||
|
if len(action.fcurves) == 0:
|
||||||
|
return False
|
||||||
|
bone_names = set([x.name for x in self.armature.data.bones])
|
||||||
|
for fcurve in action.fcurves:
|
||||||
|
match = re.match(r'pose\.bones\["(.+)"\].\w+', fcurve.data_path)
|
||||||
|
if not match:
|
||||||
|
continue
|
||||||
|
bone_name = match.group(1)
|
||||||
|
if bone_name in bone_names:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def invoke(self, context, event):
|
||||||
|
property_group = context.scene.psa_export
|
||||||
|
|
||||||
|
if context.view_layer.objects.active is None:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'An armature must be selected')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
if context.view_layer.objects.active.type != 'ARMATURE':
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'The selected object must be an armature.')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
self.armature = context.view_layer.objects.active
|
||||||
|
|
||||||
|
# Populate actions list.
|
||||||
|
property_group.action_list.clear()
|
||||||
|
for action in bpy.data.actions:
|
||||||
|
item = property_group.action_list.add()
|
||||||
|
item.action = action
|
||||||
|
item.action_name = action.name
|
||||||
|
if self.is_action_for_armature(action):
|
||||||
|
item.is_selected = True
|
||||||
|
|
||||||
|
if len(property_group.action_list) == 0:
|
||||||
|
# If there are no actions at all, we have nothing to export, so just cancel the operation.
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'There are no actions to export.')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
# Populate bone groups list.
|
||||||
|
populate_bone_group_list(self.armature, property_group.bone_group_list)
|
||||||
|
|
||||||
|
context.window_manager.fileselect_add(self)
|
||||||
|
|
||||||
|
return {'RUNNING_MODAL'}
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
property_group = context.scene.psa_export
|
||||||
|
actions = [x.action for x in property_group.action_list if x.is_selected]
|
||||||
|
|
||||||
|
if len(actions) == 0:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, 'No actions were selected for export.')
|
||||||
|
return {'CANCELLED'}
|
||||||
|
|
||||||
|
options = PsaBuilderOptions()
|
||||||
|
options.actions = actions
|
||||||
|
options.bone_filter_mode = property_group.bone_filter_mode
|
||||||
|
options.bone_group_indices = [x.index for x in property_group.bone_group_list if x.is_selected]
|
||||||
|
builder = PsaBuilder()
|
||||||
|
try:
|
||||||
|
psa = builder.build(context, options)
|
||||||
|
except RuntimeError as e:
|
||||||
|
self.report({'ERROR_INVALID_CONTEXT'}, str(e))
|
||||||
|
return {'CANCELLED'}
|
||||||
|
exporter = PsaExporter(psa)
|
||||||
|
exporter.export(self.filepath)
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PSA_UL_ExportActionList(UIList):
|
||||||
|
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
|
||||||
|
layout.alignment = 'LEFT'
|
||||||
|
layout.prop(item, 'is_selected', icon_only=True)
|
||||||
|
layout.label(text=item.action_name)
|
||||||
|
|
||||||
|
def filter_items(self, context, data, property):
|
||||||
|
actions = getattr(data, property)
|
||||||
|
flt_flags = []
|
||||||
|
flt_neworder = []
|
||||||
|
if self.filter_name:
|
||||||
|
flt_flags = bpy.types.UI_UL_list.filter_items_by_name(
|
||||||
|
self.filter_name,
|
||||||
|
self.bitflag_filter_item,
|
||||||
|
actions,
|
||||||
|
'action_name',
|
||||||
|
reverse=self.use_filter_invert
|
||||||
|
)
|
||||||
|
return flt_flags, flt_neworder
|
||||||
|
|
||||||
|
|
||||||
|
class PsaExportSelectAll(bpy.types.Operator):
|
||||||
|
bl_idname = 'psa_export.actions_select_all'
|
||||||
|
bl_label = 'Select All'
|
||||||
|
bl_description = 'Select all actions'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
property_group = context.scene.psa_export
|
||||||
|
action_list = property_group.action_list
|
||||||
|
has_unselected_actions = any(map(lambda action: not action.is_selected, action_list))
|
||||||
|
return len(action_list) > 0 and has_unselected_actions
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
property_group = context.scene.psa_export
|
||||||
|
for action in property_group.action_list:
|
||||||
|
action.is_selected = True
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
class PsaExportDeselectAll(bpy.types.Operator):
|
||||||
|
bl_idname = 'psa_export.actions_deselect_all'
|
||||||
|
bl_label = 'Deselect All'
|
||||||
|
bl_description = 'Deselect all actions'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def poll(cls, context):
|
||||||
|
property_group = context.scene.psa_export
|
||||||
|
action_list = property_group.action_list
|
||||||
|
has_selected_actions = any(map(lambda action: action.is_selected, action_list))
|
||||||
|
return len(action_list) > 0 and has_selected_actions
|
||||||
|
|
||||||
|
def execute(self, context):
|
||||||
|
property_group = context.scene.psa_export
|
||||||
|
for action in property_group.action_list:
|
||||||
|
action.is_selected = False
|
||||||
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
|
||||||
|
__classes__ = [
|
||||||
|
PsaExportActionListItem,
|
||||||
|
PsaExportPropertyGroup,
|
||||||
|
PsaExportOperator,
|
||||||
|
PSA_UL_ExportActionList,
|
||||||
|
PsaExportSelectAll,
|
||||||
|
PsaExportDeselectAll,
|
||||||
|
]
|
io_scene_psk_psa/psa/importer.py (new file, 411 lines)
@@ -0,0 +1,411 @@
|
import bpy
|
||||||
|
import os
|
||||||
|
import numpy as np
|
||||||
|
from mathutils import Vector, Quaternion, Matrix
|
||||||
|
from .data import Psa
|
||||||
|
from typing import List, AnyStr, Optional
|
||||||
|
from bpy.types import Operator, Action, UIList, PropertyGroup, Panel, Armature, FileSelectParams
|
||||||
|
from bpy_extras.io_utils import ExportHelper, ImportHelper
|
||||||
|
from bpy.props import StringProperty, BoolProperty, CollectionProperty, PointerProperty, IntProperty
|
||||||
|
from .reader import PsaReader
|
||||||
|
|
||||||
|
|
||||||
|
class PsaImporter(object):
|
||||||
|
def __init__(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def import_psa(self, psa_reader: PsaReader, sequence_names: List[AnyStr], armature_object):
|
||||||
|
sequences = map(lambda x: psa_reader.sequences[x], sequence_names)
|
||||||
|
armature_data = armature_object.data
|
||||||
|
|
||||||
|
class ImportBone(object):
|
||||||
|
def __init__(self, psa_bone: Psa.Bone):
|
||||||
|
self.psa_bone: Psa.Bone = psa_bone
|
||||||
|
self.parent: Optional[ImportBone] = None
|
||||||
|
self.armature_bone = None
|
||||||
|
self.pose_bone = None
|
||||||
|
self.orig_loc: Vector = Vector()
|
||||||
|
self.orig_quat: Quaternion = Quaternion()
|
||||||
|
self.post_quat: Quaternion = Quaternion()
|
||||||
|
self.fcurves = []
|
||||||
|
|
||||||
|
def calculate_fcurve_data(import_bone: ImportBone, key_data: []):
|
||||||
|
# Convert world-space transforms to local-space transforms.
|
||||||
|
key_rotation = Quaternion(key_data[0:4])
|
||||||
|
key_location = Vector(key_data[4:])
|
||||||
|
q = import_bone.post_quat.copy()
|
||||||
|
q.rotate(import_bone.orig_quat)
|
||||||
|
quat = q
|
||||||
|
q = import_bone.post_quat.copy()
|
||||||
|
if import_bone.parent is None:
|
||||||
|
q.rotate(key_rotation.conjugated())
|
||||||
|
else:
|
||||||
|
q.rotate(key_rotation)
|
||||||
|
quat.rotate(q.conjugated())
|
||||||
|
loc = key_location - import_bone.orig_loc
|
||||||
|
loc.rotate(import_bone.post_quat.conjugated())
|
||||||
|
return quat.w, quat.x, quat.y, quat.z, loc.x, loc.y, loc.z
|
||||||
|
|
||||||
|
# Create an index mapping from bones in the PSA to bones in the target armature.
|
||||||
|
psa_to_armature_bone_indices = {}
|
||||||
|
armature_bone_names = [x.name for x in armature_data.bones]
|
||||||
|
psa_bone_names = []
|
||||||
|
for psa_bone_index, psa_bone in enumerate(psa_reader.bones):
|
||||||
|
psa_bone_name = psa_bone.name.decode('windows-1252')
|
||||||
|
psa_bone_names.append(psa_bone_name)
|
||||||
|
try:
|
||||||
|
psa_to_armature_bone_indices[psa_bone_index] = armature_bone_names.index(psa_bone_name)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Report if there are missing bones in the target armature.
|
||||||
|
missing_bone_names = set(psa_bone_names).difference(set(armature_bone_names))
|
||||||
|
if len(missing_bone_names) > 0:
|
||||||
|
print(f'The armature object \'{armature_object.name}\' is missing the following bones that exist in the PSA:')
|
||||||
|
print(list(sorted(missing_bone_names)))
|
||||||
|
del armature_bone_names
|
||||||
|
|
||||||
|
# Create intermediate bone data for import operations.
|
||||||
|
import_bones = []
|
||||||
|
import_bones_dict = dict()
|
||||||
|
|
||||||
|
for psa_bone_index, psa_bone in enumerate(psa_reader.bones):
|
||||||
|
bone_name = psa_bone.name.decode('windows-1252')
|
||||||
|
if psa_bone_index not in psa_to_armature_bone_indices: # TODO: replace with bone_name in armature_data.bones
|
||||||
|
# PSA bone does not map to armature bone, skip it and leave an empty bone in its place.
|
||||||
|
import_bones.append(None)
|
||||||
|
continue
|
||||||
|
import_bone = ImportBone(psa_bone)
|
||||||
|
import_bone.armature_bone = armature_data.bones[bone_name]
|
||||||
|
import_bone.pose_bone = armature_object.pose.bones[bone_name]
|
||||||
|
import_bones_dict[bone_name] = import_bone
|
||||||
|
import_bones.append(import_bone)
|
||||||
|
|
||||||
|
for import_bone in filter(lambda x: x is not None, import_bones):
|
||||||
|
armature_bone = import_bone.armature_bone
|
||||||
|
if armature_bone.parent is not None and armature_bone.parent.name in psa_bone_names:
|
||||||
|
import_bone.parent = import_bones_dict[armature_bone.parent.name]
|
||||||
|
# Calculate the original location & rotation of each bone (in world-space maybe?)
|
||||||
|
if armature_bone.get('orig_quat') is not None:
|
||||||
|
# TODO: ideally we don't rely on bone auxiliary data like this, the non-aux data path is incorrect (animations are flipped 180 around Z)
|
||||||
|
import_bone.orig_quat = Quaternion(armature_bone['orig_quat'])
|
||||||
|
import_bone.orig_loc = Vector(armature_bone['orig_loc'])
|
||||||
|
import_bone.post_quat = Quaternion(armature_bone['post_quat'])
|
||||||
|
else:
|
||||||
|
if import_bone.parent is not None:
|
||||||
|
import_bone.orig_loc = armature_bone.matrix_local.translation - armature_bone.parent.matrix_local.translation
|
||||||
|
import_bone.orig_loc.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
|
||||||
|
import_bone.orig_quat = armature_bone.matrix_local.to_quaternion()
|
||||||
|
import_bone.orig_quat.rotate(armature_bone.parent.matrix_local.to_quaternion().conjugated())
|
||||||
|
import_bone.orig_quat.conjugate()
|
||||||
|
else:
|
||||||
|
import_bone.orig_loc = armature_bone.matrix_local.translation.copy()
|
||||||
|
import_bone.orig_quat = armature_bone.matrix_local.to_quaternion()
|
||||||
|
import_bone.post_quat = import_bone.orig_quat.conjugated()
|
||||||
|
|
||||||
|
# Create and populate the data for new sequences.
|
||||||
|
for sequence in sequences:
|
||||||
|
# Add the action.
|
||||||
|
action = bpy.data.actions.new(name=sequence.name.decode())
|
||||||
|
|
||||||
|
# Create f-curves for the rotation and location of each bone.
|
||||||
|
for psa_bone_index, armature_bone_index in psa_to_armature_bone_indices.items():
|
||||||
|
import_bone = import_bones[psa_bone_index]
|
||||||
|
pose_bone = import_bone.pose_bone
|
||||||
|
rotation_data_path = pose_bone.path_from_id('rotation_quaternion')
|
||||||
|
location_data_path = pose_bone.path_from_id('location')
|
||||||
|
import_bone.fcurves = [
|
||||||
|
action.fcurves.new(rotation_data_path, index=0), # Qw
|
||||||
|
action.fcurves.new(rotation_data_path, index=1), # Qx
|
||||||
|
action.fcurves.new(rotation_data_path, index=2), # Qy
|
||||||
|
action.fcurves.new(rotation_data_path, index=3), # Qz
|
||||||
|
action.fcurves.new(location_data_path, index=0), # Lx
|
||||||
|
action.fcurves.new(location_data_path, index=1), # Ly
|
||||||
|
action.fcurves.new(location_data_path, index=2), # Lz
|
||||||
|
]
|
||||||
|
|
||||||
|
# Read the sequence keys from the PSA file.
|
||||||
|
sequence_name = sequence.name.decode('windows-1252')
|
||||||
|
|
||||||
|
# Read the sequence data matrix from the PSA.
|
||||||
|
sequence_data_matrix = psa_reader.read_sequence_data_matrix(sequence_name)
|
||||||
|
keyframe_write_matrix = np.ones(sequence_data_matrix.shape, dtype=np.int8)
|
||||||
|
|
||||||
|
# The first step is to determine the frames at which each bone will write out a keyframe.
|
||||||
|
threshold = 0.001
|
||||||
|
for bone_index, import_bone in enumerate(import_bones):
|
||||||
|
if import_bone is None:
|
||||||
|
continue
|
||||||
|
for fcurve_index, fcurve in enumerate(import_bone.fcurves):
|
||||||
|
# Get all the keyframe data for the bone's f-curve data from the sequence data matrix.
|
||||||
|
fcurve_frame_data = sequence_data_matrix[:, bone_index, fcurve_index]
|
||||||
|
last_written_datum = 0
|
||||||
|
for frame_index, datum in enumerate(fcurve_frame_data):
|
||||||
|
# If the f-curve data is not different enough to the last written frame, un-mark this data for writing.
|
||||||
|
if frame_index > 0 and abs(datum - last_written_datum) < threshold:
|
||||||
|
keyframe_write_matrix[frame_index, bone_index, fcurve_index] = 0
|
||||||
|
else:
|
||||||
|
last_written_datum = fcurve_frame_data[frame_index]
|
||||||
|
|
||||||
|
# Write the keyframes out!
|
||||||
|
for frame_index in range(sequence.frame_count):
|
||||||
|
for bone_index, import_bone in enumerate(import_bones):
|
||||||
|
if import_bone is None:
|
||||||
|
continue
|
||||||
|
bone_has_writeable_keyframes = any(keyframe_write_matrix[frame_index, bone_index])
|
||||||
|
if bone_has_writeable_keyframes:
|
||||||
|
# This bone has writeable keyframes for this frame.
|
||||||
|
key_data = sequence_data_matrix[frame_index, bone_index]
|
||||||
|
# Calculate the local-space key data for the bone.
|
||||||
|
fcurve_data = calculate_fcurve_data(import_bone, key_data)
|
||||||
|
for fcurve, should_write, datum in zip(import_bone.fcurves, keyframe_write_matrix[frame_index, bone_index], fcurve_data):
|
||||||
|
if should_write:
|
||||||
|
fcurve.keyframe_points.insert(frame_index, datum, options={'FAST'})
|
||||||
|
|
||||||
|
|
||||||
|
class PsaImportPsaBoneItem(PropertyGroup):
|
||||||
|
bone_name: StringProperty()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
return self.bone_name
|
||||||
|
|
||||||
|
|
||||||
|
class PsaImportActionListItem(PropertyGroup):
|
||||||
|
action_name: StringProperty()
|
||||||
|
frame_count: IntProperty()
|
||||||
|
is_selected: BoolProperty(default=False)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
return self.action_name
|
||||||
|
|
||||||
|
|
||||||
|
def on_psa_file_path_updated(property, context):
|
||||||
|
print('PATH UPDATED')
|
||||||
|
property_group = context.scene.psa_import
|
||||||
|
property_group.action_list.clear()
|
||||||
|
property_group.psa_bones.clear()
|
||||||
|
try:
|
||||||
|
# Read the file and populate the action list.
|
||||||
|
p = os.path.abspath(property_group.psa_file_path)
|
||||||
|
psa_reader = PsaReader(p)
|
||||||
|
for sequence in psa_reader.sequences.values():
|
||||||
|
item = property_group.action_list.add()
|
||||||
|
item.action_name = sequence.name.decode('windows-1252')
|
||||||
|
item.frame_count = sequence.frame_count
|
||||||
|
item.is_selected = True
|
||||||
|
|
||||||
|
for psa_bone in psa_reader.bones:
|
||||||
|
item = property_group.psa_bones.add()
|
||||||
|
item.bone_name = psa_bone.name
|
||||||
|
except IOError as e:
|
||||||
|
print('ERROR READING FILE')
|
||||||
|
print(e)
|
||||||
|
# TODO: set an error somewhere so the user knows the PSA could not be read.
|
||||||
    pass


def on_armature_object_updated(property, context):
    # TODO: ensure that there are matching bones between the two rigs.
    property_group = context.scene.psa_import
    armature_object = property_group.armature_object
    if armature_object is not None:
        armature_bone_names = set(map(lambda bone: bone.name, armature_object.data.bones))
        psa_bone_names = set(map(lambda psa_bone: psa_bone.name, property_group.psa_bones))


class PsaImportPropertyGroup(bpy.types.PropertyGroup):
    psa_file_path: StringProperty(default='', update=on_psa_file_path_updated, name='PSA File Path')
    psa_bones: CollectionProperty(type=PsaImportPsaBoneItem)
    # armature_object: PointerProperty(name='Object', type=bpy.types.Object, update=on_armature_object_updated)
    action_list: CollectionProperty(type=PsaImportActionListItem)
    action_list_index: IntProperty(name='', default=0)
    action_filter_name: StringProperty(default='')


class PSA_UL_ImportActionList(UIList):

    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
        row = layout.row(align=True)
        split = row.split(align=True, factor=0.75)
        action_col = split.row(align=True)
        action_col.alignment = 'LEFT'
        action_col.prop(item, 'is_selected', icon_only=True)
        action_col.label(text=item.action_name)

    def draw_filter(self, context, layout):
        row = layout.row()
        subrow = row.row(align=True)
        subrow.prop(self, 'filter_name', text="")
        subrow.prop(self, 'use_filter_invert', text="", icon='ARROW_LEFTRIGHT')
        subrow = row.row(align=True)
        subrow.prop(self, 'use_filter_sort_reverse', text='', icon='SORT_ASC')

    def filter_items(self, context, data, property):
        actions = getattr(data, property)
        flt_flags = []
        flt_neworder = []
        if self.filter_name:
            flt_flags = bpy.types.UI_UL_list.filter_items_by_name(
                self.filter_name,
                self.bitflag_filter_item,
                actions,
                'action_name',
                reverse=self.use_filter_invert
            )
        return flt_flags, flt_neworder


class PsaImportSelectAll(bpy.types.Operator):
    bl_idname = 'psa_import.actions_select_all'
    bl_label = 'All'
    bl_description = 'Select all actions'

    @classmethod
    def poll(cls, context):
        property_group = context.scene.psa_import
        action_list = property_group.action_list
        has_unselected_actions = any(map(lambda action: not action.is_selected, action_list))
        return len(action_list) > 0 and has_unselected_actions

    def execute(self, context):
        property_group = context.scene.psa_import
        for action in property_group.action_list:
            action.is_selected = True
        return {'FINISHED'}


class PsaImportDeselectAll(bpy.types.Operator):
    bl_idname = 'psa_import.actions_deselect_all'
    bl_label = 'None'
    bl_description = 'Deselect all actions'

    @classmethod
    def poll(cls, context):
        property_group = context.scene.psa_import
        action_list = property_group.action_list
        has_selected_actions = any(map(lambda action: action.is_selected, action_list))
        return len(action_list) > 0 and has_selected_actions

    def execute(self, context):
        property_group = context.scene.psa_import
        for action in property_group.action_list:
            action.is_selected = False
        return {'FINISHED'}


class PSA_PT_ImportPanel(Panel):
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_label = 'PSA Import'
    bl_context = 'data'
    bl_category = 'PSA Import'
    bl_options = {'DEFAULT_CLOSED'}

    @classmethod
    def poll(cls, context):
        return context.object.type == 'ARMATURE'

    def draw(self, context):
        layout = self.layout
        property_group = context.scene.psa_import

        row = layout.row()
        row.prop(property_group, 'psa_file_path', text='')
        row.enabled = False
        # row.enabled = property_group.psa_file_path is not ''
        row = layout.row()

        layout.separator()

        row.operator('psa_import.select_file', text='Select PSA File', icon='FILEBROWSER')
        if len(property_group.action_list) > 0:
            box = layout.box()
            box.label(text=f'Actions ({len(property_group.action_list)})', icon='ACTION')
            row = box.row()
            rows = max(3, min(len(property_group.action_list), 10))
            row.template_list('PSA_UL_ImportActionList', '', property_group, 'action_list', property_group, 'action_list_index', rows=rows)
            row = box.row(align=True)
            row.label(text='Select')
            row.operator('psa_import.actions_select_all', text='All')
            row.operator('psa_import.actions_deselect_all', text='None')

        layout.separator()

        layout.operator('psa_import.import', text='Import')


class PsaImportSelectFile(Operator):
    bl_idname = 'psa_import.select_file'
    bl_label = 'Select'
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = 'Select a PSA file from which to import animations'
    filepath: bpy.props.StringProperty(subtype='FILE_PATH')
    filter_glob: bpy.props.StringProperty(default="*.psa", options={'HIDDEN'})

    def execute(self, context):
        context.scene.psa_import.psa_file_path = self.filepath
        return {'FINISHED'}

    def invoke(self, context, event):
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}


class PsaImportOperator(Operator):
    bl_idname = 'psa_import.import'
    bl_label = 'Import'
    bl_description = 'Import the selected animations into the scene as actions'

    @classmethod
    def poll(cls, context):
        property_group = context.scene.psa_import
        active_object = context.view_layer.objects.active
        action_list = property_group.action_list
        has_selected_actions = any(map(lambda action: action.is_selected, action_list))
        return has_selected_actions and active_object is not None and active_object.type == 'ARMATURE'

    def execute(self, context):
        property_group = context.scene.psa_import
        psa_reader = PsaReader(property_group.psa_file_path)
        sequence_names = [x.action_name for x in property_group.action_list if x.is_selected]
        PsaImporter().import_psa(psa_reader, sequence_names, context.view_layer.objects.active)
        self.report({'INFO'}, f'Imported {len(sequence_names)} action(s)')
        return {'FINISHED'}


class PsaImportFileSelectOperator(Operator, ImportHelper):
    bl_idname = 'psa_import.file_select'
    bl_label = 'File Select'
    filename_ext = '.psa'
    filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
    filepath: StringProperty(
        name='File Path',
        description='File path used for importing the PSA file',
        maxlen=1024,
        default='')

    def invoke(self, context, event):
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def execute(self, context):
        property_group = context.scene.psa_import
        property_group.psa_file_path = self.filepath
        # Load the sequence names from the selected file
        return {'FINISHED'}


__classes__ = [
    PsaImportPsaBoneItem,
    PsaImportActionListItem,
    PsaImportPropertyGroup,
    PSA_UL_ImportActionList,
    PsaImportSelectAll,
    PsaImportDeselectAll,
    PSA_PT_ImportPanel,
    PsaImportOperator,
    PsaImportFileSelectOperator,
    PsaImportSelectFile,
]
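The __classes__ list above is presumably consumed by the add-on's registration code, which is not part of this hunk. A minimal sketch of what that registration might look like, assuming a scene-level psa_import pointer property (the register/unregister wrappers are assumptions, not shown in this diff):

import bpy

def register():
    for cls in __classes__:
        bpy.utils.register_class(cls)
    # Assumption: the panel and operators read their settings from context.scene.psa_import,
    # so a PointerProperty to the property group is attached to the Scene type.
    bpy.types.Scene.psa_import = bpy.props.PointerProperty(type=PsaImportPropertyGroup)

def unregister():
    del bpy.types.Scene.psa_import
    for cls in reversed(__classes__):
        bpy.utils.unregister_class(cls)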
io_scene_psk_psa/psa/reader.py (new file, 91 lines)
@@ -0,0 +1,91 @@
from .data import *
import ctypes
import numpy as np


class PsaReader(object):
    """
    This class will read the sequences and bone information immediately upon instantiation and hold onto a file handle.
    The key data is not read into memory upon instantiation due to its potentially very large size.
    To read the key data for a particular sequence, call `read_sequence_keys`.
    """
    def __init__(self, path):
        self.keys_data_offset: int = 0
        self.fp = open(path, 'rb')
        self.psa: Psa = self._read(self.fp)

    @property
    def bones(self):
        return self.psa.bones

    @property
    def sequences(self):
        return self.psa.sequences

    @staticmethod
    def _read_types(fp, data_class: ctypes.Structure, section: Section, data):
        buffer_length = section.data_size * section.data_count
        buffer = fp.read(buffer_length)
        offset = 0
        for _ in range(section.data_count):
            data.append(data_class.from_buffer_copy(buffer, offset))
            offset += section.data_size

    def read_sequence_data_matrix(self, sequence_name: str):
        sequence = self.psa.sequences[sequence_name]
        keys = self.read_sequence_keys(sequence_name)
        bone_count = len(self.bones)
        matrix_size = sequence.frame_count, bone_count, 7
        matrix = np.zeros(matrix_size)
        keys_iter = iter(keys)
        for frame_index in range(sequence.frame_count):
            for bone_index in range(bone_count):
                matrix[frame_index, bone_index, :] = list(next(keys_iter).data)
        return matrix

    def read_sequence_keys(self, sequence_name: str) -> List[Psa.Key]:
        """ Reads and returns the key data for a sequence.

        :param sequence_name: The name of the sequence.
        :return: A list of Psa.Keys.
        """
        # Set the file reader to the beginning of the keys data
        sequence = self.psa.sequences[sequence_name]
        data_size = sizeof(Psa.Key)
        bone_count = len(self.psa.bones)
        buffer_length = data_size * bone_count * sequence.frame_count
        sequence_keys_offset = self.keys_data_offset + (sequence.frame_start_index * bone_count * data_size)
        self.fp.seek(sequence_keys_offset, 0)
        buffer = self.fp.read(buffer_length)
        offset = 0
        keys = []
        for _ in range(sequence.frame_count * bone_count):
            key = Psa.Key.from_buffer_copy(buffer, offset)
            keys.append(key)
            offset += data_size
        return keys

    def _read(self, fp) -> Psa:
        psa = Psa()
        while fp.read(1):
            fp.seek(-1, 1)
            section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section)))
            if section.name == b'ANIMHEAD':
                pass
            elif section.name == b'BONENAMES':
                PsaReader._read_types(fp, Psa.Bone, section, psa.bones)
            elif section.name == b'ANIMINFO':
                sequences = []
                PsaReader._read_types(fp, Psa.Sequence, section, sequences)
                for sequence in sequences:
                    psa.sequences[sequence.name.decode()] = sequence
            elif section.name == b'ANIMKEYS':
                # Skip keys on this pass. We will keep this file open and read from it as needed.
                self.keys_data_offset = fp.tell()
                fp.seek(section.data_size * section.data_count, 1)
            elif section.name in [b'SCALEKEYS']:
                fp.seek(section.data_size * section.data_count, 1)
            else:
                raise RuntimeError(f'Unrecognized section "{section.name}"')
        return psa
@@ -2,8 +2,8 @@ import bpy
 import bmesh
 from collections import OrderedDict
 from .data import *
+from ..helpers import *
 
-# https://github.com/bwrsandman/blender-addons/blob/master/io_export_unreal_psk_psa.py
 
 class PskInputObjects(object):
     def __init__(self):
@@ -11,6 +11,12 @@ class PskInputObjects(object):
         self.armature_object = None
 
 
+class PskBuilderOptions(object):
+    def __init__(self):
+        self.bone_filter_mode = 'ALL'
+        self.bone_group_indices = []
+
+
 class PskBuilder(object):
     def __init__(self):
         pass
@@ -40,7 +46,7 @@ class PskBuilder(object):
             modifiers = [x for x in obj.modifiers if x.type == 'ARMATURE']
             if len(modifiers) == 0:
                 continue
-            elif len(modifiers) == 2:
+            elif len(modifiers) > 1:
                 raise RuntimeError(f'Mesh "{obj.name}" must have only one armature modifier')
             armature_modifier_objects.add(modifiers[0].object)
@@ -51,14 +57,18 @@ class PskBuilder(object):
 
         return input_objects
 
-    def build(self, context) -> Psk:
+    def build(self, context, options: PskBuilderOptions) -> Psk:
         input_objects = PskBuilder.get_input_objects(context)
+
+        armature_object = input_objects.armature_object
+
         psk = Psk()
+        bones = []
         materials = OrderedDict()
 
-        if input_objects.armature_object is None:
-            # Static mesh (no armature)
+        if armature_object is None:
+            # If the mesh has no armature object, simply assign it a dummy bone at the root to satisfy the requirement
+            # that a PSK file must have at least one bone.
             psk_bone = Psk.Bone()
             psk_bone.name = bytes('static', encoding='utf-8')
             psk_bone.flags = 0
@@ -68,15 +78,33 @@ class PskBuilder(object):
             psk_bone.rotation = Quaternion(0, 0, 0, 1)
             psk.bones.append(psk_bone)
         else:
-            bones = list(input_objects.armature_object.data.bones)
+            bones = list(armature_object.data.bones)
+
+            # If we are filtering by bone groups, get only the bones that are in the specified bone groups and their
+            # ancestors.
+            if options.bone_filter_mode == 'BONE_GROUPS':
+                bone_indices = get_export_bone_indices_for_bone_groups(armature_object, options.bone_group_indices)
+                bones = [bones[bone_index] for bone_index in bone_indices]
+
+            # Ensure that the exported hierarchy has a single root bone.
+            root_bones = [x for x in bones if x.parent is None]
+            print('root bones')
+            print(root_bones)
+            if len(root_bones) > 1:
+                root_bone_names = [x.name for x in bones]
+                raise RuntimeError('Exported bone hierarchy must have a single root bone.'
+                                   f'The bone hierarchy marked for export has {len(root_bones)} root bones: {root_bone_names}')
+
             for bone in bones:
                 psk_bone = Psk.Bone()
                 psk_bone.name = bytes(bone.name, encoding='utf-8')
                 psk_bone.flags = 0
-                psk_bone.children_count = len(bone.children)
+                psk_bone.children_count = 0
 
                 try:
-                    psk_bone.parent_index = bones.index(bone.parent)
+                    parent_index = bones.index(bone.parent)
+                    psk_bone.parent_index = parent_index
+                    psk.bones[parent_index].children_count += 1
                 except ValueError:
                     psk_bone.parent_index = 0
@@ -90,8 +118,8 @@ class PskBuilder(object):
                     parent_tail = quat_parent @ bone.parent.tail
                     location = (parent_tail - parent_head) + bone.head
                 else:
-                    location = input_objects.armature_object.matrix_local @ bone.head
+                    location = armature_object.matrix_local @ bone.head
 
-                rot_matrix = bone.matrix @ input_objects.armature_object.matrix_local.to_3x3()
+                rot_matrix = bone.matrix @ armature_object.matrix_local.to_3x3()
                 rotation = rot_matrix.to_quaternion()
 
                 psk_bone.location.x = location.x
@@ -177,14 +205,34 @@ class PskBuilder(object):
                 psk.faces.append(face)
 
             # WEIGHTS
-            # TODO: bone ~> vg might not be 1:1, provide a nice error message if this is the case
-            if input_objects.armature_object is not None:
-                armature = input_objects.armature_object.data
-                bone_names = [x.name for x in armature.bones]
+            if armature_object is not None:
+                # Because the vertex groups may contain entries for which there is no matching bone in the armature,
+                # we must filter them out and not export any weights for these vertex groups.
+                bone_names = [x.name for x in bones]
                 vertex_group_names = [x.name for x in object.vertex_groups]
-                bone_indices = [bone_names.index(name) for name in vertex_group_names]
+                vertex_group_bone_indices = dict()
+                for vertex_group_index, vertex_group_name in enumerate(vertex_group_names):
+                    try:
+                        vertex_group_bone_indices[vertex_group_index] = bone_names.index(vertex_group_name)
+                    except ValueError:
+                        # The vertex group does not have a matching bone in the list of bones to be exported.
+                        # Check to see if there is an associated bone for this vertex group that exists in the armature.
+                        # If there is, we can traverse the ancestors of that bone to find an alternate bone to use for
+                        # weighting the vertices belonging to this vertex group.
+                        if vertex_group_name in armature_object.data.bones:
+                            bone = armature_object.data.bones[vertex_group_name]
+                            while bone is not None:
+                                try:
+                                    bone_index = bone_names.index(bone.name)
+                                    vertex_group_bone_indices[vertex_group_index] = bone_index
+                                    break
+                                except ValueError:
+                                    bone = bone.parent
                 for vertex_group_index, vertex_group in enumerate(object.vertex_groups):
-                    bone_index = bone_indices[vertex_group_index]
-                    # TODO: exclude vertex group if it doesn't match to a bone we are exporting
+                    if vertex_group_index not in vertex_group_bone_indices:
+                        continue
+                    bone_index = vertex_group_bone_indices[vertex_group_index]
                     for vertex_index in range(len(object.data.vertices)):
                         try:
                             weight = vertex_group.weight(vertex_index)
@@ -36,8 +36,8 @@ class Psk(object):
     class Face(Structure):
         _fields_ = [
             ('wedge_indices', c_uint16 * 3),
-            ('material_index', c_int8),
-            ('aux_material_index', c_int8),
+            ('material_index', c_uint8),
+            ('aux_material_index', c_uint8),
             ('smoothing_groups', c_int32)
         ]
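With the new options parameter, callers of PskBuilder.build now construct a PskBuilderOptions first; a minimal sketch, with placeholder bone group indices:

import bpy

options = PskBuilderOptions()
options.bone_filter_mode = 'BONE_GROUPS'
options.bone_group_indices = [0, 2]  # placeholder indices of the bone groups to export
psk = PskBuilder().build(bpy.context, options)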
io_scene_psk_psa/psk/exporter.py (new file, 154 lines)
@@ -0,0 +1,154 @@
from .data import *
from ..types import BoneGroupListItem
from ..helpers import populate_bone_group_list
from .builder import PskBuilder, PskBuilderOptions
from typing import Type
from bpy.types import Operator, PropertyGroup
from bpy_extras.io_utils import ExportHelper
from bpy.props import StringProperty, CollectionProperty, IntProperty, BoolProperty, EnumProperty

MAX_WEDGE_COUNT = 65536
MAX_POINT_COUNT = 4294967296
MAX_BONE_COUNT = 256
MAX_MATERIAL_COUNT = 256


class PskExporter(object):

    def __init__(self, psk: Psk):
        self.psk: Psk = psk

    @staticmethod
    def write_section(fp, name: bytes, data_type: Type[Structure] = None, data: list = None):
        section = Section()
        section.name = name
        if data_type is not None and data is not None:
            section.data_size = sizeof(data_type)
            section.data_count = len(data)
        fp.write(section)
        if data is not None:
            for datum in data:
                fp.write(datum)

    def export(self, path: str):
        if len(self.psk.wedges) > MAX_WEDGE_COUNT:
            raise RuntimeError(f'Number of wedges ({len(self.psk.wedges)}) exceeds limit of {MAX_WEDGE_COUNT}')
        if len(self.psk.bones) > MAX_BONE_COUNT:
            raise RuntimeError(f'Number of bones ({len(self.psk.bones)}) exceeds limit of {MAX_BONE_COUNT}')
        if len(self.psk.points) > MAX_POINT_COUNT:
            raise RuntimeError(f'Number of vertices ({len(self.psk.points)}) exceeds limit of {MAX_POINT_COUNT}')
        if len(self.psk.materials) > MAX_MATERIAL_COUNT:
            raise RuntimeError(f'Number of materials ({len(self.psk.materials)}) exceeds limit of {MAX_MATERIAL_COUNT}')

        with open(path, 'wb') as fp:
            self.write_section(fp, b'ACTRHEAD')
            self.write_section(fp, b'PNTS0000', Vector3, self.psk.points)

            wedges = []
            for index, w in enumerate(self.psk.wedges):
                wedge = Psk.Wedge16()
                wedge.material_index = w.material_index
                wedge.u = w.u
                wedge.v = w.v
                wedge.point_index = w.point_index
                wedges.append(wedge)

            self.write_section(fp, b'VTXW0000', Psk.Wedge16, wedges)
            self.write_section(fp, b'FACE0000', Psk.Face, self.psk.faces)
            self.write_section(fp, b'MATT0000', Psk.Material, self.psk.materials)
            self.write_section(fp, b'REFSKELT', Psk.Bone, self.psk.bones)
            self.write_section(fp, b'RAWWEIGHTS', Psk.Weight, self.psk.weights)


def is_bone_filter_mode_item_available(context, identifier):
    input_objects = PskBuilder.get_input_objects(context)
    armature_object = input_objects.armature_object
    if identifier == 'BONE_GROUPS':
        if not armature_object or not armature_object.pose or not armature_object.pose.bone_groups:
            return False
    # else if... you can set up other conditions if you add more options
    return True


class PskExportOperator(Operator, ExportHelper):
    bl_idname = 'export.psk'
    bl_label = 'Export'
    __doc__ = 'Export mesh and armature to PSK'
    filename_ext = '.psk'
    filter_glob: StringProperty(default='*.psk', options={'HIDDEN'})

    filepath: StringProperty(
        name='File Path',
        description='File path used for exporting the PSK file',
        maxlen=1024,
        default='')

    def invoke(self, context, event):
        try:
            input_objects = PskBuilder.get_input_objects(context)
        except RuntimeError as e:
            self.report({'ERROR_INVALID_CONTEXT'}, str(e))
            return {'CANCELLED'}

        property_group = context.scene.psk_export

        # Populate bone groups list.
        populate_bone_group_list(input_objects.armature_object, property_group.bone_group_list)

        context.window_manager.fileselect_add(self)

        return {'RUNNING_MODAL'}

    def draw(self, context):
        layout = self.layout
        scene = context.scene
        property_group = scene.psk_export

        # BONES
        box = layout.box()
        box.label(text='Bones', icon='BONE_DATA')
        bone_filter_mode_items = property_group.bl_rna.properties['bone_filter_mode'].enum_items_static
        row = box.row(align=True)
        for item in bone_filter_mode_items:
            identifier = item.identifier
            item_layout = row.row(align=True)
            item_layout.prop_enum(property_group, 'bone_filter_mode', item.identifier)
            item_layout.enabled = is_bone_filter_mode_item_available(context, identifier)

        if property_group.bone_filter_mode == 'BONE_GROUPS':
            row = box.row()
            rows = max(3, min(len(property_group.bone_group_list), 10))
            row.template_list('PSX_UL_BoneGroupList', '', property_group, 'bone_group_list', property_group, 'bone_group_list_index', rows=rows)

    def execute(self, context):
        property_group = context.scene.psk_export
        builder = PskBuilder()
        options = PskBuilderOptions()
        options.bone_group_indices = [x.index for x in property_group.bone_group_list if x.is_selected]
        try:
            psk = builder.build(context, options)
        except RuntimeError as e:
            self.report({'ERROR_INVALID_CONTEXT'}, str(e))
            return {'CANCELLED'}
        exporter = PskExporter(psk)
        exporter.export(self.filepath)
        return {'FINISHED'}


class PskExportPropertyGroup(PropertyGroup):
    bone_filter_mode: EnumProperty(
        name='Bone Filter',
        description='',
        items=(
            ('ALL', 'All', 'All bones will be exported.'),
            ('BONE_GROUPS', 'Bone Groups', 'Only bones belonging to the selected bone groups and their ancestors will be exported.')
        )
    )
    bone_group_list: CollectionProperty(type=BoneGroupListItem)
    bone_group_list_index: IntProperty(default=0)


__classes__ = [
    PskExportOperator,
    PskExportPropertyGroup
]
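A minimal sketch of driving the builder and exporter directly, outside of the export operator; the output path is a placeholder:

import bpy

try:
    options = PskBuilderOptions()
    options.bone_filter_mode = 'ALL'
    psk = PskBuilder().build(bpy.context, options)
    PskExporter(psk).export('C:/exports/example.psk')
except RuntimeError as e:
    # build() raises for invalid input (e.g. multiple root bones) and export()
    # raises when a section exceeds the PSK limits defined above.
    print(e)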
io_scene_psk_psa/psk/importer.py (new file, 189 lines)
@@ -0,0 +1,189 @@
import os
import bpy
import bmesh
from typing import Optional
from .data import Psk
from mathutils import Quaternion, Vector, Matrix
from .reader import PskReader
from bpy.props import StringProperty
from bpy.types import Operator
from bpy_extras.io_utils import ImportHelper


class PskImporter(object):
    def __init__(self):
        pass

    def import_psk(self, psk: Psk, name: str, context):
        # ARMATURE
        armature_data = bpy.data.armatures.new(name)
        armature_object = bpy.data.objects.new(name, armature_data)
        armature_object.show_in_front = True

        context.scene.collection.objects.link(armature_object)

        try:
            bpy.ops.object.mode_set(mode='OBJECT')
        except:
            pass

        armature_object.select_set(state=True)
        bpy.context.view_layer.objects.active = armature_object

        bpy.ops.object.mode_set(mode='EDIT')

        # Intermediate bone type for the purpose of construction.
        class ImportBone(object):
            def __init__(self, index: int, psk_bone: Psk.Bone):
                self.index: int = index
                self.psk_bone: Psk.Bone = psk_bone
                self.parent: Optional[ImportBone] = None
                self.local_rotation: Quaternion = Quaternion()
                self.local_translation: Vector = Vector()
                self.world_rotation_matrix: Matrix = Matrix()
                self.world_matrix: Matrix = Matrix()
                self.vertex_group = None
                self.orig_quat: Quaternion = Quaternion()
                self.orig_loc: Vector = Vector()
                self.post_quat: Quaternion = Quaternion()

        import_bones = []
        new_bone_size = 8.0

        for bone_index, psk_bone in enumerate(psk.bones):
            import_bone = ImportBone(bone_index, psk_bone)
            psk_bone.parent_index = max(0, psk_bone.parent_index)
            import_bone.local_rotation = Quaternion(tuple(psk_bone.rotation))
            import_bone.local_translation = Vector(tuple(psk_bone.location))
            if psk_bone.parent_index == 0 and bone_index == 0:
                import_bone.world_rotation_matrix = import_bone.local_rotation.to_matrix()
                import_bone.world_matrix = Matrix.Translation(import_bone.local_translation)
            import_bones.append(import_bone)

        for bone_index, bone in enumerate(import_bones):
            if bone.psk_bone.parent_index == 0 and bone_index == 0:
                continue
            parent = import_bones[bone.psk_bone.parent_index]
            bone.parent = parent
            bone.world_matrix = parent.world_rotation_matrix.to_4x4()
            translation = bone.local_translation.copy()
            translation.rotate(parent.world_rotation_matrix)
            bone.world_matrix.translation = parent.world_matrix.translation + translation
            bone.world_rotation_matrix = bone.local_rotation.conjugated().to_matrix()
            bone.world_rotation_matrix.rotate(parent.world_rotation_matrix)

        for import_bone in import_bones:
            bone_name = import_bone.psk_bone.name.decode('utf-8')
            edit_bone = armature_data.edit_bones.new(bone_name)

            if import_bone.parent is not None:
                edit_bone.parent = armature_data.edit_bones[import_bone.psk_bone.parent_index]
            else:
                import_bone.local_rotation.conjugate()

            edit_bone.tail = Vector((0.0, new_bone_size, 0.0))
            edit_bone_matrix = import_bone.local_rotation.conjugated()
            edit_bone_matrix.rotate(import_bone.world_matrix)
            edit_bone_matrix = edit_bone_matrix.to_matrix().to_4x4()
            edit_bone_matrix.translation = import_bone.world_matrix.translation
            edit_bone.matrix = edit_bone_matrix

            # Store bind pose information in the bone's custom properties.
            # This information is used when importing animations from PSA files.
            edit_bone['orig_quat'] = import_bone.local_rotation
            edit_bone['orig_loc'] = import_bone.local_translation
            edit_bone['post_quat'] = import_bone.local_rotation.conjugated()

        # MESH
        mesh_data = bpy.data.meshes.new(name)
        mesh_object = bpy.data.objects.new(name, mesh_data)

        # MATERIALS
        for material in psk.materials:
            # TODO: re-use of materials should be an option
            bpy_material = bpy.data.materials.new(material.name.decode('utf-8'))
            mesh_data.materials.append(bpy_material)

        bm = bmesh.new()

        # VERTICES
        for point in psk.points:
            bm.verts.new(tuple(point))

        bm.verts.ensure_lookup_table()

        degenerate_face_indices = set()
        for face_index, face in enumerate(psk.faces):
            point_indices = [bm.verts[psk.wedges[i].point_index] for i in reversed(face.wedge_indices)]
            try:
                bm_face = bm.faces.new(point_indices)
                bm_face.material_index = face.material_index
            except ValueError:
                degenerate_face_indices.add(face_index)

        if len(degenerate_face_indices) > 0:
            print(f'WARNING: Discarded {len(degenerate_face_indices)} degenerate face(s).')

        bm.to_mesh(mesh_data)

        # TEXTURE COORDINATES
        data_index = 0
        uv_layer = mesh_data.uv_layers.new()
        for face_index, face in enumerate(psk.faces):
            if face_index in degenerate_face_indices:
                continue
            face_wedges = [psk.wedges[i] for i in reversed(face.wedge_indices)]
            for wedge in face_wedges:
                uv_layer.data[data_index].uv = wedge.u, 1.0 - wedge.v
                data_index += 1

        bm.normal_update()
        bm.free()

        # VERTEX WEIGHTS

        # Get a list of all bones that have weights associated with them.
        vertex_group_bone_indices = set(map(lambda weight: weight.bone_index, psk.weights))
        for import_bone in map(lambda x: import_bones[x], sorted(list(vertex_group_bone_indices))):
            import_bone.vertex_group = mesh_object.vertex_groups.new(name=import_bone.psk_bone.name.decode('windows-1252'))

        for weight in psk.weights:
            import_bones[weight.bone_index].vertex_group.add((weight.point_index,), weight.weight, 'ADD')

        # Add armature modifier to our mesh object.
        armature_modifier = mesh_object.modifiers.new(name='Armature', type='ARMATURE')
        armature_modifier.object = armature_object
        mesh_object.parent = armature_object

        context.scene.collection.objects.link(mesh_object)

        try:
            bpy.ops.object.mode_set(mode='OBJECT')
        except:
            pass


class PskImportOperator(Operator, ImportHelper):
    bl_idname = 'import.psk'
    bl_label = 'Import'
    __doc__ = 'Load a PSK file'
    filename_ext = '.psk'
    filter_glob: StringProperty(default='*.psk', options={'HIDDEN'})
    filepath: StringProperty(
        name='File Path',
        description='File path used for importing the PSK file',
        maxlen=1024,
        default='')

    def execute(self, context):
        reader = PskReader()
        psk = reader.read(self.filepath)
        name = os.path.splitext(os.path.basename(self.filepath))[0]
        PskImporter().import_psk(psk, name, context)
        return {'FINISHED'}


__classes__ = [
    PskImportOperator
]
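The import operator above is a thin wrapper; a minimal sketch of driving the importer headlessly, with a placeholder path:

import os
import bpy

filepath = 'C:/exports/example.psk'
psk = PskReader().read(filepath)
object_name = os.path.splitext(os.path.basename(filepath))[0]
# Creates the armature and mesh objects and links them into the active scene collection.
PskImporter().import_psk(psk, object_name, bpy.context)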
io_scene_psk_psa/psk/reader.py (new file, 46 lines)
@@ -0,0 +1,46 @@
from .data import *
import ctypes


class PskReader(object):

    def __init__(self):
        pass

    @staticmethod
    def read_types(fp, data_class: ctypes.Structure, section: Section, data):
        buffer_length = section.data_size * section.data_count
        buffer = fp.read(buffer_length)
        offset = 0
        for _ in range(section.data_count):
            data.append(data_class.from_buffer_copy(buffer, offset))
            offset += section.data_size

    def read(self, path) -> Psk:
        psk = Psk()
        with open(path, 'rb') as fp:
            while fp.read(1):
                fp.seek(-1, 1)
                section = Section.from_buffer_copy(fp.read(ctypes.sizeof(Section)))
                if section.name == b'ACTRHEAD':
                    pass
                elif section.name == b'PNTS0000':
                    PskReader.read_types(fp, Vector3, section, psk.points)
                elif section.name == b'VTXW0000':
                    if section.data_size == ctypes.sizeof(Psk.Wedge16):
                        PskReader.read_types(fp, Psk.Wedge16, section, psk.wedges)
                    elif section.data_size == ctypes.sizeof(Psk.Wedge32):
                        PskReader.read_types(fp, Psk.Wedge32, section, psk.wedges)
                    else:
                        raise RuntimeError('Unrecognized wedge format')
                elif section.name == b'FACE0000':
                    PskReader.read_types(fp, Psk.Face, section, psk.faces)
                elif section.name == b'MATT0000':
                    PskReader.read_types(fp, Psk.Material, section, psk.materials)
                elif section.name == b'REFSKELT':
                    PskReader.read_types(fp, Psk.Bone, section, psk.bones)
                elif section.name == b'RAWWEIGHTS':
                    PskReader.read_types(fp, Psk.Weight, section, psk.weights)
                else:
                    raise RuntimeError(f'Unrecognized section "{section.name}"')
        return psk
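A short sketch of reading a PSK and inspecting the parsed sections; the path is a placeholder:

psk = PskReader().read('C:/exports/example.psk')
# Each recognized section is appended onto the corresponding list of the Psk instance.
print(len(psk.points), 'points')
print(len(psk.wedges), 'wedges')
print(len(psk.faces), 'faces')
print(len(psk.materials), 'materials')
print(len(psk.bones), 'bones')
print(len(psk.weights), 'weights')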
io_scene_psk_psa/types.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from bpy.types import PropertyGroup, UIList
from bpy.props import StringProperty, IntProperty, BoolProperty


class PSX_UL_BoneGroupList(UIList):
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
        layout.alignment = 'LEFT'
        layout.prop(item, 'is_selected', icon_only=True)
        layout.label(text=item.name, icon='GROUP_BONE' if item.index >= 0 else 'NONE')


class BoneGroupListItem(PropertyGroup):
    name: StringProperty()
    index: IntProperty()
    is_selected: BoolProperty(default=False)

    @property
    def name(self):
        return self.name


__classes__ = [
    BoneGroupListItem,
    PSX_UL_BoneGroupList
]