mirror of https://github.com/DarklightGames/io_scene_psk_psa.git synced 2024-11-28 00:20:48 +01:00

Merge branch 'master' into blender-4.1

Colin Basnett 2024-02-12 18:02:59 -08:00
commit 8c49c8f34e
20 changed files with 246 additions and 157 deletions


@@ -57,6 +57,12 @@ Bug fixes will be issued for legacy addon versions that are under [Blender's LTS
> Note that in order to see the imported actions applied to your armature, you must use the [Dope Sheet](https://docs.blender.org/manual/en/latest/editors/dope_sheet/introduction.html) or [Nonlinear Animation](https://docs.blender.org/manual/en/latest/editors/nla/introduction.html) editors.
# FAQ
## Why can't I see the animations imported from my PSA?
Simply importing an animation into the scene will not automatically apply the action to the armature. This is partly because a PSA can contain multiple sequences, and partly because it is generally bad form for importers to modify the scene when they don't need to.
The PSA importer creates [Actions](https://docs.blender.org/manual/en/latest/animation/actions.html) for each of the selected sequences in the PSA. These actions can be applied to your armature via the [Action Editor](https://docs.blender.org/manual/en/latest/editors/dope_sheet/action.html) or [NLA Editor](https://docs.blender.org/manual/en/latest/editors/nla/index.html).
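Once imported, an action can also be assigned from Python. A minimal sketch, assuming an armature object and an imported action with hypothetical names:

```python
import bpy

# Hypothetical names; substitute your armature object and an action
# created by the PSA importer.
armature_object = bpy.data.objects['Armature']
action = bpy.data.actions['run_forward']

# Create animation data on the armature if it has none, then assign the action.
if armature_object.animation_data is None:
    armature_object.animation_data_create()
armature_object.animation_data.action = action
```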
## Why are the mesh normals not accurate when importing a PSK extracted from [UE Viewer](https://www.gildor.org/en/projects/umodel)?
If preserving the mesh normals of models is important for your workflow, it is *not recommended* to export PSK files from UE Viewer. This is because UE Viewer makes no attempt to reconstruct the original [smoothing groups](https://en.wikipedia.org/wiki/Smoothing_group); as a result, the normals of such PSK files will be incorrect when imported into Blender and will need to be fixed manually.
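If you do need to repair such an import, one possible starting point (not a feature of this addon; the object name is hypothetical) is to discard the imported custom split normals and fall back to smooth shading:

```python
import bpy

# Hypothetical object name; make the imported mesh the active object.
mesh_object = bpy.data.objects['SK_ExampleMesh']
bpy.context.view_layer.objects.active = mesh_object

# Discard the custom split normals imported from the PSK...
bpy.ops.mesh.customdata_custom_splitnormals_clear()

# ...and shade the faces smooth so Blender recomputes the normals itself.
for polygon in mesh_object.data.polygons:
    polygon.use_smooth = True
```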


@@ -1,15 +1,15 @@
from bpy.app.handlers import persistent
bl_info = {
"name": "PSK/PSA Importer/Exporter",
"author": "Colin Basnett, Yurii Ti",
"version": (6, 1, 0),
"blender": (4, 0, 0),
"description": "PSK/PSA Import/Export (.psk/.psa)",
"warning": "",
"doc_url": "https://github.com/DarklightGames/io_scene_psk_psa",
"tracker_url": "https://github.com/DarklightGames/io_scene_psk_psa/issues",
"category": "Import-Export"
'name': 'PSK/PSA Importer/Exporter',
'author': 'Colin Basnett, Yurii Ti',
'version': (6, 1, 2),
'blender': (4, 0, 0),
'description': 'PSK/PSA Import/Export (.psk/.psa)',
'warning': '',
'doc_url': 'https://github.com/DarklightGames/io_scene_psk_psa',
'tracker_url': 'https://github.com/DarklightGames/io_scene_psk_psa/issues',
'category': 'Import-Export'
}
if 'bpy' in locals():


@@ -30,12 +30,12 @@ def get_nla_strips_in_frame_range(animation_data: AnimData, frame_min: float, fr
def populate_bone_collection_list(armature_object: Object, bone_collection_list: bpy.props.CollectionProperty) -> None:
"""
'''
Updates the bone collections collection.
Bone collection selections are preserved between updates unless none of the collections were previously selected;
otherwise, all collections are selected by default.
"""
'''
has_selected_collections = any([g.is_selected for g in bone_collection_list])
unassigned_collection_is_selected, selected_assigned_collection_names = True, []
@@ -84,7 +84,7 @@ def check_bone_names(bone_names: Iterable[str]):
def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_collection_indices: List[int]) -> List[str]:
"""
'''
Returns a sorted list of bone indices that should be exported for the given bone filter mode and bone collections.
Note that the ancestors of bones within the bone collections will also be present in the returned list.
@@ -93,7 +93,7 @@ def get_export_bone_names(armature_object: Object, bone_filter_mode: str, bone_c
:param bone_filter_mode: One of ['ALL', 'BONE_COLLECTIONS']
:param bone_collection_indices: List of bone collection indices to be exported.
:return: A sorted list of bone indices that should be exported.
"""
'''
if armature_object is None or armature_object.type != 'ARMATURE':
raise ValueError('An armature object must be supplied')


@@ -13,36 +13,66 @@ class PsaConfig:
self.sequence_bone_flags: Dict[str, Dict[int, int]] = dict()
def _load_config_file(file_path: str) -> ConfigParser:
'''
UEViewer exports a dialect of INI files that is not compatible with Python's ConfigParser.
Specifically, it allows values in this format:
[Section]
Key1
Key2
This is not allowed in Python's ConfigParser, which requires a '=' character after each key name.
To work around this, we rewrite the file's contents in memory, appending a '=' to each bare key name before parsing.
'''
with open(file_path, 'r') as f:
lines = f.read().split('\n')
lines = [re.sub(r'^\s*(\w+)\s*$', r'\1=', line) for line in lines]
contents = '\n'.join(lines)
config = ConfigParser()
config.read_string(contents)
return config
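# A quick illustration of the rewrite above (hypothetical key name): the bare
# key 'UseAnimTracks' becomes 'UseAnimTracks=', which ConfigParser then parses
# as a key with an empty value:
#
#   >>> re.sub(r'^\s*(\w+)\s*$', r'\1=', 'UseAnimTracks')
#   'UseAnimTracks='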
def _get_bone_flags_from_value(value: str) -> int:
match value:
case 'all':
return (REMOVE_TRACK_LOCATION | REMOVE_TRACK_ROTATION)
case 'trans':
return REMOVE_TRACK_LOCATION
case 'rot':
return REMOVE_TRACK_ROTATION
case _:
return 0
def read_psa_config(psa_reader: PsaReader, file_path: str) -> PsaConfig:
psa_config = PsaConfig()
config = ConfigParser()
config.read(file_path)
psa_sequence_names = list(psa_reader.sequences.keys())
lowercase_sequence_names = [sequence_name.lower() for sequence_name in psa_sequence_names]
config = _load_config_file(file_path)
if config.has_section('RemoveTracks'):
for key, value in config.items('RemoveTracks'):
match = re.match(r'^(.+)\.(\d+)$', key)
sequence_name = match.group(1)
bone_index = int(match.group(2))
# Map the sequence name onto the actual sequence name in the PSA file.
try:
psa_sequence_names = list(psa_reader.sequences.keys())
lowercase_sequence_names = [sequence_name.lower() for sequence_name in psa_sequence_names]
sequence_name = psa_sequence_names[lowercase_sequence_names.index(sequence_name.lower())]
except ValueError:
pass
# Sequence name is not in the PSA file.
continue
if sequence_name not in psa_config.sequence_bone_flags:
psa_config.sequence_bone_flags[sequence_name] = dict()
match value:
case 'all':
psa_config.sequence_bone_flags[sequence_name][bone_index] = (REMOVE_TRACK_LOCATION | REMOVE_TRACK_ROTATION)
case 'trans':
psa_config.sequence_bone_flags[sequence_name][bone_index] = REMOVE_TRACK_LOCATION
case 'rot':
psa_config.sequence_bone_flags[sequence_name][bone_index] = REMOVE_TRACK_ROTATION
bone_index = int(match.group(2))
psa_config.sequence_bone_flags[sequence_name][bone_index] = _get_bone_flags_from_value(value)
return psa_config
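# A sketch of the .config shape that read_psa_config consumes (sequence names
# and bone indices are hypothetical); keys take the form
# '<sequence name>.<bone index>' and values map through
# _get_bone_flags_from_value above:
#
#   [RemoveTracks]
#   run_forward.0=all
#   run_forward.5=trans
#   idle.12=rot
#
# Assuming those sequences exist in the PSA, this would yield:
#
#   psa_config.sequence_bone_flags == {
#       'run_forward': {0: REMOVE_TRACK_LOCATION | REMOVE_TRACK_ROTATION,
#                       5: REMOVE_TRACK_LOCATION},
#       'idle': {12: REMOVE_TRACK_ROTATION},
#   }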


@@ -4,10 +4,10 @@ from typing import List
from ..data import *
"""
'''
Note that keys are not stored within the Psa object.
Use the PsaReader::get_sequence_keys to get the keys for a sequence.
"""
'''
class Psa:


@@ -47,7 +47,7 @@ def update_actions_and_timeline_markers(context: Context, armature: Armature):
if not is_action_for_armature(armature, action):
continue
if not action.name.startswith('#'):
if action.name != '' and not action.name.startswith('#'):
for (name, frame_start, frame_end) in get_sequences_from_action(action):
item = pg.action_list.add()
item.action = action
@@ -60,7 +60,7 @@ def update_actions_and_timeline_markers(context: Context, armature: Armature):
# Pose markers are not guaranteed to be in frame-order, so make sure that they are.
pose_markers = sorted(action.pose_markers, key=lambda x: x.frame)
for pose_marker_index, pose_marker in enumerate(pose_markers):
if pose_marker.name.startswith('#'):
if pose_marker.name.strip() == '' or pose_marker.name.startswith('#'):
continue
for (name, frame_start, frame_end) in get_sequences_from_action_pose_marker(action, pose_markers, pose_marker, pose_marker_index):
item = pg.action_list.add()
@@ -78,7 +78,7 @@ def update_actions_and_timeline_markers(context: Context, armature: Armature):
for marker_name in marker_names:
if marker_name not in sequence_frame_ranges:
continue
if marker_name.startswith('#'):
if marker_name.strip() == '' or marker_name.startswith('#'):
continue
frame_start, frame_end = sequence_frame_ranges[marker_name]
sequences = get_sequences_from_name_and_frame_range(marker_name, frame_start, frame_end)


@@ -152,7 +152,7 @@ class PSA_PG_export(PropertyGroup):
default=False,
name='Enforce Bone Name Restrictions',
description='Bone name restrictions will be enforced. Note that bones without properly formatted names '
'cannot be referenced in scripts'
'may not be referenceable in-engine'
)
sequence_name_prefix: StringProperty(name='Prefix', options=empty_set)
sequence_name_suffix: StringProperty(name='Suffix', options=empty_set)


@@ -30,15 +30,15 @@ class PSA_UL_export_sequences(UIList):
pg = getattr(context.scene, 'psa_export')
row = layout.row()
subrow = row.row(align=True)
subrow.prop(pg, 'sequence_filter_name', text="")
subrow.prop(pg, 'sequence_use_filter_invert', text="", icon='ARROW_LEFTRIGHT')
subrow.prop(pg, 'sequence_filter_name', text='')
subrow.prop(pg, 'sequence_use_filter_invert', text='', icon='ARROW_LEFTRIGHT')
# subrow.prop(pg, 'sequence_use_filter_sort_reverse', text='', icon='SORT_ASC')
if pg.sequence_source == 'ACTIONS':
subrow = row.row(align=True)
subrow.prop(pg, 'sequence_filter_asset', icon_only=True, icon='ASSET_MANAGER')
subrow.prop(pg, 'sequence_filter_pose_marker', icon_only=True, icon='PMARKER')
subrow.prop(pg, 'sequence_filter_reversed', text="", icon='FRAME_PREV')
subrow.prop(pg, 'sequence_filter_reversed', text='', icon='FRAME_PREV')
def filter_items(self, context, data, prop):
pg = getattr(context.scene, 'psa_export')


@@ -95,15 +95,15 @@ class PSA_OT_import_select_file(Operator):
bl_options = {'INTERNAL'}
bl_description = 'Select a PSA file from which to import animations'
filepath: StringProperty(subtype='FILE_PATH')
filter_glob: StringProperty(default="*.psa", options={'HIDDEN'})
filter_glob: StringProperty(default='*.psa', options={'HIDDEN'})
def execute(self, context):
getattr(context.scene, 'psa_import').psa_file_path = self.filepath
return {"FINISHED"}
return {'FINISHED'}
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {"RUNNING_MODAL"}
return {'RUNNING_MODAL'}
def load_psa_file(context, filepath: str):
@@ -158,6 +158,10 @@ class PSA_OT_import(Operator, ImportHelper):
psa_reader = PsaReader(self.filepath)
sequence_names = [x.action_name for x in pg.sequence_list if x.is_selected]
if len(sequence_names) == 0:
self.report({'ERROR_INVALID_CONTEXT'}, 'No sequences selected')
return {'CANCELLED'}
options = PsaImportOptions()
options.sequence_names = sequence_names
options.should_use_fake_user = pg.should_use_fake_user
@@ -171,14 +175,14 @@ class PSA_OT_import(Operator, ImportHelper):
options.fps_source = pg.fps_source
options.fps_custom = pg.fps_custom
if options.should_use_config_file:
# Read the PSA config file if it exists.
config_path = Path(self.filepath).with_suffix('.config')
if config_path.exists():
try:
options.psa_config = read_psa_config(psa_reader, str(config_path))
if len(sequence_names) == 0:
self.report({'ERROR_INVALID_CONTEXT'}, 'No sequences selected')
return {'CANCELLED'}
except Exception as e:
self.report({'WARNING'}, f'Failed to read PSA config file: {e}')
result = import_psa(context, psa_reader, context.view_layer.objects.active, options)
@@ -258,6 +262,8 @@ class PSA_OT_import(Operator, ImportHelper):
col.use_property_decorate = False
col.prop(pg, 'should_use_fake_user')
col.prop(pg, 'should_stash')
col.prop(pg, 'should_use_config_file')
col.prop(pg, 'should_use_action_name_prefix')
if pg.should_use_action_name_prefix:


@@ -32,6 +32,12 @@ class PSA_PG_import(PropertyGroup):
description='Assign each imported action a fake user so that the data block is '
'saved even if it has no users',
options=empty_set)
should_use_config_file: BoolProperty(default=True, name='Use Config File',
description='Use the .config file that is sometimes generated when the PSA '
'file is exported from UEViewer. This file contains '
'options that can be used to filter out certain bone tracks '
'from the imported actions',
options=empty_set)
should_stash: BoolProperty(default=False, name='Stash',
description='Stash each imported action as a strip on a new non-contributing NLA track',
options=empty_set)


@@ -17,10 +17,10 @@ class PSA_UL_sequences(UIList):
pg = getattr(context.scene, 'psa_import')
row = layout.row()
sub_row = row.row(align=True)
sub_row.prop(pg, 'sequence_filter_name', text="")
sub_row.prop(pg, 'sequence_use_filter_invert', text="", icon='ARROW_LEFTRIGHT')
sub_row.prop(pg, 'sequence_use_filter_regex', text="", icon='SORTBYEXT')
sub_row.prop(pg, 'sequence_filter_is_selected', text="", icon='CHECKBOX_HLT')
sub_row.prop(pg, 'sequence_filter_name', text='')
sub_row.prop(pg, 'sequence_use_filter_invert', text='', icon='ARROW_LEFTRIGHT')
sub_row.prop(pg, 'sequence_use_filter_regex', text='', icon='SORTBYEXT')
sub_row.prop(pg, 'sequence_filter_is_selected', text='', icon='CHECKBOX_HLT')
def filter_items(self, context, data, property_):
pg = getattr(context.scene, 'psa_import')


@@ -24,6 +24,7 @@ class PsaImportOptions(object):
self.bone_mapping_mode = 'CASE_INSENSITIVE'
self.fps_source = 'SEQUENCE'
self.fps_custom: float = 30.0
self.should_use_config_file = True
self.psa_config: PsaConfig = PsaConfig()
@@ -63,12 +64,12 @@ class PsaImportResult:
def _get_armature_bone_index_for_psa_bone(psa_bone_name: str, armature_bone_names: List[str], bone_mapping_mode: str = 'EXACT') -> Optional[int]:
"""
'''
@param psa_bone_name: The name of the PSA bone.
@param armature_bone_names: The names of the bones in the armature.
@param bone_mapping_mode: One of 'EXACT' or 'CASE_INSENSITIVE'.
@return: The index of the armature bone that corresponds to the given PSA bone, or None if no such bone exists.
"""
'''
for armature_bone_index, armature_bone_name in enumerate(armature_bone_names):
if bone_mapping_mode == 'CASE_INSENSITIVE':
if armature_bone_name.lower() == psa_bone_name.lower():
@@ -175,14 +176,14 @@ def import_psa(context: Context, psa_reader: PsaReader, armature_object: Object,
action = bpy.data.actions.new(name=action_name)
# Calculate the target FPS.
target_fps = sequence.fps
if options.fps_source == 'CUSTOM':
match options.fps_source:
case 'CUSTOM':
target_fps = options.fps_custom
elif options.fps_source == 'SCENE':
case 'SCENE':
target_fps = context.scene.render.fps
elif options.fps_source == 'SEQUENCE':
case 'SEQUENCE':
target_fps = sequence.fps
else:
case _:
raise ValueError(f'Unknown FPS source: {options.fps_source}')
keyframe_time_dilation = target_fps / sequence.fps


@@ -11,8 +11,7 @@ def _try_fix_cue4parse_issue_103(sequences) -> bool:
# The issue was that the frame_start_index was not being set correctly, and was always being set to the same value
# as the frame_count.
# This fix will eventually be deprecated as it is only necessary for files exported prior to the fix.
if len(sequences) > 0:
if sequences[0].frame_start_index == sequences[0].frame_count:
if len(sequences) > 0 and sequences[0].frame_start_index == sequences[0].frame_count:
# Manually set the frame_start_index for each sequence. This assumes that the sequences are in order with
# no shared frames between sequences (all exporters that I know of do this, so it's a safe assumption).
frame_start_index = 0
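# The rest of the fix-up loop is cut off by this hunk; a minimal sketch of it,
# under the stated assumption that sequences are contiguous and in order:
#
#   frame_start_index = 0
#   for sequence in sequences:
#       sequence.frame_start_index = frame_start_index
#       frame_start_index += sequence.frame_count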
@@ -24,11 +23,11 @@
class PsaReader(object):
"""
'''
This class reads the sequences and bone information immediately upon instantiation and holds onto a file handle.
The keyframe data is not read into memory upon instantiation due to its potentially very large size.
To read the key data for a particular sequence, call :read_sequence_keys.
"""
'''
def __init__(self, path):
self.keys_data_offset: int = 0
@@ -44,11 +43,11 @@ class PsaReader(object):
return self.psa.sequences
def read_sequence_data_matrix(self, sequence_name: str) -> np.ndarray:
"""
'''
Reads and returns the data matrix for the given sequence.
@param sequence_name: The name of the sequence.
@return: An FxBx7 matrix where F is the number of frames and B is the number of bones; the 7 values per key are its location and rotation quaternion components.
"""
'''
sequence = self.psa.sequences[sequence_name]
keys = self.read_sequence_keys(sequence_name)
bone_count = len(self.bones)
@@ -61,12 +60,12 @@ class PsaReader(object):
return matrix
def read_sequence_keys(self, sequence_name: str) -> List[Psa.Key]:
"""
'''
Reads and returns the key data for a sequence.
@param sequence_name: The name of the sequence.
@return: A list of Psa.Keys.
"""
'''
# Set the file reader to the beginning of the keys data
sequence = self.psa.sequences[sequence_name]
data_size = sizeof(Psa.Key)


@@ -1,5 +1,8 @@
from typing import Optional
import bmesh
import bpy
import numpy as np
from bpy.types import Armature
from .data import *
@@ -9,7 +12,7 @@ from ..helpers import *
class PskInputObjects(object):
def __init__(self):
self.mesh_objects = []
self.armature_object = None
self.armature_object: Optional[Object] = None
class PskBuildOptions(object):
@@ -61,7 +64,7 @@ def get_psk_input_objects(context) -> PskInputObjects:
class PskBuildResult(object):
def __init__(self):
self.psk = None
self.warnings = []
self.warnings: List[str] = []
def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
@@ -150,6 +153,8 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
for object_index, input_mesh_object in enumerate(input_objects.mesh_objects):
should_flip_normals = False
# MATERIALS
material_indices = [material_names.index(material_slot.material.name) for material_slot in input_mesh_object.material_slots]
@@ -177,8 +182,16 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
mesh_object.matrix_world = input_mesh_object.matrix_world
scale = (input_mesh_object.scale.x, input_mesh_object.scale.y, input_mesh_object.scale.z)
if any(map(lambda x: x < 0, scale)):
result.warnings.append(f'Mesh "{input_mesh_object.name}" has negative scaling which may result in inverted normals.')
# Negative scaling in Blender results in inverted normals after the scale is applied. However, if the scale
# is not applied, the normals will appear unaffected in the viewport. The evaluated mesh data used in the
# export will have the scale applied, but this behavior is not obvious to the user.
#
# In order to have the exporter be as WYSIWYG as possible, we need to check for negative scaling and invert
# the normals if necessary. If two axes have negative scaling and the third has positive scaling, the
# normals will be correct. We can detect this by checking if the number of negative scaling axes is odd. If
# it is, we need to invert the normals of the mesh by swapping the order of the vertices in each face.
should_flip_normals = sum(1 for x in scale if x < 0) % 2 == 1
# Copy the vertex groups
for vertex_group in input_mesh_object.vertex_groups:
@@ -207,11 +220,11 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
# Build a list of non-unique wedges.
wedges = []
for loop_index, loop in enumerate(mesh_data.loops):
wedge = Psk.Wedge()
wedge.point_index = loop.vertex_index + vertex_offset
wedge.u, wedge.v = uv_layer[loop_index].uv
wedge.v = 1.0 - wedge.v
wedges.append(wedge)
wedges.append(Psk.Wedge(
point_index=loop.vertex_index + vertex_offset,
u=uv_layer[loop_index].uv[0],
v=1.0 - uv_layer[loop_index].uv[1]
))
# Assign material indices to the wedges.
for triangle in mesh_data.loop_triangles:
@@ -219,8 +232,8 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
wedges[loop_index].material_index = material_indices[triangle.material_index]
# Populate the list of wedges with unique wedges & build a look-up table of loop indices to wedge indices
wedge_indices = {}
loop_wedge_indices = [-1] * len(mesh_data.loops)
wedge_indices = dict()
loop_wedge_indices = np.full(len(mesh_data.loops), -1)
for loop_index, wedge in enumerate(wedges):
wedge_hash = hash(wedge)
if wedge_hash in wedge_indices:
@@ -233,6 +246,7 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
# FACES
poly_groups, groups = mesh_data.calc_smooth_groups(use_bitflags=True)
psk_face_start_index = len(psk.faces)
for f in mesh_data.loop_triangles:
face = Psk.Face()
face.material_index = material_indices[f.material_index]
@@ -242,6 +256,11 @@ def build_psk(context, options: PskBuildOptions) -> PskBuildResult:
face.smoothing_groups = poly_groups[f.polygon_index]
psk.faces.append(face)
if should_flip_normals:
# Invert the normals of the faces.
for face in psk.faces[psk_face_start_index:]:
face.wedge_indices[0], face.wedge_indices[2] = face.wedge_indices[2], face.wedge_indices[0]
# WEIGHTS
if armature_object is not None:
armature_data = typing.cast(Armature, armature_object.data)


@@ -5,11 +5,11 @@ from ..data import *
class Psk(object):
class Wedge(object):
def __init__(self):
self.point_index: int = 0
self.u: float = 0.0
self.v: float = 0.0
self.material_index: int = 0
def __init__(self, point_index: int, u: float, v: float, material_index: int = 0):
self.point_index: int = point_index
self.u: float = u
self.v: float = v
self.material_index = material_index
def __hash__(self):
return hash(f'{self.point_index}-{self.u}-{self.v}-{self.material_index}')


@@ -51,7 +51,7 @@ class PSK_OT_material_list_move_up(Operator):
pg = getattr(context.scene, 'psk_export')
pg.material_list.move(pg.material_list_index, pg.material_list_index - 1)
pg.material_list_index -= 1
return {"FINISHED"}
return {'FINISHED'}
class PSK_OT_material_list_move_down(Operator):
@@ -69,7 +69,7 @@ class PSK_OT_material_list_move_down(Operator):
pg = getattr(context.scene, 'psk_export')
pg.material_list.move(pg.material_list_index, pg.material_list_index + 1)
pg.material_list_index += 1
return {"FINISHED"}
return {'FINISHED'}
class PSK_OT_export(Operator, ExportHelper):


@@ -38,8 +38,8 @@ class PSK_OT_import(Operator, ImportHelper):
should_import_vertex_colors: BoolProperty(
default=True,
options=empty_set,
name='Vertex Colors',
description='Import vertex colors from PSKX files, if available'
name='Import Vertex Colors',
description='Import vertex colors, if available'
)
vertex_color_space: EnumProperty(
name='Vertex Color Space',
@@ -53,13 +53,13 @@
)
should_import_vertex_normals: BoolProperty(
default=True,
name='Vertex Normals',
name='Import Vertex Normals',
options=empty_set,
description='Import vertex normals, if available'
)
should_import_extra_uvs: BoolProperty(
default=True,
name='Extra UVs',
name='Import Extra UVs',
options=empty_set,
description='Import extra UV maps, if available'
)
@@ -74,12 +74,6 @@
name='Import Materials',
options=empty_set,
)
should_reuse_materials: BoolProperty(
default=True,
name='Reuse Materials',
options=empty_set,
description='Existing materials with matching names will be reused when available'
)
should_import_skeleton: BoolProperty(
default=True,
name='Import Skeleton',
@@ -93,14 +87,20 @@
soft_min=1.0,
name='Bone Length',
options=empty_set,
subtype='DISTANCE',
description='Length of the bones'
)
should_import_shape_keys: BoolProperty(
default=True,
name='Shape Keys',
name='Import Shape Keys',
options=empty_set,
description='Import shape keys, if available'
)
scale: FloatProperty(
name='Scale',
default=1.0,
soft_min=0.0,
)
def execute(self, context):
psk = read_psk(self.filepath)
@@ -116,6 +116,11 @@
options.bone_length = self.bone_length
options.should_import_materials = self.should_import_materials
options.should_import_shape_keys = self.should_import_shape_keys
options.scale = self.scale
if not options.should_import_mesh and not options.should_import_skeleton:
self.report({'ERROR'}, 'Nothing to import')
return {'CANCELLED'}
result = import_psk(psk, context, options)
@@ -124,30 +129,42 @@
message += '\n'.join(result.warnings)
self.report({'WARNING'}, message)
else:
self.report({'INFO'}, f'PSK imported')
self.report({'INFO'}, f'PSK imported ({options.name})')
return {'FINISHED'}
def draw(self, context):
layout = self.layout
layout.prop(self, 'should_import_materials')
row = layout.row()
col = row.column()
col.use_property_split = True
col.use_property_decorate = False
col.prop(self, 'scale')
layout.prop(self, 'should_import_mesh')
row = layout.column()
row.use_property_split = True
row.use_property_decorate = False
if self.should_import_mesh:
row.prop(self, 'should_import_vertex_normals')
row.prop(self, 'should_import_extra_uvs')
row.prop(self, 'should_import_vertex_colors')
row = layout.row()
col = row.column()
col.use_property_split = True
col.use_property_decorate = False
col.prop(self, 'should_import_materials', text='Materials')
col.prop(self, 'should_import_vertex_normals', text='Vertex Normals')
col.prop(self, 'should_import_extra_uvs', text='Extra UVs')
col.prop(self, 'should_import_vertex_colors', text='Vertex Colors')
if self.should_import_vertex_colors:
row.prop(self, 'vertex_color_space')
row.prop(self, 'should_import_shape_keys')
col.prop(self, 'vertex_color_space')
col.prop(self, 'should_import_shape_keys', text='Shape Keys')
layout.prop(self, 'should_import_skeleton')
row = layout.column()
row.use_property_split = True
row.use_property_decorate = False
if self.should_import_skeleton:
row.prop(self, 'bone_length')
row = layout.row()
col = row.column()
col.use_property_split = True
col.use_property_decorate = False
col.prop(self, 'bone_length')
classes = (


@@ -1,4 +1,3 @@
from math import inf
from typing import Optional, List
import bmesh
@@ -17,19 +16,20 @@ class PskImportOptions:
self.should_import_mesh = True
self.should_reuse_materials = True
self.should_import_vertex_colors = True
self.vertex_color_space = 'sRGB'
self.vertex_color_space = 'SRGB'
self.should_import_vertex_normals = True
self.should_import_extra_uvs = True
self.should_import_skeleton = True
self.should_import_shape_keys = True
self.bone_length = 1.0
self.should_import_materials = True
self.scale = 1.0
class ImportBone:
"""
'''
Intermediate bone type for the purpose of construction.
"""
'''
def __init__(self, index: int, psk_bone: Psk.Bone):
self.index: int = index
self.psk_bone: Psk.Bone = psk_bone
@@ -52,6 +52,7 @@ class PskImportResult:
def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
result = PskImportResult()
armature_object = None
mesh_object = None
if options.should_import_skeleton:
# ARMATURE
@@ -144,6 +145,7 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
bm.verts.ensure_lookup_table()
# FACES
invalid_face_indices = set()
for face_index, face in enumerate(psk.faces):
point_indices = map(lambda i: psk.wedges[i].point_index, reversed(face.wedge_indices))
@@ -164,61 +166,59 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
bm.to_mesh(mesh_data)
# TEXTURE COORDINATES
data_index = 0
uv_layer_data_index = 0
uv_layer = mesh_data.uv_layers.new(name='VTXW0000')
for face_index, face in enumerate(psk.faces):
if face_index in invalid_face_indices:
continue
face_wedges = [psk.wedges[i] for i in reversed(face.wedge_indices)]
for wedge in face_wedges:
uv_layer.data[data_index].uv = wedge.u, 1.0 - wedge.v
data_index += 1
uv_layer.data[uv_layer_data_index].uv = wedge.u, 1.0 - wedge.v
uv_layer_data_index += 1
# EXTRA UVS
if psk.has_extra_uvs and options.should_import_extra_uvs:
extra_uv_channel_count = int(len(psk.extra_uvs) / len(psk.wedges))
wedge_index_offset = 0
for extra_uv_index in range(extra_uv_channel_count):
data_index = 0
uv_layer_data_index = 0
uv_layer = mesh_data.uv_layers.new(name=f'EXTRAUV{extra_uv_index}')
for face_index, face in enumerate(psk.faces):
if face_index in invalid_face_indices:
continue
for wedge_index in reversed(face.wedge_indices):
u, v = psk.extra_uvs[wedge_index_offset + wedge_index]
uv_layer.data[data_index].uv = u, 1.0 - v
data_index += 1
uv_layer.data[uv_layer_data_index].uv = u, 1.0 - v
uv_layer_data_index += 1
wedge_index_offset += len(psk.wedges)
# VERTEX COLORS
if psk.has_vertex_colors and options.should_import_vertex_colors:
size = (len(psk.points), 4)
vertex_colors = np.full(size, inf)
vertex_color_data = mesh_data.vertex_colors.new(name='VERTEXCOLOR')
ambiguous_vertex_color_point_indices = []
# Convert vertex colors to sRGB if necessary.
psk_vertex_colors = np.zeros((len(psk.vertex_colors), 4))
for vertex_color_index in range(len(psk.vertex_colors)):
psk_vertex_colors[vertex_color_index,:] = psk.vertex_colors[vertex_color_index].normalized()
match options.vertex_color_space:
case 'SRGBA':
for i in range(psk_vertex_colors.shape[0]):
psk_vertex_colors[i, :3] = tuple(map(lambda x: rgb_to_srgb(x), psk_vertex_colors[i, :3]))
case _:
pass
for wedge_index, wedge in enumerate(psk.wedges):
point_index = wedge.point_index
psk_vertex_color = psk.vertex_colors[wedge_index].normalized()
if vertex_colors[point_index, 0] != inf and tuple(vertex_colors[point_index]) != psk_vertex_color:
ambiguous_vertex_color_point_indices.append(point_index)
else:
vertex_colors[point_index] = psk_vertex_color
# Map the PSK vertex colors to the face corners.
face_count = len(psk.faces) - len(invalid_face_indices)
face_corner_colors = np.full((face_count * 3, 4), 1.0)
face_corner_color_index = 0
for face_index, face in enumerate(psk.faces):
if face_index in invalid_face_indices:
continue
for wedge_index in reversed(face.wedge_indices):
face_corner_colors[face_corner_color_index] = psk_vertex_colors[wedge_index]
face_corner_color_index += 1
if options.vertex_color_space == 'SRGBA':
for i in range(vertex_colors.shape[0]):
vertex_colors[i, :3] = tuple(map(lambda x: rgb_to_srgb(x), vertex_colors[i, :3]))
for loop_index, loop in enumerate(mesh_data.loops):
vertex_color = vertex_colors[loop.vertex_index]
if vertex_color is not None:
vertex_color_data.data[loop_index].color = vertex_color
else:
vertex_color_data.data[loop_index].color = 1.0, 1.0, 1.0, 1.0
if len(ambiguous_vertex_color_point_indices) > 0:
result.warnings.append(
f'{len(ambiguous_vertex_color_point_indices)} vertex(es) with ambiguous vertex colors.')
# Create the vertex color attribute.
face_corner_color_attribute = mesh_data.attributes.new(name='VERTEXCOLOR', type='FLOAT_COLOR', domain='CORNER')
face_corner_color_attribute.data.foreach_set('color', face_corner_colors.flatten())
# VERTEX NORMALS
if psk.has_vertex_normals and options.should_import_vertex_normals:
@@ -227,6 +227,8 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
for vertex_normal in psk.vertex_normals:
normals.append(tuple(vertex_normal))
mesh_data.normals_split_custom_set_from_vertices(normals)
# TODO: This has been removed in 4.1!
mesh_data.use_auto_smooth = True
else:
mesh_data.shade_smooth()
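# Given the TODO above, a version guard is one way to keep this working across
# the 4.1 removal; a sketch, assuming that in Blender 4.1+ the custom split
# normals set via normals_split_custom_set_from_vertices() take effect without
# any flag:
#
#   if bpy.app.version < (4, 1, 0):
#       mesh_data.use_auto_smooth = True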
@@ -266,6 +268,9 @@ def import_psk(psk: Psk, context, options: PskImportOptions) -> PskImportResult:
armature_modifier.object = armature_object
mesh_object.parent = armature_object
root_object = armature_object if options.should_import_skeleton else mesh_object
root_object.scale = (options.scale, options.scale, options.scale)
try:
bpy.ops.object.mode_set(mode='OBJECT')
except:


@@ -23,7 +23,7 @@ def _read_material_references(path: str) -> List[str]:
return []
# Do a crude regex match to find the Material list entries.
contents = property_file_path.read_text()
pattern = r"Material\s*=\s*([^\s^,]+)"
pattern = r'Material\s*=\s*([^\s^,]+)'
return re.findall(pattern, contents)


@@ -6,7 +6,7 @@ from ..data import Section, Vector3
MAX_WEDGE_COUNT = 65536
MAX_POINT_COUNT = 4294967296
MAX_BONE_COUNT = 256
MAX_BONE_COUNT = 2147483647
MAX_MATERIAL_COUNT = 256