Merge pull request #1 from WHSnyder/fully_featured

Import (all but cloth) + Export (skins and anims)

commit d00dabc8eb

@@ -1,4 +1,4 @@
.DS_Store
*.msh

# Created by https://www.gitignore.io/api/python,visualstudiocode

@@ -53,12 +53,18 @@ if "bpy" in locals():
# End of stuff taken from glTF

import bpy
from bpy_extras.io_utils import ExportHelper
from bpy.props import BoolProperty, EnumProperty
from bpy_extras.io_utils import ExportHelper, ImportHelper
from bpy.props import BoolProperty, EnumProperty, CollectionProperty
from bpy.types import Operator
from .msh_scene import create_scene
from .msh_scene_utilities import create_scene
from .msh_scene_save import save_scene
from .msh_scene_read import read_scene
from .msh_material_properties import *
from .msh_skeleton_properties import *
from .msh_collision_prim_properties import *
from .msh_to_blend import *
from .zaa_to_blend import *


class ExportMSH(Operator, ExportHelper):
    """ Export the current scene as a SWBF .msh file. """

@@ -97,35 +103,128 @@ class ExportMSH(Operator, ExportHelper):
        default=True
    )

    export_with_animation: BoolProperty(
        name="Export With Animation",
        description="Includes animation data extracted from the action currently set on armature.",
        default=False
    )

    export_as_skeleton: BoolProperty(
        name="Export Objects As Skeleton",
        description="Check if you intend to export skeleton data for consumption by ZenAsset.",
        default=False
    )


    def execute(self, context):

        with open(self.filepath, 'wb') as output_file:
            save_scene(
                output_file=output_file,
                scene=create_scene(
                    generate_triangle_strips=self.generate_triangle_strips,
                    apply_modifiers=self.apply_modifiers,
                    export_target=self.export_target))
                    export_target=self.export_target,
                    skel_only=self.export_as_skeleton,
                    export_anim=self.export_with_animation
                ),
            )

        return {'FINISHED'}


# Only needed if you want to add into a dynamic menu
def menu_func_export(self, context):
    self.layout.operator(ExportMSH.bl_idname, text="SWBF msh (.msh)")


class ImportMSH(Operator, ImportHelper):
    """ Import an SWBF .msh file. """

    bl_idname = "swbf_msh.import"
    bl_label = "Import SWBF .msh File"
    filename_ext = ".msh"

    files: CollectionProperty(
        name="File Path",
        type=bpy.types.OperatorFileListElement,
    )

    filter_glob: StringProperty(
        default="*.msh;*.zaa;*.zaabin",
        options={'HIDDEN'},
        maxlen=255,  # Max internal buffer length, longer would be clamped.
    )

    animation_only: BoolProperty(
        name="Import Animation Only",
        description="Import animation and append as a new action to currently selected armature.",
        default=False
    )


    def execute(self, context):
        dirname = os.path.dirname(self.filepath)
        for file in self.files:
            filepath = os.path.join(dirname, file.name)
            if filepath.endswith(".zaabin") or filepath.endswith(".zaa"):
                extract_and_apply_munged_anim(filepath)
            else:
                with open(filepath, 'rb') as input_file:
                    scene = read_scene(input_file, self.animation_only)

                if not self.animation_only:
                    extract_scene(filepath, scene)
                else:
                    extract_and_apply_anim(filepath, scene)

        return {'FINISHED'}

def menu_func_import(self, context):
    self.layout.operator(ImportMSH.bl_idname, text="SWBF msh (.msh)")


def register():
    bpy.utils.register_class(CollisionPrimitiveProperties)

    bpy.utils.register_class(MaterialProperties)
    bpy.utils.register_class(MaterialPropertiesPanel)

    bpy.utils.register_class(SkeletonProperties)
    bpy.utils.register_class(SkeletonPropertiesPanel)

    bpy.utils.register_class(ExportMSH)
    bpy.utils.register_class(ImportMSH)

    bpy.types.TOPBAR_MT_file_export.append(menu_func_export)
    bpy.types.Material.swbf_msh = bpy.props.PointerProperty(type=MaterialProperties)
    bpy.types.TOPBAR_MT_file_import.append(menu_func_import)

    bpy.types.Object.swbf_msh_coll_prim = bpy.props.PointerProperty(type=CollisionPrimitiveProperties)
    bpy.types.Material.swbf_msh_mat = bpy.props.PointerProperty(type=MaterialProperties)
    bpy.types.Armature.swbf_msh_skel = bpy.props.CollectionProperty(type=SkeletonProperties)


def unregister():
    bpy.utils.unregister_class(CollisionPrimitiveProperties)

    bpy.utils.unregister_class(MaterialProperties)
    bpy.utils.unregister_class(MaterialPropertiesPanel)

    bpy.utils.unregister_class(SkeletonProperties)
    bpy.utils.unregister_class(SkeletonPropertiesPanel)

    bpy.utils.unregister_class(ExportMSH)
    bpy.utils.unregister_class(ImportMSH)

    bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)
    bpy.types.TOPBAR_MT_file_import.remove(menu_func_import)


if __name__ == "__main__":
    register()
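A hedged sketch of driving the importer above from a script once the addon is registered. The operator idname "swbf_msh.import" ends in a Python keyword, so it has to be fetched with getattr(); the file names below are hypothetical and the exact keyword arguments may need adjusting.

    # Hedged sketch, not part of the PR: call the ImportMSH operator directly.
    import bpy

    import_op = getattr(bpy.ops.swbf_msh, "import")   # "import" is a keyword
    import_op(filepath="/mods/ABC/msh/example.msh",   # hypothetical path
              files=[{"name": "example.msh"}],        # OperatorFileListElement entries
              animation_only=False)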
@@ -0,0 +1,150 @@
"""
Reader class for both zaabin, zaa, and msh files.
"""

import io
import struct
import os

from mathutils import Vector, Quaternion


class Reader:
    def __init__(self, file, parent=None, indent=0, debug=False):
        self.file = file
        self.size: int = 0
        self.size_pos = None
        self.parent = parent
        self.indent = "  " * indent  # for print debugging, should be stored as str so msh_scene_read can access it
        self.debug = debug


    def __enter__(self):
        self.size_pos = self.file.tell()

        if self.parent is not None:
            self.header = self.read_bytes(4).decode("utf-8")
        else:
            self.header = "File"

        if self.parent is not None:
            self.size = self.read_u32()
        else:
            self.size = os.path.getsize(self.file.name) - 8

        # No padding to multiples of 4. Files exported from XSI via zetools do not align by 4!
        self.end_pos = self.size_pos + self.size + 8

        if self.debug:
            print("{}Begin {} of Size {} at pos {}:".format(self.indent, self.header, self.size, self.size_pos))

        return self


    def __exit__(self, exc_type, exc_value, traceback):
        if self.size > self.MAX_SIZE:
            raise OverflowError(f"File overflowed max size. size = {self.size} MAX_SIZE = {self.MAX_SIZE}")

        if self.debug:
            print("{}End {} at pos: {}".format(self.indent, self.header, self.end_pos))

        self.file.seek(self.end_pos)


    def read_bytes(self, num_bytes):
        return self.file.read(num_bytes)


    def read_string(self):
        last_byte = self.read_bytes(1)
        result = b''
        while last_byte[0] != 0x0:
            result += last_byte
            last_byte = self.read_bytes(1)

        return result.decode("utf-8")

    def read_i8(self, num=1):
        buf = self.read_bytes(num)
        result = struct.unpack(f"<{num}b", buf)
        return result[0] if num == 1 else result

    def read_u8(self, num=1):
        buf = self.read_bytes(num)
        result = struct.unpack(f"<{num}B", buf)
        return result[0] if num == 1 else result

    def read_i16(self, num=1):
        buf = self.read_bytes(num * 2)
        result = struct.unpack(f"<{num}h", buf)
        return result[0] if num == 1 else result

    def read_u16(self, num=1):
        buf = self.read_bytes(num * 2)
        result = struct.unpack(f"<{num}H", buf)
        return result[0] if num == 1 else result

    def read_i32(self, num=1):
        buf = self.read_bytes(num * 4)
        result = struct.unpack(f"<{num}i", buf)
        return result[0] if num == 1 else result

    def read_u32(self, num=1):
        buf = self.read_bytes(num * 4)
        result = struct.unpack(f"<{num}I", buf)
        return result[0] if num == 1 else result

    def read_f32(self, num=1):
        buf = self.read_bytes(num * 4)
        result = struct.unpack(f"<{num}f", buf)
        return result[0] if num == 1 else result


    def read_quat(self):
        rot = self.read_f32(4)
        return Quaternion((rot[3], rot[0], rot[1], rot[2]))

    def read_vec(self):
        return Vector(self.read_f32(3))


    def read_child(self):
        child = Reader(self.file, parent=self, indent=int(len(self.indent) / 2) + 1, debug=self.debug)
        return child


    def skip_bytes(self, num):
        self.file.seek(num, 1)


    def peak_next_header(self):

        buf = self.read_bytes(4)
        self.file.seek(-4, 1)

        try:
            result = buf.decode("utf-8")
            return result
        except:
            return ""

    def get_current_pos(self):
        return self.file.tell()

    def reset_pos(self):
        self.file.seek(self.size_pos - self.file.tell() + 8, 1)

    def how_much_left(self, pos):
        return self.end_pos - pos

    def skip_until(self, header):
        while (self.could_have_child() and header not in self.peak_next_header()):
            self.skip_bytes(1)


    def could_have_child(self):
        return self.end_pos - self.file.tell() >= 8


    MAX_SIZE: int = 2147483647 - 8
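A minimal sketch of walking a chunked file by hand with the Reader above, assuming it runs inside the addon package; the file path is made up.

    # Hedged sketch, not part of the PR: print the headers of HEDR's children.
    from .chunked_file_reader import Reader

    def dump_top_level_chunks(path="example.msh"):       # hypothetical path
        with open(path, "rb") as f:
            with Reader(file=f, debug=True) as head:      # root "File" pseudo-chunk
                head.skip_until("HEDR")                   # seek to the HEDR chunk
                with head.read_child() as hedr:
                    while hedr.could_have_child():
                        print(hedr.peak_next_header())    # e.g. "MSH2", "SKL2", "ANM2"
                        with hedr.read_child():           # __exit__ seeks past the chunk
                            pass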
@@ -0,0 +1,89 @@
'''
Based on code by Benedikt Schatz from https://github.com/Schlechtwetterfront/xsizetools/blob/master/Application/Modules/msh2_crc.py
'''


# CRC lookup table.
table32_lookup = (
    0x00000000, 0x04C11DB7, 0x09823B6E, 0x0D4326D9,
    0x130476DC, 0x17C56B6B, 0x1A864DB2, 0x1E475005,
    0x2608EDB8, 0x22C9F00F, 0x2F8AD6D6, 0x2B4BCB61,
    0x350C9B64, 0x31CD86D3, 0x3C8EA00A, 0x384FBDBD,
    0x4C11DB70, 0x48D0C6C7, 0x4593E01E, 0x4152FDA9,
    0x5F15ADAC, 0x5BD4B01B, 0x569796C2, 0x52568B75,
    0x6A1936C8, 0x6ED82B7F, 0x639B0DA6, 0x675A1011,
    0x791D4014, 0x7DDC5DA3, 0x709F7B7A, 0x745E66CD,
    0x9823B6E0, 0x9CE2AB57, 0x91A18D8E, 0x95609039,
    0x8B27C03C, 0x8FE6DD8B, 0x82A5FB52, 0x8664E6E5,
    0xBE2B5B58, 0xBAEA46EF, 0xB7A96036, 0xB3687D81,
    0xAD2F2D84, 0xA9EE3033, 0xA4AD16EA, 0xA06C0B5D,
    0xD4326D90, 0xD0F37027, 0xDDB056FE, 0xD9714B49,
    0xC7361B4C, 0xC3F706FB, 0xCEB42022, 0xCA753D95,
    0xF23A8028, 0xF6FB9D9F, 0xFBB8BB46, 0xFF79A6F1,
    0xE13EF6F4, 0xE5FFEB43, 0xE8BCCD9A, 0xEC7DD02D,
    0x34867077, 0x30476DC0, 0x3D044B19, 0x39C556AE,
    0x278206AB, 0x23431B1C, 0x2E003DC5, 0x2AC12072,
    0x128E9DCF, 0x164F8078, 0x1B0CA6A1, 0x1FCDBB16,
    0x018AEB13, 0x054BF6A4, 0x0808D07D, 0x0CC9CDCA,
    0x7897AB07, 0x7C56B6B0, 0x71159069, 0x75D48DDE,
    0x6B93DDDB, 0x6F52C06C, 0x6211E6B5, 0x66D0FB02,
    0x5E9F46BF, 0x5A5E5B08, 0x571D7DD1, 0x53DC6066,
    0x4D9B3063, 0x495A2DD4, 0x44190B0D, 0x40D816BA,
    0xACA5C697, 0xA864DB20, 0xA527FDF9, 0xA1E6E04E,
    0xBFA1B04B, 0xBB60ADFC, 0xB6238B25, 0xB2E29692,
    0x8AAD2B2F, 0x8E6C3698, 0x832F1041, 0x87EE0DF6,
    0x99A95DF3, 0x9D684044, 0x902B669D, 0x94EA7B2A,
    0xE0B41DE7, 0xE4750050, 0xE9362689, 0xEDF73B3E,
    0xF3B06B3B, 0xF771768C, 0xFA325055, 0xFEF34DE2,
    0xC6BCF05F, 0xC27DEDE8, 0xCF3ECB31, 0xCBFFD686,
    0xD5B88683, 0xD1799B34, 0xDC3ABDED, 0xD8FBA05A,
    0x690CE0EE, 0x6DCDFD59, 0x608EDB80, 0x644FC637,
    0x7A089632, 0x7EC98B85, 0x738AAD5C, 0x774BB0EB,
    0x4F040D56, 0x4BC510E1, 0x46863638, 0x42472B8F,
    0x5C007B8A, 0x58C1663D, 0x558240E4, 0x51435D53,
    0x251D3B9E, 0x21DC2629, 0x2C9F00F0, 0x285E1D47,
    0x36194D42, 0x32D850F5, 0x3F9B762C, 0x3B5A6B9B,
    0x0315D626, 0x07D4CB91, 0x0A97ED48, 0x0E56F0FF,
    0x1011A0FA, 0x14D0BD4D, 0x19939B94, 0x1D528623,
    0xF12F560E, 0xF5EE4BB9, 0xF8AD6D60, 0xFC6C70D7,
    0xE22B20D2, 0xE6EA3D65, 0xEBA91BBC, 0xEF68060B,
    0xD727BBB6, 0xD3E6A601, 0xDEA580D8, 0xDA649D6F,
    0xC423CD6A, 0xC0E2D0DD, 0xCDA1F604, 0xC960EBB3,
    0xBD3E8D7E, 0xB9FF90C9, 0xB4BCB610, 0xB07DABA7,
    0xAE3AFBA2, 0xAAFBE615, 0xA7B8C0CC, 0xA379DD7B,
    0x9B3660C6, 0x9FF77D71, 0x92B45BA8, 0x9675461F,
    0x8832161A, 0x8CF30BAD, 0x81B02D74, 0x857130C3,
    0x5D8A9099, 0x594B8D2E, 0x5408ABF7, 0x50C9B640,
    0x4E8EE645, 0x4A4FFBF2, 0x470CDD2B, 0x43CDC09C,
    0x7B827D21, 0x7F436096, 0x7200464F, 0x76C15BF8,
    0x68860BFD, 0x6C47164A, 0x61043093, 0x65C52D24,
    0x119B4BE9, 0x155A565E, 0x18197087, 0x1CD86D30,
    0x029F3D35, 0x065E2082, 0x0B1D065B, 0x0FDC1BEC,
    0x3793A651, 0x3352BBE6, 0x3E119D3F, 0x3AD08088,
    0x2497D08D, 0x2056CD3A, 0x2D15EBE3, 0x29D4F654,
    0xC5A92679, 0xC1683BCE, 0xCC2B1D17, 0xC8EA00A0,
    0xD6AD50A5, 0xD26C4D12, 0xDF2F6BCB, 0xDBEE767C,
    0xE3A1CBC1, 0xE760D676, 0xEA23F0AF, 0xEEE2ED18,
    0xF0A5BD1D, 0xF464A0AA, 0xF9278673, 0xFDE69BC4,
    0x89B8FD09, 0x8D79E0BE, 0x803AC667, 0x84FBDBD0,
    0x9ABC8BD5, 0x9E7D9662, 0x933EB0BB, 0x97FFAD0C,
    0xAFB010B1, 0xAB710D06, 0xA6322BDF, 0xA2F33668,
    0xBCB4666D, 0xB8757BDA, 0xB5365D03, 0xB1F740B4
)


def to_lower(charcode):
    if charcode <= 64 or charcode > 90:
        return charcode
    else:
        return charcode + 32

# Not sure what Schlechtwetterfront means by "Simulate unsigned behavior.",
# kept it anyways just without the extra functions
def to_crc(string):
    crc_ = ~0 & 0xFFFFFFFF
    if string:
        for char in string:
            ind = (crc_ >> 24) ^ to_lower(ord(char))
            crc_ = ((crc_ << 8) & 0xFFFFFFFF) ^ table32_lookup[ind]
    return ~crc_ & 0xFFFFFFFF
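Because to_lower() folds ASCII uppercase to lowercase before indexing the table, the CRC is case-insensitive. A quick sketch, with a made-up bone name:

    # Hedged example, not part of the PR: case-insensitive CRC of a name.
    name_crc = to_crc("bone_root")
    assert name_crc == to_crc("BONE_ROOT")   # same 32-bit value either way
    print(hex(name_crc))                     # an unsigned 32-bit key for bone_frames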
@@ -0,0 +1,100 @@
""" Converts currently active Action to an msh Animation """

import bpy
import math
from enum import Enum
from typing import List, Set, Dict, Tuple
from itertools import zip_longest
from .msh_model import *
from .msh_model_utilities import *
from .msh_utilities import *
from .msh_model_gather import *

from .msh_skeleton_utilities import *

from .crc import to_crc


'''
Convert the active Action into an Animation. When exporting SWBF anims, there is the issue
that all bones in the anim must be in the skeleton/basepose anim. We guarantee this by
only keying bones if they are in the armature's preserved skeleton (swbf_msh_skel) and
adding dummy frames if the bones are not in the armature.

If a preserved skeleton is not present, we include only the keyed bones and add dummy frames for
the root (root_name).
'''

def extract_anim(armature: bpy.types.Armature, root_name: str) -> Animation:

    if not armature.animation_data or not armature.animation_data.action:
        raise RuntimeError("Cannot export animation data without an active Action on armature!")

    action = armature.animation_data.action


    # Set of bones to include in SKL2/animation stuff
    keyable_bones = get_real_BONES(armature)

    # If it doesn't have a preserved skeleton, then we add the scene root.
    # If it does have a preserved skeleton, any objects not animatable by Blender
    # (i.e. objects above the skeleton, scene root) will be included in the preserved skeleton.
    if not has_preserved_skeleton(armature):
        keyable_bones.add(root_name)

    # Subset of the above bones to key with dummy frames (all bones not in the armature)
    dummy_bones = set([keyable_bone for keyable_bone in keyable_bones if keyable_bone not in armature.data.bones])


    anim = Animation()

    root_crc = to_crc(root_name)

    if not action:
        framerange = Vector((0.0, 1.0))
    else:
        framerange = action.frame_range

    num_frames = math.floor(framerange.y - framerange.x) + 1
    increment = (framerange.y - framerange.x) / (num_frames - 1)

    anim.end_index = num_frames - 1


    for keyable_bone in keyable_bones:
        anim.bone_frames[to_crc(keyable_bone)] = ([], [])


    for frame in range(num_frames):

        frame_time = framerange.x + frame * increment
        bpy.context.scene.frame_set(frame_time)

        for keyable_bone in keyable_bones:

            bone_crc = to_crc(keyable_bone)

            if keyable_bone in dummy_bones:

                rframe = RotationFrame(frame, convert_rotation_space(Quaternion()))
                tframe = TranslationFrame(frame, Vector((0.0, 0.0, 0.0)))

            else:

                bone = armature.pose.bones[keyable_bone]

                transform = bone.matrix

                if bone.parent:
                    transform = bone.parent.matrix.inverted() @ transform

                loc, rot, _ = transform.decompose()

                rframe = RotationFrame(frame, convert_rotation_space(rot))
                tframe = TranslationFrame(frame, convert_vector_space(loc))

            anim.bone_frames[bone_crc][0].append(tframe)
            anim.bone_frames[bone_crc][1].append(rframe)


    return anim
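A worked check of the sampling arithmetic above, with made-up frame-range values:

    # Hedged example, not part of the PR: sampling math for a 1..25 frame Action.
    import math
    framerange_x, framerange_y = 1.0, 25.0
    num_frames = math.floor(framerange_y - framerange_x) + 1       # 25 sampled frames
    increment = (framerange_y - framerange_x) / (num_frames - 1)   # 1.0 frame per step
    frame_times = [framerange_x + f * increment for f in range(num_frames)]  # 1.0, 2.0, ..., 25.0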
@@ -0,0 +1,16 @@
""" IntProperty needed to keep track of Collision Primitive types that are imported without indicative names.
    Not sure I needed a PropertyGroup/what a leaner method would be. The prims shouldn't be renamed on import because
    they are often referenced in ODFs.

    Don't see a reason these should be exposed via a panel or need to be changed..."""

import bpy
from bpy.props import IntProperty
from bpy.types import PropertyGroup


class CollisionPrimitiveProperties(PropertyGroup):
    prim_type: IntProperty(name="Primitive Type", default=-1)
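A minimal sketch of how this property is used once register() has attached it to every Object: the raw SWCI type is stored on import and read back on export without renaming the primitive. The object name below is hypothetical.

    # Hedged sketch, not part of the PR.
    import bpy

    obj = bpy.data.objects["p_hypothetical_collision"]   # made-up object name
    obj.swbf_msh_coll_prim.prim_type = 4                  # 4 == BOX in CollisionPrimitiveShape
    print(obj.swbf_msh_coll_prim.prim_type)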
@@ -19,6 +19,31 @@ class Rendertype(Enum):
    NORMALMAPPED = 27
    NORMALMAPPED_TILED_ENVMAP = 29

    # Placeholders to avoid crashes/import-export inconsistencies
    OTHER_1 = 1
    OTHER_2 = 2
    OTHER_4 = 4
    OTHER_5 = 5
    OTHER_8 = 8
    OTHER_9 = 9
    OTHER_10 = 10
    OTHER_11 = 11
    OTHER_12 = 12
    OTHER_13 = 13
    OTHER_14 = 14
    OTHER_15 = 15
    OTHER_16 = 16
    OTHER_17 = 17
    OTHER_18 = 18
    OTHER_19 = 19
    OTHER_20 = 20
    OTHER_21 = 21
    OTHER_23 = 23
    OTHER_28 = 28
    OTHER_30 = 30
    OTHER_31 = 31


class MaterialFlags(Flag):
    NONE = 0
    UNLIT = 1

@@ -32,8 +57,9 @@ class MaterialFlags(Flag):

@dataclass
class Material:
    """ Data class representing a .msh material.
        Intended to be stored in a dictionary so name is missing. """
    """ Data class representing a .msh material."""

    name: str = ""

    specular_color: Color = Color((1.0, 1.0, 1.0))
    rendertype: Rendertype = Rendertype.NORMAL
@@ -5,6 +5,8 @@ import bpy
from typing import Dict
from .msh_material import *

from .msh_material_utilities import _RENDERTYPES_MAPPING

def gather_materials() -> Dict[str, Material]:
    """ Gathers the Blender materials and returns them as
        a dictionary of strings and Material objects. """

@@ -22,36 +24,36 @@ def read_material(blender_material: bpy.types.Material) -> Material:

    result = Material()

    if blender_material.swbf_msh is None:
    if blender_material.swbf_msh_mat is None:
        return result

    props = blender_material.swbf_msh
    props = blender_material.swbf_msh_mat

    result.specular_color = props.specular_color.copy()
    result.rendertype = _read_material_props_rendertype(props)
    result.flags = _read_material_props_flags(props)
    result.data = _read_material_props_data(props)
    result.texture0 = props.diffuse_map
    result.texture1 = _read_normal_map_or_distortion_map_texture(props)
    result.texture2 = _read_detail_texture(props)
    result.texture3 = _read_envmap_texture(props)

    if "UNSUPPORTED" not in props.rendertype:
        result.texture0 = props.diffuse_map
        result.texture1 = _read_normal_map_or_distortion_map_texture(props)
        result.texture2 = _read_detail_texture(props)
        result.texture3 = _read_envmap_texture(props)

    else:
        result.texture0 = props.texture_0
        result.texture1 = props.texture_1
        result.texture2 = props.texture_2
        result.texture3 = props.texture_3

    return result

_RENDERTYPES_MAPPING = {
    "NORMAL_BF2": Rendertype.NORMAL,
    "SCROLLING_BF2": Rendertype.SCROLLING,
    "ENVMAPPED_BF2": Rendertype.ENVMAPPED,
    "ANIMATED_BF2": Rendertype.ANIMATED,
    "REFRACTION_BF2": Rendertype.REFRACTION,
    "BLINK_BF2": Rendertype.BLINK,
    "NORMALMAPPED_TILED_BF2": Rendertype.NORMALMAPPED_TILED,
    "NORMALMAPPED_ENVMAPPED_BF2": Rendertype.NORMALMAPPED_ENVMAPPED,
    "NORMALMAPPED_BF2": Rendertype.NORMALMAPPED,
    "NORMALMAPPED_TILED_ENVMAPPED_BF2": Rendertype.NORMALMAPPED_TILED_ENVMAP}

def _read_material_props_rendertype(props) -> Rendertype:
    return _RENDERTYPES_MAPPING[props.rendertype]
    if "UNSUPPORTED" in props.rendertype:
        return Rendertype(props.rendertype_value)
    else:
        return _RENDERTYPES_MAPPING[props.rendertype]

def _read_material_props_flags(props) -> MaterialFlags:
    if "REFRACTION" in props.rendertype:

@@ -79,6 +81,8 @@ def _read_material_props_flags(props) -> MaterialFlags:
    return flags

def _read_material_props_data(props) -> Tuple[int, int]:
    if "UNSUPPORTED" in props.rendertype:
        return (props.data_value_0, props.data_value_1)
    if "SCROLLING" in props.rendertype:
        return (props.scroll_speed_u, props.scroll_speed_v)
    if "BLINK" in props.rendertype:
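To illustrate the UNSUPPORTED branch above: the raw byte read from the .msh ATRB chunk is kept in rendertype_value and turned back into the matching placeholder enum member on export. A hedged sketch with a made-up value, assuming Rendertype from msh_material is in scope:

    # Hedged example, not part of the PR.
    props_rendertype = "UNSUPPORTED"   # the enum string stored on MaterialProperties
    props_rendertype_value = 30        # hypothetical raw rendertype from the file

    if "UNSUPPORTED" in props_rendertype:
        rt = Rendertype(props_rendertype_value)   # -> Rendertype.OTHER_30
    print(rt)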
@@ -3,7 +3,11 @@
import bpy
from bpy.props import StringProperty, BoolProperty, EnumProperty, FloatVectorProperty, IntProperty
from bpy.types import PropertyGroup

from .msh_material import *
from .msh_material_ui_strings import *
from .msh_material_utilities import _REVERSE_RENDERTYPES_MAPPING


UI_MATERIAL_RENDERTYPES = (
    ('NORMAL_BF2', "00 Normal (SWBF2)", UI_RENDERTYPE_NORMAL_BF2_DESC),

@@ -15,7 +19,9 @@ UI_MATERIAL_RENDERTYPES = (
    ('NORMALMAPPED_TILED_BF2', "24 Normalmapped Tiled (SWBF2)", UI_RENDERTYPE_NORMALMAPPED_TILED_BF2_DESC),
    ('NORMALMAPPED_ENVMAPPED_BF2', "26 Normalmapped Envmapped (SWBF2)", UI_RENDERTYPE_NORMALMAPPED_ENVMAPPED_BF2_DESC),
    ('NORMALMAPPED_BF2', "27 Normalmapped (SWBF2)", UI_RENDERTYPE_NORMALMAPPED_BF2_DESC),
    ('NORMALMAPPED_TILED_ENVMAPPED_BF2', "29 Normalmapped Tiled Envmapped (SWBF2)", UI_RENDERTYPE_NORMALMAPPED_TILED_ENVMAPPED_BF2_DESC))
    ('NORMALMAPPED_TILED_ENVMAPPED_BF2', "29 Normalmapped Tiled Envmapped (SWBF2)", UI_RENDERTYPE_NORMALMAPPED_TILED_ENVMAPPED_BF2_DESC),
    ('UNSUPPORTED', "Other (SWBF1/2)", UI_RENDERTYPE_UNSUPPORTED_BF2_DESC))


def _make_anim_length_entry(length):
    from math import sqrt

@@ -182,6 +188,18 @@ class MaterialProperties(PropertyGroup):
                     "distort the scene behind them. Should be a normal map "
                     "with '-forceformat v8u8' in it's '.tga.option' file.")

    # Below props are for yet unsupported render types
    data_value_0: IntProperty(name="", description="First data value")
    data_value_1: IntProperty(name="", description="Second data value")

    rendertype_value: IntProperty(name="Rendertype Value", description="Raw number value of rendertype.", min=0, max=31)

    texture_0: StringProperty(name="1", description="First texture slot")
    texture_1: StringProperty(name="2", description="Second texture slot")
    texture_2: StringProperty(name="3", description="Third texture slot")
    texture_3: StringProperty(name="4", description="Fourth texture slot")


class MaterialPropertiesPanel(bpy.types.Panel):
    """ Creates a Panel in the Object properties window """
    bl_label = "SWBF .msh Properties"

@@ -190,15 +208,21 @@ class MaterialPropertiesPanel(bpy.types.Panel):
    bl_region_type = 'WINDOW'
    bl_context = "material"


    def draw(self, context):
        if context.material is None:
            return

        layout = self.layout

        material_props = context.material.swbf_msh
        material_props = context.material.swbf_msh_mat

        layout.prop(material_props, "rendertype")

        if "UNSUPPORTED" in material_props.rendertype:
            layout.prop(material_props, "rendertype_value")

        layout.prop(material_props, "specular_color")

        if "REFRACTION" not in material_props.rendertype:

@@ -233,21 +257,31 @@ class MaterialPropertiesPanel(bpy.types.Panel):
        elif "NORMALMAPPED_TILED" in material_props.rendertype:
            row.prop(material_props, "normal_map_tiling_u")
            row.prop(material_props, "normal_map_tiling_v")
        elif "UNSUPPORTED" in material_props.rendertype:
            row.prop(material_props, "data_value_0")
            row.prop(material_props, "data_value_1")
        else:
            row.prop(material_props, "detail_map_tiling_u")
            row.prop(material_props, "detail_map_tiling_v")

        layout.label(text="Texture Maps: ")
        layout.prop(material_props, "diffuse_map")
        if "UNSUPPORTED" not in material_props.rendertype:
            layout.prop(material_props, "diffuse_map")

        if "REFRACTION" not in material_props.rendertype:
            layout.prop(material_props, "detail_map")
            if "REFRACTION" not in material_props.rendertype:
                layout.prop(material_props, "detail_map")

        if "NORMALMAPPED" in material_props.rendertype:
            layout.prop(material_props, "normal_map")
            if "NORMALMAPPED" in material_props.rendertype:
                layout.prop(material_props, "normal_map")

        if "ENVMAPPED" in material_props.rendertype:
            layout.prop(material_props, "environment_map")
            if "ENVMAPPED" in material_props.rendertype:
                layout.prop(material_props, "environment_map")

        if "REFRACTION" in material_props.rendertype:
            layout.prop(material_props, "distortion_map")
        else:
            layout.prop(material_props, "texture_0")
            layout.prop(material_props, "texture_1")
            layout.prop(material_props, "texture_2")
            layout.prop(material_props, "texture_3")

        if "REFRACTION" in material_props.rendertype:
            layout.prop(material_props, "distortion_map")
@@ -0,0 +1,116 @@
""" For finding textures and assigning MaterialProperties from entries in a Material """

import bpy
from typing import Dict
from .msh_material import *
from .msh_material_gather import *
from .msh_material_properties import *

from .msh_material_utilities import _REVERSE_RENDERTYPES_MAPPING

from math import sqrt

import os



def find_texture_path(folder_path: str, name: str) -> str:

    if not folder_path or not name:
        return ""

    possible_paths = [
        os.path.join(folder_path, name),
        os.path.join(folder_path, "PC", name),
        os.path.join(folder_path, "pc", name),
        os.path.join(folder_path, "..", name),
    ]

    for possible_path in possible_paths:
        if os.path.exists(possible_path):
            return possible_path

    return ""



def fill_material_props(material: Material, material_properties):
    """ Fills MaterialProperties from Material instance """

    if material_properties is None or material is None:
        return

    material_properties.rendertype_value = material.rendertype.value

    material_properties.specular_color = (material.specular_color[0], material.specular_color[1], material.specular_color[2])

    _fill_material_props_rendertype(material, material_properties)
    _fill_material_props_flags(material, material_properties)
    _fill_material_props_data(material, material_properties)
    _fill_material_props_texture_maps(material, material_properties)



def _fill_material_props_rendertype(material, material_properties):
    if material.rendertype in _REVERSE_RENDERTYPES_MAPPING:
        material_properties.rendertype = _REVERSE_RENDERTYPES_MAPPING[material.rendertype]
    else:
        material_properties.rendertype = "UNSUPPORTED"


def _fill_material_props_flags(material, material_properties):
    if material.rendertype == Rendertype.REFRACTION:
        material_properties.blended_transparency = True
        return

    flags = material.flags

    material_properties.blended_transparency = bool(flags & MaterialFlags.BLENDED_TRANSPARENCY)
    material_properties.additive_transparency = bool(flags & MaterialFlags.ADDITIVE_TRANSPARENCY)
    material_properties.hardedged_transparency = bool(flags & MaterialFlags.HARDEDGED_TRANSPARENCY)
    material_properties.unlit = bool(flags & MaterialFlags.UNLIT)
    material_properties.glow = bool(flags & MaterialFlags.GLOW)
    material_properties.perpixel = bool(flags & MaterialFlags.PERPIXEL)
    material_properties.specular = bool(flags & MaterialFlags.SPECULAR)
    material_properties.doublesided = bool(flags & MaterialFlags.DOUBLESIDED)


def _fill_material_props_data(material, material_properties):

    material_properties.data_value_0 = material.data[0]
    material_properties.data_value_1 = material.data[1]

    material_properties.scroll_speed_u = material.data[0]
    material_properties.scroll_speed_v = material.data[1]

    material_properties.blink_min_brightness = material.data[0]
    material_properties.blink_speed = material.data[1]

    material_properties.normal_map_tiling_u = material.data[0]
    material_properties.normal_map_tiling_v = material.data[1]

    anim_length_index = int(sqrt(material.data[0]))
    if anim_length_index < 0:
        anim_length_index = 0
    elif anim_length_index > len(UI_MATERIAL_ANIMATION_LENGTHS):
        anim_length_index = len(UI_MATERIAL_ANIMATION_LENGTHS) - 1

    material_properties.animation_length = UI_MATERIAL_ANIMATION_LENGTHS[anim_length_index][0]
    material_properties.animation_speed = material.data[1]

    material_properties.detail_map_tiling_u = material.data[0]
    material_properties.detail_map_tiling_v = material.data[1]


def _fill_material_props_texture_maps(material, material_properties):

    material_properties.texture_0 = material.texture0
    material_properties.texture_1 = material.texture1
    material_properties.texture_2 = material.texture2
    material_properties.texture_3 = material.texture3

    material_properties.diffuse_map = material.texture0
    material_properties.distortion_map = material.texture1
    material_properties.normal_map = material.texture1
    material_properties.detail_map = material.texture2
    material_properties.environment_map = material.texture3
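A quick sketch of the texture lookup above; the folder and texture names are hypothetical.

    # Hedged example, not part of the PR: find_texture_path probes the .msh folder,
    # its "PC"/"pc" subfolders, and the parent folder, returning the first hit or "".
    tex = find_texture_path("/mods/ABC/msh", "all_weap_inf_rifle.tga")
    if not tex:
        print("texture not found next to the .msh; the Blender material gets no image")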
@@ -1,5 +1,10 @@
""" UI strings that are too long to have in msh_materials_properties.py """


UI_RENDERTYPE_UNSUPPORTED_BF2_DESC = \
    "Unsupported rendertype. The raw values of the material " \
    "are fully accessible, but their purpose is unknown. "

UI_RENDERTYPE_DETAIL_MAP_DESC = \
    "Can optionally have a Detail Map."
@@ -4,6 +4,35 @@ from typing import Dict, List
from .msh_material import *
from .msh_model import *


_RENDERTYPES_MAPPING = {
    "NORMAL_BF2": Rendertype.NORMAL,
    "SCROLLING_BF2": Rendertype.SCROLLING,
    "ENVMAPPED_BF2": Rendertype.ENVMAPPED,
    "ANIMATED_BF2": Rendertype.ANIMATED,
    "REFRACTION_BF2": Rendertype.REFRACTION,
    "BLINK_BF2": Rendertype.BLINK,
    "NORMALMAPPED_TILED_BF2": Rendertype.NORMALMAPPED_TILED,
    "NORMALMAPPED_ENVMAPPED_BF2": Rendertype.NORMALMAPPED_ENVMAPPED,
    "NORMALMAPPED_BF2": Rendertype.NORMALMAPPED,
    "NORMALMAPPED_TILED_ENVMAPPED_BF2": Rendertype.NORMALMAPPED_TILED_ENVMAP}


_REVERSE_RENDERTYPES_MAPPING = {
    Rendertype.NORMAL : "NORMAL_BF2",
    Rendertype.SCROLLING : "SCROLLING_BF2",
    Rendertype.ENVMAPPED : "ENVMAPPED_BF2",
    Rendertype.ANIMATED : "ANIMATED_BF2",
    Rendertype.REFRACTION : "REFRACTION_BF2",
    Rendertype.BLINK : "BLINK_BF2",
    Rendertype.NORMALMAPPED_TILED : "NORMALMAPPED_TILED_BF2",
    Rendertype.NORMALMAPPED_ENVMAPPED : "NORMALMAPPED_ENVMAPPED_BF2",
    Rendertype.NORMALMAPPED : "NORMALMAPPED_BF2",
    Rendertype.NORMALMAPPED_TILED_ENVMAP : "NORMALMAPPED_TILED_ENVMAPPED_BF2"}



def remove_unused_materials(materials: Dict[str, Material],
                            models: List[Model]) -> Dict[str, Material]:
    """ Given a dictionary of materials and a list of models
@@ -2,7 +2,7 @@
    saved to a .msh file. """

from dataclasses import dataclass, field
from typing import List
from typing import List, Tuple, Dict
from enum import Enum
from mathutils import Vector, Quaternion

@@ -13,11 +13,16 @@ class ModelType(Enum):
    BONE = 3
    STATIC = 4

    # Maybe these are only for BF1 models (http://www.secretsociety.com/forum/downloads/BF1/BF1%20Mesh%20File%20Format.txt)?
    # According to that link #3 is envelope, not bone; maybe that's for TCW or something.
    # CHILDSKIN = 5  # I didn't bother with these, never encountered one, and they might need adjustments to vertex data
    SHADOWVOLUME = 6  # Pretty common

class CollisionPrimitiveShape(Enum):
    SPHERE = 0
    # ELLIPSOID = 1
    ELLIPSOID = 1
    CYLINDER = 2
    # MESH = 3
    MESH = 3
    BOX = 4

@dataclass

@@ -27,22 +32,31 @@ class ModelTransform:
    translation: Vector = field(default_factory=Vector)
    rotation: Quaternion = field(default_factory=Quaternion)

@dataclass
class VertexWeight:
    """ Class representing a vertex weight in a .msh file. """

    weight: float = 1.0
    bone: int = 0

@dataclass
class GeometrySegment:
    """ Class representing a 'SEGM' section in a .msh file. """

    material_name: str = ""
    material_name: str = field(default_factory=str)

    positions: List[Vector] = field(default_factory=list)
    normals: List[Vector] = field(default_factory=list)
    colors: List[List[float]] = None
    texcoords: List[Vector] = field(default_factory=list)
    # TODO: Skin support.

    weights: List[List[VertexWeight]] = None

    polygons: List[List[int]] = field(default_factory=list)
    triangles: List[List[int]] = field(default_factory=list)
    triangle_strips: List[List[int]] = None


@dataclass
class CollisionPrimitive:
    """ Class representing a 'SWCI' section in a .msh file. """

@@ -63,5 +77,33 @@ class Model:

    transform: ModelTransform = field(default_factory=ModelTransform)

    bone_map: List[str] = None

    geometry: List[GeometrySegment] = None
    collisionprimitive: CollisionPrimitive = None


@dataclass
class RotationFrame:

    index : int = 0
    rotation : Quaternion = field(default_factory=Quaternion)


@dataclass
class TranslationFrame:

    index : int = 0
    translation : Vector = field(default_factory=Vector)


@dataclass
class Animation:
    """ Class representing 'CYCL' + 'KFR3' sections in a .msh file """

    name: str = "fullanimation"
    bone_frames: Dict[int, Tuple[List[TranslationFrame], List[RotationFrame]]] = field(default_factory=dict)

    framerate: float = 29.97
    start_index : int = 0
    end_index : int = 0
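A small, hedged sketch of how the animation dataclasses above fit together, mirroring extract_anim(); it assumes mathutils and the crc/msh_model modules are imported, and the bone name and values are made up.

    # Hedged example, not part of the PR: a one-bone, two-frame Animation.
    anim = Animation(name="fullanimation", framerate=29.97, start_index=0, end_index=1)

    bone_crc = to_crc("bone_root")                 # bone_frames is keyed by name CRC
    anim.bone_frames[bone_crc] = ([], [])          # (translation frames, rotation frames)
    anim.bone_frames[bone_crc][0].append(TranslationFrame(0, Vector((0.0, 0.0, 0.0))))
    anim.bone_frames[bone_crc][1].append(RotationFrame(0, Quaternion()))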
@@ -9,12 +9,13 @@ from itertools import zip_longest
from .msh_model import *
from .msh_model_utilities import *
from .msh_utilities import *
from .msh_skeleton_utilities import *

SKIPPED_OBJECT_TYPES = {"LATTICE", "CAMERA", "LIGHT", "SPEAKER", "LIGHT_PROBE"}
MESH_OBJECT_TYPES = {"MESH", "CURVE", "SURFACE", "META", "FONT", "GPENCIL"}
MAX_MSH_VERTEX_COUNT = 32767

def gather_models(apply_modifiers: bool, export_target: str) -> List[Model]:
def gather_models(apply_modifiers: bool, export_target: str, skeleton_only: bool) -> Tuple[List[Model], bpy.types.Object]:
    """ Gathers the Blender objects from the current scene and returns them as a list of
        Model objects. """

@@ -23,6 +24,8 @@ def gather_models(apply_modifiers: bool, export_target: str) -> List[Model]:

    models_list: List[Model] = []

    armature_found = None

    for uneval_obj in select_objects(export_target):
        if uneval_obj.type in SKIPPED_OBJECT_TYPES and uneval_obj.name not in parents:
            continue

@@ -30,43 +33,72 @@ def gather_models(apply_modifiers: bool, export_target: str) -> List[Model]:
        if apply_modifiers:
            obj = uneval_obj.evaluated_get(depsgraph)
        else:
            obj = uneval_obj
            obj = uneval_obj

        check_for_bad_lod_suffix(obj)

        local_translation, local_rotation, _ = obj.matrix_local.decompose()
        if obj.type == "ARMATURE":
            models_list += expand_armature(obj)
            armature_found = obj
            continue

        model = Model()
        model.name = obj.name
        model.model_type = get_model_type(obj)
        model.model_type = get_model_type(obj, skeleton_only)
        model.hidden = get_is_model_hidden(obj)
        model.transform.rotation = convert_rotation_space(local_rotation)

        transform = obj.matrix_local

        if obj.parent_bone:
            model.parent = obj.parent_bone

            # matrix_local, when called on an armature child also parented to a bone, appears to be broken.
            # At the very least, the results contradict the docs...
            armature_relative_transform = obj.parent.matrix_world.inverted() @ obj.matrix_world
            transform = obj.parent.data.bones[obj.parent_bone].matrix_local.inverted() @ armature_relative_transform

        else:
            if obj.parent is not None:
                if obj.parent.type == "ARMATURE":
                    model.parent = obj.parent.parent.name if obj.parent.parent else ""
                    transform = obj.parent.matrix_local @ transform
                else:
                    model.parent = obj.parent.name

        local_translation, local_rotation, _ = transform.decompose()
        model.transform.rotation = convert_rotation_space(local_rotation)
        model.transform.translation = convert_vector_space(local_translation)

        if obj.parent is not None:
            model.parent = obj.parent.name

        if obj.type in MESH_OBJECT_TYPES:

            mesh = obj.to_mesh()
            model.geometry = create_mesh_geometry(mesh)
            model.geometry = create_mesh_geometry(mesh, obj.vertex_groups)

            obj.to_mesh_clear()

            _, _, world_scale = obj.matrix_world.decompose()
            world_scale = convert_scale_space(world_scale)
            scale_segments(world_scale, model.geometry)


            for segment in model.geometry:
                if len(segment.positions) > MAX_MSH_VERTEX_COUNT:
                    raise RuntimeError(f"Object '{obj.name}' has resulted in a .msh geometry segment that has "
                                       f"more than {MAX_MSH_VERTEX_COUNT} vertices! Split the object's mesh up "
                                       f"and try again!")
            if obj.vertex_groups:
                model.bone_map = [group.name for group in obj.vertex_groups]


        if get_is_collision_primitive(obj):
            model.collisionprimitive = get_collision_primitive(obj)


        models_list.append(model)

    return models_list

    return (models_list, armature_found)



def create_parents_set() -> Set[str]:
    """ Creates a set with the names of the Blender objects from the current scene

@@ -80,7 +112,7 @@ def create_parents_set() -> Set[str]:

    return parents

def create_mesh_geometry(mesh: bpy.types.Mesh) -> List[GeometrySegment]:
def create_mesh_geometry(mesh: bpy.types.Mesh, has_weights: bool) -> List[GeometrySegment]:
    """ Creates a list of GeometrySegment objects from a Blender mesh.
        Does NOT create triangle strips in the GeometrySegment however. """

@@ -93,7 +125,7 @@ def create_mesh_geometry(mesh: bpy.types.Mesh) -> List[GeometrySegment]:
    material_count = max(len(mesh.materials), 1)

    segments: List[GeometrySegment] = [GeometrySegment() for i in range(material_count)]
    vertex_cache: List[Dict[Tuple[float], int]] = [dict() for i in range(material_count)]
    vertex_cache = [dict() for i in range(material_count)]
    vertex_remap: List[Dict[Tuple[int, int], int]] = [dict() for i in range(material_count)]
    polygons: List[Set[int]] = [set() for i in range(material_count)]

@@ -101,6 +133,10 @@ def create_mesh_geometry(mesh: bpy.types.Mesh) -> List[GeometrySegment]:
        for segment in segments:
            segment.colors = []

    if has_weights:
        for segment in segments:
            segment.weights = []

    for segment, material in zip(segments, mesh.materials):
        segment.material_name = material.name

@@ -116,11 +152,11 @@ def create_mesh_geometry(mesh: bpy.types.Mesh) -> List[GeometrySegment]:

        if use_smooth_normal or mesh.use_auto_smooth:
            if mesh.has_custom_normals:
                vertex_normal = mesh.loops[loop_index].normal
                vertex_normal = Vector( mesh.loops[loop_index].normal )
            else:
                vertex_normal = mesh.vertices[vertex_index].normal
                vertex_normal = Vector( mesh.vertices[vertex_index].normal )
        else:
            vertex_normal = face_normal
            vertex_normal = Vector(face_normal)

        def get_cache_vertex():
            yield mesh.vertices[vertex_index].co.x

@@ -139,6 +175,11 @@ def create_mesh_geometry(mesh: bpy.types.Mesh) -> List[GeometrySegment]:
                for v in mesh.vertex_colors.active.data[loop_index].color:
                    yield v

            if segment.weights is not None:
                for v in mesh.vertices[vertex_index].groups:
                    yield v.group
                    yield v.weight

        vertex_cache_entry = tuple(get_cache_vertex())
        cached_vertex_index = cache.get(vertex_cache_entry, vertex_cache_miss_index)

@@ -162,6 +203,11 @@ def create_mesh_geometry(mesh: bpy.types.Mesh) -> List[GeometrySegment]:
        if segment.colors is not None:
            segment.colors.append(list(mesh.vertex_colors.active.data[loop_index].color))

        if segment.weights is not None:
            groups = mesh.vertices[vertex_index].groups

            segment.weights.append([VertexWeight(v.weight, v.group) for v in groups])

        return new_index

    for tri in mesh.loop_triangles:

@@ -179,12 +225,14 @@ def create_mesh_geometry(mesh: bpy.types.Mesh) -> List[GeometrySegment]:

    return segments

def get_model_type(obj: bpy.types.Object) -> ModelType:
def get_model_type(obj: bpy.types.Object, skel_only: bool) -> ModelType:
    """ Get the ModelType for a Blender object. """
    # TODO: Skinning support, etc

    if obj.type in MESH_OBJECT_TYPES:
        return ModelType.STATIC
    if obj.type in MESH_OBJECT_TYPES and not skel_only:
        if obj.vertex_groups:
            return ModelType.SKIN
        else:
            return ModelType.STATIC

    return ModelType.NULL

@@ -193,6 +241,8 @@ def get_is_model_hidden(obj: bpy.types.Object) -> bool:

    name = obj.name.lower()

    if name.startswith("c_"):
        return True
    if name.startswith("sv_"):
        return True
    if name.startswith("p_"):

@@ -249,6 +299,9 @@ def get_collision_primitive(obj: bpy.types.Object) -> CollisionPrimitive:

    return primitive


def get_collision_primitive_shape(obj: bpy.types.Object) -> CollisionPrimitiveShape:
    """ Gets the CollisionPrimitiveShape of an object or raises an error if
        it can't. """

@@ -262,8 +315,14 @@ def get_collision_primitive_shape(obj: bpy.types.Object) -> CollisionPrimitiveSh
    if "box" in name or "cube" in name or "cuboid" in name:
        return CollisionPrimitiveShape.BOX

    # arc170 fighter has examples of box colliders without proper naming
    prim_type = obj.swbf_msh_coll_prim.prim_type
    if prim_type in [item.value for item in CollisionPrimitiveShape]:
        return CollisionPrimitiveShape(prim_type)

    raise RuntimeError(f"Object '{obj.name}' has no primitive type specified in it's name!")


def check_for_bad_lod_suffix(obj: bpy.types.Object):
    """ Checks if the object has an LOD suffix that is known to be ignored by """

@@ -320,11 +379,41 @@ def select_objects(export_target: str) -> List[bpy.types.Object]:

    return objects + parents

def convert_vector_space(vec: Vector) -> Vector:
    return Vector((-vec.x, vec.z, vec.y))

def convert_scale_space(vec: Vector) -> Vector:
    return Vector(vec.xzy)

def convert_rotation_space(quat: Quaternion) -> Quaternion:
    return Quaternion((-quat.w, quat.x, -quat.z, -quat.y))

def expand_armature(armature: bpy.types.Object) -> List[Model]:

    proper_BONES = get_real_BONES(armature)

    bones: List[Model] = []

    for bone in armature.data.bones:
        model = Model()

        transform = bone.matrix_local

        if bone.parent:
            transform = bone.parent.matrix_local.inverted() @ transform
            model.parent = bone.parent.name
        # If the bone has no parent_bone:
        #   set model parent to SKIN object if there is one
        #   set model parent to armature parent if there is one
        else:
            for child_obj in armature.children:
                if child_obj.vertex_groups and not get_is_model_hidden(child_obj) and not child_obj.parent_bone:
                    model.parent = child_obj.name
                    break
            if not model.parent and armature.parent:
                model.parent = armature.parent.name


        local_translation, local_rotation, _ = transform.decompose()

        model.model_type = ModelType.BONE if bone.name in proper_BONES else ModelType.NULL
        model.name = bone.name
        model.transform.rotation = convert_rotation_space(local_rotation)
        model.transform.translation = convert_vector_space(local_translation)

        bones.append(model)

    return bones
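A worked example of the axis conversion performed by the convert_* helpers above (sample values only), assuming mathutils Vector/Quaternion are in scope:

    # Hedged example, not part of the PR: the helpers swap Y and Z and negate X.
    v = convert_vector_space(Vector((1.0, 2.0, 3.0)))    # -> Vector((-1.0, 3.0, 2.0))
    s = convert_scale_space(Vector((1.0, 2.0, 3.0)))     # -> Vector((1.0, 3.0, 2.0))
    q = convert_rotation_space(Quaternion((1.0, 0.0, 0.0, 0.0)))
    # identity quaternion -> Quaternion((-1.0, 0.0, 0.0, 0.0)), the same rotation negated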
@@ -3,8 +3,41 @@
from typing import List
from .msh_model import *
from .msh_utilities import *

import mathutils
import math
from mathutils import Vector, Matrix


def inject_dummy_data(model: Model):
    """ Adds a triangle and material to the model (scene root). Needed to export ZenAsset-compatible skeletons. """
    model.hidden = True

    dummy_seg = GeometrySegment()
    dummy_seg.material_name = ""

    dummy_seg.positions = [Vector((0.0, 0.1, 0.0)), Vector((0.1, 0.0, 0.0)), Vector((0.0, 0.0, 0.1))]
    dummy_seg.normals = [Vector((0.0, 1.0, 0.0)), Vector((1.0, 0.0, 0.0)), Vector((0.0, 0.0, 1.0))]
    dummy_seg.texcoords = [Vector((0.1, 0.1)), Vector((0.2, 0.2)), Vector((0.3, 0.3))]
    tri = [[0, 1, 2]]
    dummy_seg.triangles = tri
    dummy_seg.polygons = tri
    dummy_seg.triangle_strips = tri

    model.geometry = [dummy_seg]
    model.model_type = ModelType.STATIC

def convert_vector_space(vec: Vector) -> Vector:
    return Vector((-vec.x, vec.z, vec.y))

def convert_scale_space(vec: Vector) -> Vector:
    return Vector(vec.xzy)

def convert_rotation_space(quat: Quaternion) -> Quaternion:
    return Quaternion((-quat.w, quat.x, -quat.z, -quat.y))

def model_transform_to_matrix(transform: ModelTransform):
    return Matrix.Translation(convert_vector_space(transform.translation)) @ convert_rotation_space(transform.rotation).to_matrix().to_4x4()

def scale_segments(scale: Vector, segments: List[GeometrySegment]):
    """ Scales the positions in the GeometrySegment list. """

@@ -114,3 +147,4 @@ def is_model_name_unused(name: str, models: List[Model]) -> bool:
            return False

    return True
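A brief, hedged sketch of composing a ModelTransform back into a Blender matrix with the helper above; the values are made up.

    # Hedged example, not part of the PR.
    xform = ModelTransform()
    xform.translation = Vector((0.0, 1.0, 0.0))   # hypothetical values from a MODL chunk
    xform.rotation = Quaternion()                 # identity

    local = model_transform_to_matrix(xform)
    # local is a 4x4 Matrix: the converted rotation with the converted translation;
    # parent matrices can be chained with `parent_matrix @ local` to get a world matrix.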
@@ -4,17 +4,15 @@
from dataclasses import dataclass, field
from typing import List, Dict
from copy import copy

import bpy
from mathutils import Vector
from .msh_model import Model
from .msh_model_gather import gather_models
from .msh_model_utilities import sort_by_parent, has_multiple_root_models, reparent_model_roots, get_model_world_matrix
from .msh_model_triangle_strips import create_models_triangle_strips

from .msh_model import Model, Animation, ModelType
from .msh_material import *
from .msh_material_gather import gather_materials
from .msh_material_utilities import remove_unused_materials
from .msh_utilities import *


@dataclass
class SceneAABB:
    """ Class representing an axis-aligned bounding box. """

@@ -44,53 +42,6 @@ class Scene:
    materials: Dict[str, Material] = field(default_factory=dict)
    models: List[Model] = field(default_factory=list)

def create_scene(generate_triangle_strips: bool, apply_modifiers: bool, export_target: str) -> Scene:
    """ Create a msh Scene from the active Blender scene. """
    animation: Animation = None

    scene = Scene()

    scene.name = bpy.context.scene.name

    scene.materials = gather_materials()

    scene.models = gather_models(apply_modifiers=apply_modifiers, export_target=export_target)
    scene.models = sort_by_parent(scene.models)

    if generate_triangle_strips:
        scene.models = create_models_triangle_strips(scene.models)
    else:
        for model in scene.models:
            if model.geometry:
                for segment in model.geometry:
                    segment.triangle_strips = segment.triangles

    if has_multiple_root_models(scene.models):
        scene.models = reparent_model_roots(scene.models)

    scene.materials = remove_unused_materials(scene.materials, scene.models)

    return scene

def create_scene_aabb(scene: Scene) -> SceneAABB:
    """ Create a SceneAABB for a Scene. """

    global_aabb = SceneAABB()

    for model in scene.models:
        if model.geometry is None or model.hidden:
            continue

        model_world_matrix = get_model_world_matrix(model, scene.models)
        model_aabb = SceneAABB()

        for segment in model.geometry:
            segment_aabb = SceneAABB()

            for pos in segment.positions:
                segment_aabb.integrate_position(model_world_matrix @ pos)

            model_aabb.integrate_aabb(segment_aabb)

        global_aabb.integrate_aabb(model_aabb)

    return global_aabb
    skeleton: List[int] = field(default_factory=list)
@ -0,0 +1,470 @@
|
|||
""" Contains functions for extracting a scene from a .msh file"""
|
||||
|
||||
from itertools import islice
|
||||
from typing import Dict
|
||||
from .msh_scene import Scene
|
||||
from .msh_model import *
|
||||
from .msh_material import *
|
||||
from .msh_utilities import *
|
||||
|
||||
from .crc import *
|
||||
|
||||
from .chunked_file_reader import Reader
|
||||
|
||||
|
||||
|
||||
# Current model position
|
||||
model_counter = 0
|
||||
|
||||
# Used to remap MNDX to the MODL's actual position
|
||||
mndx_remap : Dict[int, int] = {}
|
||||
|
||||
# How much to print
|
||||
debug_level = 0
|
||||
|
||||
|
||||
'''
|
||||
Debug levels just indicate how much info should be printed.
|
||||
0 = nothing
|
||||
1 = just blurbs about valuable info in the chunks
|
||||
2 = #1 + full chunk structure
|
||||
'''
|
||||
def read_scene(input_file, anim_only=False, debug=0) -> Scene:
|
||||
|
||||
global debug_level
|
||||
debug_level = debug
|
||||
|
||||
scene = Scene()
|
||||
scene.models = []
|
||||
scene.materials = {}
|
||||
|
||||
global mndx_remap
|
||||
mndx_remap = {}
|
||||
|
||||
global model_counter
|
||||
model_counter = 0
|
||||
|
||||
with Reader(file=input_file, debug=debug_level>0) as head:
|
||||
|
||||
head.skip_until("HEDR")
|
||||
|
||||
with head.read_child() as hedr:
|
||||
|
||||
while hedr.could_have_child():
|
||||
|
||||
next_header = hedr.peak_next_header()
|
||||
|
||||
if next_header == "MSH2":
|
||||
|
||||
with hedr.read_child() as msh2:
|
||||
|
||||
if not anim_only:
|
||||
materials_list = []
|
||||
|
||||
while (msh2.could_have_child()):
|
||||
|
||||
next_header = msh2.peak_next_header()
|
||||
|
||||
if next_header == "SINF":
|
||||
with msh2.read_child() as sinf:
|
||||
pass
|
||||
|
||||
elif next_header == "MATL":
|
||||
with msh2.read_child() as matl:
|
||||
materials_list += _read_matl_and_get_materials_list(matl)
|
||||
for i,mat in enumerate(materials_list):
|
||||
scene.materials[mat.name] = mat
|
||||
|
||||
elif next_header == "MODL":
|
||||
with msh2.read_child() as modl:
|
||||
scene.models.append(_read_modl(modl, materials_list))
|
||||
|
||||
else:
|
||||
msh2.skip_bytes(1)
|
||||
|
||||
elif next_header == "SKL2":
|
||||
with hedr.read_child() as skl2:
|
||||
num_bones = skl2.read_u32()
|
||||
scene.skeleton = [skl2.read_u32(5)[0] for i in range(num_bones)]
|
||||
|
||||
elif next_header == "ANM2":
|
||||
with hedr.read_child() as anm2:
|
||||
scene.animation = _read_anm2(anm2)
|
||||
|
||||
else:
|
||||
hedr.skip_bytes(1)
|
||||
|
||||
# Print models in skeleton
|
||||
if scene.skeleton and debug_level > 0:
|
||||
print("Skeleton models: ")
|
||||
for model in scene.models:
|
||||
for i in range(len(scene.skeleton)):
|
||||
if to_crc(model.name) == scene.skeleton[i]:
|
||||
print("\t" + model.name)
|
||||
if model.model_type == ModelType.SKIN:
|
||||
scene.skeleton.pop(i)
|
||||
break
|
||||
|
||||
'''
|
||||
Iterate through every vertex weight in the scene and
|
||||
change its index to directly reference its bone's index.
|
||||
It will reference the MNDX of its bone's MODL by default.
|
||||
'''
|
||||
|
||||
for model in scene.models:
|
||||
if model.geometry:
|
||||
for seg in model.geometry:
|
||||
if seg.weights:
|
||||
for weight_set in seg.weights:
|
||||
for vweight in weight_set:
|
||||
if vweight.bone in mndx_remap:
|
||||
vweight.bone = mndx_remap[vweight.bone]
|
||||
else:
|
||||
vweight.bone = 0
|
||||
|
||||
return scene
|
||||
|
||||
|
||||
def _read_matl_and_get_materials_list(matl: Reader) -> List[Material]:
|
||||
materials_list: List[Material] = []
|
||||
|
||||
num_mats = matl.read_u32()
|
||||
|
||||
for _ in range(num_mats):
|
||||
with matl.read_child() as matd:
|
||||
materials_list.append(_read_matd(matd))
|
||||
|
||||
return materials_list
|
||||
|
||||
|
||||
|
||||
def _read_matd(matd: Reader) -> Material:
|
||||
|
||||
mat = Material()
|
||||
|
||||
while matd.could_have_child():
|
||||
|
||||
next_header = matd.peak_next_header()
|
||||
|
||||
if next_header == "NAME":
|
||||
with matd.read_child() as name:
|
||||
mat.name = name.read_string()
|
||||
|
||||
elif next_header == "DATA":
|
||||
with matd.read_child() as data:
|
||||
data.read_f32(4) # Diffuse Color (Seems to get ignored by modelmunge)
|
||||
mat.specular_color = data.read_f32(4)
|
||||
data.read_f32(4) # Ambient Color (Seems to get ignored by modelmunge and Zero(?))
|
||||
data.read_f32() # Specular Exponent/Decay (Gets ignored by RedEngine in SWBFII for all known materials)
|
||||
|
||||
elif next_header == "ATRB":
|
||||
with matd.read_child() as atrb:
|
||||
mat.flags = MaterialFlags(atrb.read_u8())
|
||||
mat.rendertype = Rendertype(atrb.read_u8())
|
||||
mat.data = atrb.read_u8(2)
|
||||
|
||||
elif next_header == "TX0D":
|
||||
with matd.read_child() as tx0d:
|
||||
mat.texture0 = tx0d.read_string()
|
||||
|
||||
elif next_header == "TX1D":
|
||||
with matd.read_child() as tx1d:
|
||||
mat.texture1 = tx1d.read_string()
|
||||
|
||||
elif next_header == "TX2D":
|
||||
with matd.read_child() as tx2d:
|
||||
mat.texture2 = tx2d.read_string()
|
||||
|
||||
elif next_header == "TX3D":
|
||||
with matd.read_child() as tx3d:
|
||||
mat.texture3 = tx3d.read_string()
|
||||
|
||||
else:
|
||||
matd.skip_bytes(1)
|
||||
|
||||
return mat
|
||||
|
||||
|
||||
def _read_modl(modl: Reader, materials_list: List[Material]) -> Model:
|
||||
|
||||
model = Model()
|
||||
|
||||
while modl.could_have_child():
|
||||
|
||||
next_header = modl.peak_next_header()
|
||||
|
||||
if next_header == "MTYP":
|
||||
with modl.read_child() as mtyp:
|
||||
model.model_type = ModelType(mtyp.read_u32())
|
||||
|
||||
elif next_header == "MNDX":
|
||||
with modl.read_child() as mndx:
|
||||
index = mndx.read_u32()
|
||||
|
||||
global model_counter
|
||||
global mndx_remap
|
||||
mndx_remap[index] = model_counter
|
||||
|
||||
model_counter += 1
|
||||
|
||||
elif next_header == "NAME":
|
||||
with modl.read_child() as name:
|
||||
model.name = name.read_string()
|
||||
|
||||
elif next_header == "PRNT":
|
||||
with modl.read_child() as prnt:
|
||||
model.parent = prnt.read_string()
|
||||
|
||||
elif next_header == "FLGS":
|
||||
with modl.read_child() as flgs:
|
||||
model.hidden = flgs.read_u32()
|
||||
|
||||
elif next_header == "TRAN":
|
||||
with modl.read_child() as tran:
|
||||
model.transform = _read_tran(tran)
|
||||
|
||||
elif next_header == "GEOM":
|
||||
model.geometry = []
|
||||
envelope = []
|
||||
|
||||
with modl.read_child() as geom:
|
||||
|
||||
while geom.could_have_child():
|
||||
next_header_geom = geom.peak_next_header()
|
||||
|
||||
if next_header_geom == "SEGM":
|
||||
with geom.read_child() as segm:
|
||||
model.geometry.append(_read_segm(segm, materials_list))
|
||||
|
||||
elif next_header_geom == "ENVL":
|
||||
with geom.read_child() as envl:
|
||||
num_indicies = envl.read_u32()
|
||||
envelope += [envl.read_u32() for _ in range(num_indicies)]
|
||||
|
||||
elif next_header_geom == "CLTH":
|
||||
with geom.read_child() as clth:
|
||||
pass
|
||||
|
||||
else:
|
||||
geom.skip_bytes(1)
|
||||
|
||||
for seg in model.geometry:
|
||||
if seg.weights and envelope:
|
||||
for weight_set in seg.weights:
|
||||
for vertex_weight in weight_set:
|
||||
vertex_weight.bone = envelope[vertex_weight.bone]
|
||||
|
||||
elif next_header == "SWCI":
|
||||
prim = CollisionPrimitive()
|
||||
with modl.read_child() as swci:
|
||||
prim.shape = CollisionPrimitiveShape(swci.read_u32())
|
||||
prim.radius = swci.read_f32()
|
||||
prim.height = swci.read_f32()
|
||||
prim.length = swci.read_f32()
|
||||
model.collisionprimitive = prim
|
||||
|
||||
else:
|
||||
modl.skip_bytes(1)
|
||||
|
||||
return model
|
||||
|
||||
|
||||
def _read_tran(tran: Reader) -> ModelTransform:
|
||||
|
||||
xform = ModelTransform()
|
||||
|
||||
tran.skip_bytes(12) #ignore scale
|
||||
|
||||
xform.rotation = tran.read_quat()
|
||||
xform.translation = tran.read_vec()
|
||||
|
||||
return xform
|
||||
|
||||
|
||||
def _read_segm(segm: Reader, materials_list: List[Material]) -> GeometrySegment:
|
||||
|
||||
geometry_seg = GeometrySegment()
|
||||
|
||||
while segm.could_have_child():
|
||||
|
||||
next_header = segm.peak_next_header()
|
||||
|
||||
if next_header == "MATI":
|
||||
with segm.read_child() as mati:
|
||||
geometry_seg.material_name = materials_list[mati.read_u32()].name
|
||||
|
||||
elif next_header == "POSL":
|
||||
with segm.read_child() as posl:
|
||||
num_positions = posl.read_u32()
|
||||
|
||||
for _ in range(num_positions):
|
||||
geometry_seg.positions.append(Vector(posl.read_f32(3)))
|
||||
|
||||
elif next_header == "NRML":
|
||||
with segm.read_child() as nrml:
|
||||
num_normals = nrml.read_u32()
|
||||
|
||||
for _ in range(num_normals):
|
||||
geometry_seg.normals.append(Vector(nrml.read_f32(3)))
|
||||
|
||||
elif next_header == "CLRL":
|
||||
geometry_seg.colors = []
|
||||
|
||||
with segm.read_child() as clrl:
|
||||
num_colors = clrl.read_u32()
|
||||
|
||||
for _ in range(num_colors):
|
||||
geometry_seg.colors += unpack_color(clrl.read_u32())
|
||||
|
||||
elif next_header == "UV0L":
|
||||
with segm.read_child() as uv0l:
|
||||
num_texcoords = uv0l.read_u32()
|
||||
|
||||
for _ in range(num_texcoords):
|
||||
geometry_seg.texcoords.append(Vector(uv0l.read_f32(2)))
|
||||
|
||||
# TODO: Can't remember exact issue here...
|
||||
elif next_header == "NDXL":
|
||||
|
||||
with segm.read_child() as ndxl:
|
||||
pass
|
||||
'''
|
||||
num_polygons = ndxl.read_u32()
|
||||
|
||||
for _ in range(num_polygons):
|
||||
polygon = ndxl.read_u16(ndxl.read_u16())
|
||||
geometry_seg.polygons.append(polygon)
|
||||
'''
|
||||
|
||||
elif next_header == "NDXT":
|
||||
with segm.read_child() as ndxt:
|
||||
num_tris = ndxt.read_u32()
|
||||
|
||||
for _ in range(num_tris):
|
||||
geometry_seg.triangles.append(ndxt.read_u16(3))
|
||||
|
||||
# There could be major issues with this; so far it hasn't failed, but its inelegance irks me
|
||||
elif next_header == "STRP":
|
||||
strips : List[List[int]] = []
|
||||
|
||||
with segm.read_child() as strp:
|
||||
num_indicies = strp.read_u32()
|
||||
|
||||
num_indicies_read = 0
|
||||
|
||||
curr_strip = []
|
||||
previous_flag = False
|
||||
|
||||
if num_indicies > 0:
|
||||
index, index1 = strp.read_u16(2)
|
||||
curr_strip = [index & 0x7fff, index1 & 0x7fff]
|
||||
num_indicies_read += 2
|
||||
|
||||
for i in range(num_indicies - 2):
|
||||
index = strp.read_u16(1)
|
||||
|
||||
if index & 0x8000 > 0:
|
||||
index = index & 0x7fff
|
||||
|
||||
if previous_flag:
|
||||
previous_flag = False
|
||||
curr_strip.append(index)
|
||||
strips.append(curr_strip[:-2])
|
||||
curr_strip = curr_strip[-2:]
|
||||
continue
|
||||
else:
|
||||
previous_flag = True
|
||||
|
||||
else:
|
||||
previous_flag = False
|
||||
|
||||
curr_strip.append(index)
|
||||
|
||||
# The strip still held in curr_strip when the index stream ends is the final one
if curr_strip:
    strips.append(curr_strip)

geometry_seg.triangle_strips = strips
|
||||
|
||||
# TODO: Don't know how to handle trailing 0 bug yet: https://schlechtwetterfront.github.io/ze_filetypes/msh.html#STRP
|
||||
#if segm.read_u16 != 0:
|
||||
# segm.skip_bytes(-2)
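# Illustrative (hypothetical) example of the flag convention handled above: an index stream of
# 0x8000|0, 0x8000|1, 2, 3, 0x8000|4, 0x8000|5, 6 decodes to the strips [0, 1, 2, 3] and [4, 5, 6];
# two consecutive high-bit indices mark the first two vertices of a new strip.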
|
||||
|
||||
elif next_header == "WGHT":
|
||||
with segm.read_child() as wght:
|
||||
|
||||
geometry_seg.weights = []
|
||||
num_weights = wght.read_u32()
|
||||
|
||||
for _ in range(num_weights):
|
||||
weight_set = []
|
||||
for _ in range(4):
|
||||
index = wght.read_u32()
|
||||
value = wght.read_f32()
|
||||
|
||||
if value > 0.000001:
|
||||
weight_set.append(VertexWeight(value,index))
|
||||
|
||||
geometry_seg.weights.append(weight_set)
|
||||
|
||||
else:
|
||||
segm.skip_bytes(1)
|
||||
|
||||
return geometry_seg
|
||||
|
||||
|
||||
|
||||
def _read_anm2(anm2: Reader) -> Animation:
|
||||
|
||||
anim = Animation()
|
||||
|
||||
while anm2.could_have_child():
|
||||
|
||||
next_header = anm2.peak_next_header()
|
||||
|
||||
if next_header == "CYCL":
|
||||
with anm2.read_child() as cycl:
|
||||
# Don't even know what CYCL's data does. Tried playing
|
||||
# with the values but didn't change anything in zenasset or ingame...
|
||||
|
||||
# Besides num_anims, which is never > 1 for any SWBF1/2 mshs I've seen
|
||||
|
||||
'''
|
||||
num_anims = cycl.read_u32()
|
||||
|
||||
for _ in range(num_anims):
|
||||
cycl.skip_bytes(64)
|
||||
print("CYCL play style {}".format(cycl.read_u32(4)[1]))
|
||||
'''
|
||||
pass
|
||||
|
||||
elif next_header == "KFR3":
|
||||
with anm2.read_child() as kfr3:
|
||||
|
||||
num_bones = kfr3.read_u32()
|
||||
|
||||
bone_crcs = []
|
||||
|
||||
for _ in range(num_bones):
|
||||
|
||||
bone_crc = kfr3.read_u32()
|
||||
bone_crcs.append(bone_crc)
|
||||
|
||||
frames = ([],[])
|
||||
|
||||
frametype = kfr3.read_u32()
|
||||
|
||||
num_loc_frames = kfr3.read_u32()
|
||||
num_rot_frames = kfr3.read_u32()
|
||||
|
||||
for i in range(num_loc_frames):
|
||||
frames[0].append(TranslationFrame(kfr3.read_u32(), kfr3.read_vec()))
|
||||
|
||||
for i in range(num_rot_frames):
|
||||
frames[1].append(RotationFrame(kfr3.read_u32(), kfr3.read_quat()))
|
||||
|
||||
anim.bone_frames[bone_crc] = frames
|
||||
|
||||
else:
|
||||
anm2.skip_bytes(1)
|
||||
|
||||
return anim
|
||||
|
||||
|
||||
|
|
@ -2,12 +2,16 @@
|
|||
|
||||
from itertools import islice
|
||||
from typing import Dict
|
||||
from .msh_scene import Scene, create_scene_aabb
|
||||
from .msh_scene import Scene
|
||||
from .msh_scene_utilities import create_scene_aabb
|
||||
from .msh_model import *
|
||||
from .msh_material import *
|
||||
from .msh_writer import Writer
|
||||
from .msh_utilities import *
|
||||
|
||||
from .crc import *
|
||||
|
||||
|
||||
def save_scene(output_file, scene: Scene):
|
||||
""" Saves scene to the supplied file. """
|
||||
|
||||
|
@ -17,6 +21,7 @@ def save_scene(output_file, scene: Scene):
|
|||
with msh2.create_child("SINF") as sinf:
|
||||
_write_sinf(sinf, scene)
|
||||
|
||||
model_index: Dict[str, int] = {model.name:(i+1) for i, model in enumerate(scene.models)}
|
||||
material_index: Dict[str, int] = {}
|
||||
|
||||
with msh2.create_child("MATL") as matl:
|
||||
|
@ -24,7 +29,24 @@ def save_scene(output_file, scene: Scene):
|
|||
|
||||
for index, model in enumerate(scene.models):
|
||||
with msh2.create_child("MODL") as modl:
|
||||
_write_modl(modl, model, index, material_index)
|
||||
_write_modl(modl, model, index, material_index, model_index)
|
||||
|
||||
# Contrary to earlier belief, anim/skel info does not need to be exported for animated models
|
||||
# BUT, unless a model is a BONE, it won't animate!
|
||||
# Marking models as BONEs is not necessary when exporting animations, though: the following
|
||||
# chunks are necessary and the animated models can be marked as NULLs
|
||||
if scene.animation is not None:
|
||||
# Seems as though SKL2 is wholly unnecessary from SWBF's perspective (for models and anims),
|
||||
# but it is there in all stock models/anims
|
||||
with hedr.create_child("SKL2") as skl2:
|
||||
_write_skl2(skl2, scene.animation)
|
||||
|
||||
# Definitely not necessary, but included anyway
|
||||
with hedr.create_child("BLN2") as bln2:
|
||||
_write_bln2(bln2, scene.animation)
|
||||
|
||||
with hedr.create_child("ANM2") as anm2:
|
||||
_write_anm2(anm2, scene.animation)
|
||||
|
||||
with hedr.create_child("CL1L"):
|
||||
pass
|
||||
|
@ -76,7 +98,7 @@ def _write_matd(matd: Writer, material_name: str, material: Material):
|
|||
data.write_f32(1.0, 1.0, 1.0, 1.0) # Diffuse Color (Seems to get ignored by modelmunge)
|
||||
data.write_f32(material.specular_color[0], material.specular_color[1],
|
||||
material.specular_color[2], 1.0)
|
||||
data.write_f32(0.0, 0.0, 0.0, 1.0) # Ambient Color (Seems to get ignored by modelmunge and Zero(?))
|
||||
data.write_f32(1.0, 1.0, 1.0, 1.0) # Ambient Color (Seems to get ignored by modelmunge and Zero(?))
|
||||
data.write_f32(50.0) # Specular Exponent/Decay (Gets ignored by RedEngine in SWBFII for all known materials)
|
||||
with matd.create_child("ATRB") as atrb:
|
||||
atrb.write_u8(material.flags.value)
|
||||
|
@ -97,12 +119,12 @@ def _write_matd(matd: Writer, material_name: str, material: Material):
|
|||
with matd.create_child("TX3D") as tx3d:
|
||||
tx3d.write_string(material.texture3)
|
||||
|
||||
def _write_modl(modl: Writer, model: Model, index: int, material_index: Dict[str, int]):
|
||||
def _write_modl(modl: Writer, model: Model, index: int, material_index: Dict[str, int], model_index: Dict[str, int]):
|
||||
with modl.create_child("MTYP") as mtyp:
|
||||
mtyp.write_u32(model.model_type.value)
|
||||
|
||||
with modl.create_child("MNDX") as mndx:
|
||||
mndx.write_u32(index)
|
||||
mndx.write_u32(index + 1)
|
||||
|
||||
with modl.create_child("NAME") as name:
|
||||
name.write_string(model.name)
|
||||
|
@ -120,10 +142,20 @@ def _write_modl(modl: Writer, model: Model, index: int, material_index: Dict[str
|
|||
|
||||
if model.geometry is not None:
|
||||
with modl.create_child("GEOM") as geom:
|
||||
|
||||
with geom.create_child("BBOX") as bbox:
|
||||
bbox.write_f32(0.0, 0.0, 0.0, 1.0)
|
||||
bbox.write_f32(0, 0, 0)
|
||||
bbox.write_f32(1.0,1.0,1.0,2.0)
|
||||
|
||||
for segment in model.geometry:
|
||||
with geom.create_child("SEGM") as segm:
|
||||
_write_segm(segm, segment, material_index)
|
||||
|
||||
if model.bone_map:
|
||||
with geom.create_child("ENVL") as envl:
|
||||
_write_envl(envl, model, model_index)
|
||||
|
||||
if model.collisionprimitive is not None:
|
||||
with modl.create_child("SWCI") as swci:
|
||||
swci.write_u32(model.collisionprimitive.shape.value)
|
||||
|
@ -147,10 +179,14 @@ def _write_segm(segm: Writer, segment: GeometrySegment, material_index: Dict[str
|
|||
for position in segment.positions:
|
||||
posl.write_f32(position.x, position.y, position.z)
|
||||
|
||||
if segment.weights:
|
||||
with segm.create_child("WGHT") as wght:
|
||||
_write_wght(wght, segment.weights)
|
||||
|
||||
with segm.create_child("NRML") as nrml:
|
||||
nrml.write_u32(len(segment.normals))
|
||||
|
||||
for normal in segment.normals:
|
||||
for i,normal in enumerate(segment.normals):
|
||||
nrml.write_f32(normal.x, normal.y, normal.z)
|
||||
|
||||
if segment.colors is not None:
|
||||
|
@ -160,11 +196,12 @@ def _write_segm(segm: Writer, segment: GeometrySegment, material_index: Dict[str
|
|||
for color in segment.colors:
|
||||
clrl.write_u32(pack_color(color))
|
||||
|
||||
with segm.create_child("UV0L") as uv0l:
|
||||
uv0l.write_u32(len(segment.texcoords))
|
||||
if segment.texcoords is not None:
|
||||
with segm.create_child("UV0L") as uv0l:
|
||||
uv0l.write_u32(len(segment.texcoords))
|
||||
|
||||
for texcoord in segment.texcoords:
|
||||
uv0l.write_f32(texcoord.x, texcoord.y)
|
||||
for texcoord in segment.texcoords:
|
||||
uv0l.write_f32(texcoord.x, texcoord.y)
|
||||
|
||||
with segm.create_child("NDXL") as ndxl:
|
||||
ndxl.write_u32(len(segment.polygons))
|
||||
|
@ -189,3 +226,82 @@ def _write_segm(segm: Writer, segment: GeometrySegment, material_index: Dict[str
|
|||
|
||||
for index in islice(strip, 2, len(strip)):
|
||||
strp.write_u16(index)
|
||||
|
||||
'''
|
||||
SKINNING CHUNKS
|
||||
'''
|
||||
def _write_wght(wght: Writer, weights: List[List[VertexWeight]]):
|
||||
wght.write_u32(len(weights))
|
||||
|
||||
for weight_list in weights:
|
||||
weight_list += [VertexWeight(0.0, 0)] * 4
|
||||
weight_list = sorted(weight_list, key=lambda w: w.weight, reverse=True)
|
||||
weight_list = weight_list[:4]
|
||||
|
||||
total_weight = max(sum(map(lambda w: w.weight, weight_list)), 1e-5)
|
||||
|
||||
for weight in weight_list:
|
||||
wght.write_i32(weight.bone)
|
||||
wght.write_f32(weight.weight / total_weight)
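# Illustrative (hypothetical) example of the normalization above: an input weight list of
# (0.6 on bone 2) and (0.2 on bone 5) is padded with zero weights to four entries, sorted,
# and written normalized as 0.75 and 0.25 plus two zero-weight padding entries.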
|
||||
|
||||
def _write_envl(envl: Writer, model: Model, model_index: Dict[str, int]):
|
||||
envl.write_u32(len(model.bone_map))
|
||||
for bone_name in model.bone_map:
|
||||
envl.write_u32(model_index[bone_name])
|
||||
|
||||
'''
|
||||
SKELETON CHUNKS
|
||||
'''
|
||||
def _write_bln2(bln2: Writer, anim: Animation):
|
||||
bones = anim.bone_frames.keys()
|
||||
bln2.write_u32(len(bones))
|
||||
|
||||
for bone_crc in bones:
|
||||
bln2.write_u32(bone_crc, 0)
|
||||
|
||||
def _write_skl2(skl2: Writer, anim: Animation):
|
||||
bones = anim.bone_frames.keys()
|
||||
skl2.write_u32(len(bones))
|
||||
|
||||
for bone_crc in bones:
|
||||
skl2.write_u32(bone_crc, 0) #default values from docs
|
||||
skl2.write_f32(1.0, 0.0, 0.0)
|
||||
|
||||
'''
|
||||
ANIMATION CHUNKS
|
||||
'''
|
||||
def _write_anm2(anm2: Writer, anim: Animation):
|
||||
|
||||
with anm2.create_child("CYCL") as cycl:
|
||||
|
||||
cycl.write_u32(1)
|
||||
cycl.write_string(anim.name)
|
||||
|
||||
for _ in range(63 - len(anim.name)):
|
||||
cycl.write_u8(0)
|
||||
|
||||
cycl.write_f32(anim.framerate)
|
||||
cycl.write_u32(0) #what does play style refer to?
|
||||
cycl.write_u32(anim.start_index, anim.end_index) #first frame indices
|
||||
|
||||
|
||||
with anm2.create_child("KFR3") as kfr3:
|
||||
|
||||
kfr3.write_u32(len(anim.bone_frames))
|
||||
|
||||
for bone_crc in anim.bone_frames:
|
||||
kfr3.write_u32(bone_crc)
|
||||
kfr3.write_u32(0) #what is keyframe type?
|
||||
|
||||
translation_frames, rotation_frames = anim.bone_frames[bone_crc]
|
||||
|
||||
kfr3.write_u32(len(translation_frames), len(rotation_frames))
|
||||
|
||||
for frame in translation_frames:
|
||||
kfr3.write_u32(frame.index)
|
||||
kfr3.write_f32(frame.translation.x, frame.translation.y, frame.translation.z)
|
||||
|
||||
for frame in rotation_frames:
|
||||
kfr3.write_u32(frame.index)
|
||||
kfr3.write_f32(frame.rotation.x, frame.rotation.y, frame.rotation.z, frame.rotation.w)
|
||||
|
|
@ -0,0 +1,85 @@
|
|||
""" Contains Scene object for representing a .msh file and the function to create one
|
||||
from a Blender scene. """
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List, Dict
|
||||
from copy import copy
|
||||
import bpy
|
||||
from mathutils import Vector
|
||||
from .msh_model import Model, Animation, ModelType
|
||||
from .msh_scene import Scene, SceneAABB
|
||||
from .msh_model_gather import gather_models
|
||||
from .msh_model_utilities import sort_by_parent, has_multiple_root_models, reparent_model_roots, get_model_world_matrix, inject_dummy_data
|
||||
from .msh_model_triangle_strips import create_models_triangle_strips
|
||||
from .msh_material import *
|
||||
from .msh_material_gather import gather_materials
|
||||
from .msh_material_utilities import remove_unused_materials
|
||||
from .msh_utilities import *
|
||||
from .msh_anim_gather import extract_anim
|
||||
|
||||
|
||||
|
||||
def create_scene(generate_triangle_strips: bool, apply_modifiers: bool, export_target: str, skel_only: bool, export_anim: bool) -> Scene:
|
||||
""" Create a msh Scene from the active Blender scene. """
|
||||
|
||||
scene = Scene()
|
||||
|
||||
scene.name = bpy.context.scene.name
|
||||
|
||||
scene.materials = gather_materials()
|
||||
|
||||
scene.models, armature_obj = gather_models(apply_modifiers=apply_modifiers, export_target=export_target, skeleton_only=skel_only)
|
||||
scene.models = sort_by_parent(scene.models)
|
||||
|
||||
if generate_triangle_strips:
|
||||
scene.models = create_models_triangle_strips(scene.models)
|
||||
else:
|
||||
for model in scene.models:
|
||||
if model.geometry:
|
||||
for segment in model.geometry:
|
||||
segment.triangle_strips = segment.triangles
|
||||
|
||||
if has_multiple_root_models(scene.models):
|
||||
scene.models = reparent_model_roots(scene.models)
|
||||
|
||||
scene.materials = remove_unused_materials(scene.materials, scene.models)
|
||||
|
||||
|
||||
root = scene.models[0]
|
||||
|
||||
if export_anim:
|
||||
if armature_obj is not None:
|
||||
scene.animation = extract_anim(armature_obj, root.name)
|
||||
else:
|
||||
raise Exception("Export Error: Could not find an armature object from which to export an animation!")
|
||||
|
||||
if skel_only and root.model_type == ModelType.NULL:
|
||||
# For ZenAsset
|
||||
inject_dummy_data(root)
|
||||
|
||||
return scene
|
||||
|
||||
|
||||
def create_scene_aabb(scene: Scene) -> SceneAABB:
|
||||
""" Create a SceneAABB for a Scene. """
|
||||
|
||||
global_aabb = SceneAABB()
|
||||
|
||||
for model in scene.models:
|
||||
if model.geometry is None or model.hidden:
|
||||
continue
|
||||
|
||||
model_world_matrix = get_model_world_matrix(model, scene.models)
|
||||
model_aabb = SceneAABB()
|
||||
|
||||
for segment in model.geometry:
|
||||
segment_aabb = SceneAABB()
|
||||
|
||||
for pos in segment.positions:
|
||||
segment_aabb.integrate_position(model_world_matrix @ pos)
|
||||
|
||||
model_aabb.integrate_aabb(segment_aabb)
|
||||
|
||||
global_aabb.integrate_aabb(model_aabb)
|
||||
|
||||
return global_aabb
|
|
@ -0,0 +1,48 @@
|
|||
""" Keeps track of exact skeleton when imported. Possibly needed for exporting skeleton-compatible animations. Will
|
||||
probably be needed (with a matrix property) if we:
|
||||
- add tip-to-tail adjustment and/or omit roots/effectors for imported skeletons to keep track of the original bone transforms
|
||||
- add some sort of basepose-adjustment animation import option for already imported skeletons
|
||||
|
||||
I guess this might not need a panel, but I included it because the docs might need to reference it and
|
||||
people may want to exclude certain bones without deleting keyframes.
|
||||
"""
|
||||
|
||||
import bpy
|
||||
from bpy.props import StringProperty
|
||||
from bpy.types import PropertyGroup
|
||||
|
||||
|
||||
class SkeletonProperties(PropertyGroup):
|
||||
name: StringProperty(name="Name", default="Bone Name")
|
||||
|
||||
|
||||
|
||||
class SkeletonPropertiesPanel(bpy.types.Panel):
|
||||
""" Creates a Panel in the Object properties window """
|
||||
bl_label = "SWBF Skeleton Properties"
|
||||
bl_idname = "SKELETON_PT_swbf_msh"
|
||||
bl_space_type = 'PROPERTIES'
|
||||
bl_region_type = 'WINDOW'
|
||||
bl_context = "data"
|
||||
bl_options = {'DEFAULT_CLOSED'}
|
||||
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return context.object.type == 'ARMATURE' and context.object.data.swbf_msh_skel and len(context.object.data.swbf_msh_skel) > 0
|
||||
|
||||
|
||||
def draw(self, context):
|
||||
if context.object is None:
|
||||
return
|
||||
|
||||
layout = self.layout
|
||||
|
||||
skel_props = context.object.data.swbf_msh_skel
|
||||
|
||||
layout.label(text = "Bones In MSH Skeleton: ")
|
||||
|
||||
for prop in skel_props:
|
||||
layout.prop(prop, "name")
|
||||
|
||||
|
|
@ -0,0 +1,211 @@
|
|||
""" Helpers for SWBF skeleton-armature mapping """
|
||||
|
||||
import bpy
|
||||
import math
|
||||
|
||||
from typing import List, Set, Dict, Tuple
|
||||
|
||||
from .msh_scene import Scene
|
||||
from .msh_model import *
|
||||
from .msh_model_utilities import *
|
||||
|
||||
from .crc import *
|
||||
|
||||
|
||||
def has_preserved_skeleton(armature : bpy.types.Armature):
|
||||
return len(armature.data.swbf_msh_skel) > 0
|
||||
|
||||
|
||||
|
||||
'''Returns all bones that should be marked as BONE'''
|
||||
def get_real_BONES(armature: bpy.types.Armature) -> Set[str]:
|
||||
|
||||
# First priority, add the names of the skeleton preserved on import
|
||||
skel_props = armature.data.swbf_msh_skel
|
||||
|
||||
# Second, add all keyed bones
|
||||
action = armature.animation_data.action if armature.animation_data else None
|
||||
|
||||
# Third, just add all bones in armature
|
||||
|
||||
# Set of bones to include
|
||||
real_bones : Set[str] = set()
|
||||
|
||||
if len(skel_props) > 0:
|
||||
for bone in skel_props:
|
||||
#print(f"{bone.name} is a real BONE")
|
||||
real_bones.add(bone.name)
|
||||
elif action:
|
||||
for group in armature.animation_data.action.groups:
|
||||
#print(f"{group.name} is a real BONE")
|
||||
real_bones.add(group.name)
|
||||
else:
|
||||
for bone in armature.data.bones:
|
||||
#print(f"{bone.name} is a real BONE")
|
||||
real_bones.add(bone.name)
|
||||
|
||||
return real_bones
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
'''
|
||||
Creates armature from the required nodes.
|
||||
Assumes the required_skeleton is already sorted by parent.
|
||||
|
||||
Uses model_map to get the world matrix of each bone (hacky, see NOTE)
|
||||
'''
|
||||
def required_skeleton_to_armature(required_skeleton : List[Model], model_map : Dict[str, bpy.types.Object], msh_scene : Scene) -> bpy.types.Object:
|
||||
|
||||
armature = bpy.data.armatures.new("skeleton")
|
||||
armature_obj = bpy.data.objects.new("skeleton", armature)
|
||||
bpy.context.view_layer.active_layer_collection.collection.objects.link(armature_obj)
|
||||
|
||||
|
||||
bones_set = set([model.name for model in required_skeleton])
|
||||
|
||||
armature_obj.select_set(True)
|
||||
bpy.context.view_layer.objects.active = armature_obj
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
for bone in required_skeleton:
|
||||
|
||||
edit_bone = armature.edit_bones.new(bone.name)
|
||||
|
||||
if bone.parent and bone.parent in bones_set:
|
||||
edit_bone.parent = armature.edit_bones[bone.parent]
|
||||
|
||||
'''
|
||||
NOTE: I recall there being some rare issue with the get_world_matrix utility func.
|
||||
Never bothered to figure it out and referencing the bone object's world mat always works.
|
||||
Bone objects will be deleted later.
|
||||
'''
|
||||
bone_obj = model_map[bone.name]
|
||||
|
||||
edit_bone.matrix = bone_obj.matrix_world
|
||||
edit_bone.tail = bone_obj.matrix_world @ Vector((0.0,1.0,0.0))
|
||||
|
||||
bone_children = [b for b in get_model_children(bone, required_skeleton)]
|
||||
|
||||
'''
|
||||
Perhaps we'll add an option for importing bones tip-to-tail, but that would
|
||||
require preserving their original transforms as changing the tail position
|
||||
changes the bones' transform...
|
||||
'''
|
||||
tail_pos = Vector()
|
||||
if bone_children:
|
||||
for bone_child in bone_children:
|
||||
tail_pos += bone_obj.matrix_world.translation
|
||||
tail_pos = tail_pos / len(bone_children)
|
||||
edit_bone.length = .5 #(tail_pos - edit_bone.head).magnitude
|
||||
else:
|
||||
bone_length = .5# edit_bone.parent.length if edit_bone.parent is not None else .5
|
||||
edit_bone.tail = bone_obj.matrix_world @ Vector((0.0,bone_length,0.0))
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
armature_obj.select_set(True)
|
||||
bpy.context.view_layer.update()
|
||||
|
||||
return armature_obj
|
||||
|
||||
|
||||
|
||||
|
||||
'''
|
||||
Ok, so this method is crucial. What this does is:
|
||||
1) Find all nodes that are weighted to by skinned segments.
|
||||
2) A node must be included in the armature if it:
|
||||
- is in SKL2 and is not the scene root
|
||||
- has model_type == BONE
|
||||
- is weighted to
|
||||
- has a parent and child that must be in the armature
|
||||
|
||||
This may need a lot of adjustments; I don't think I can prove its validity, but it has worked very well
|
||||
and handles all stock + ZETools + Pandemic XSI exporter models I've tested
|
||||
'''
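# A hedged illustration with a made-up hierarchy: for root/bone_a/eff_b/bone_c plus a skin weighted
# to bone_a and bone_c, the required skeleton comes out as [bone_a, eff_b, bone_c]; the scene root is
# excluded and eff_b is kept only because it sits between two required bones.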
|
||||
def extract_required_skeleton(scene: Scene) -> List[Model]:
|
||||
|
||||
# Will map Model names to Models in scene, for convenience
|
||||
model_dict : Dict[str, Model] = {}
|
||||
|
||||
'''
|
||||
Will contain hashes of all models that definitely need to be in the skeleton/armature.
|
||||
We initialize it with the contents of SKL2 i.e. the nodes that are animated.
|
||||
For now this includes the scene root, but that'll be excluded later.
|
||||
'''
|
||||
skeleton_hashes = set(scene.skeleton)
|
||||
|
||||
'''
|
||||
We also need to add all nodes that are weighted to. These are not necessarily in
|
||||
SKL2, as SKL2 seems to only reference nodes that are keyframed.
|
||||
However, sometimes SKL2 is not included when it should be, but it can be mostly recovered
|
||||
by checking which models are BONEs.
|
||||
'''
|
||||
for model in scene.models:
|
||||
model_dict[model.name] = model
|
||||
|
||||
if model.model_type == ModelType.BONE:
|
||||
skeleton_hashes.add(to_crc(model.name))
|
||||
|
||||
elif model.geometry:
|
||||
for seg in model.geometry:
|
||||
if seg.weights:
|
||||
for weight_set in seg.weights:
|
||||
for weight in weight_set:
|
||||
model_weighted_to = scene.models[weight.bone]
|
||||
|
||||
if to_crc(model_weighted_to.name) not in skeleton_hashes:
|
||||
skeleton_hashes.add(to_crc(model_weighted_to.name))
|
||||
|
||||
# The result of this function (to be sorted by parent)
|
||||
required_skeleton_models = []
|
||||
|
||||
# Set of nodes to be included in required skeleton/were visited
|
||||
visited_nodes = set()
|
||||
|
||||
'''
|
||||
Here we add all skeleton nodes (except root) and any necessary ancestors to the armature.
|
||||
- e.g. in bone_x/eff_x/eff_y, the effectors do not have to be in armature, as they are not ancestors of a bone
|
||||
- but in bone_x/eff_x/eff_y/bone_y, they do.
|
||||
'''
|
||||
for bone in sort_by_parent(scene.models):
|
||||
|
||||
# make sure we exclude the scene root and any nodes irrelevant to the armature
|
||||
if not bone.parent or to_crc(bone.name) not in skeleton_hashes:
|
||||
continue
|
||||
|
||||
potential_bones = [bone]
|
||||
visited_nodes.add(bone.name)
|
||||
|
||||
# Stacked transform will be needed if we decide to include an option for excluding effectors/roots or
|
||||
# connecting bones tip-to-tail
|
||||
#stacked_transform = model_transform_to_matrix(bone.transform)
|
||||
|
||||
curr_ancestor = model_dict[bone.parent]
|
||||
|
||||
while True:
|
||||
|
||||
# If we hit a non-skin scene root, that means we just add the bone we started with, no ancestors.
|
||||
if not curr_ancestor.parent and curr_ancestor.model_type != ModelType.SKIN:
|
||||
required_skeleton_models.append(bone)
|
||||
visited_nodes.add(bone.name)
|
||||
break
|
||||
|
||||
# If we encounter another bone, a skin, or a previously visited object, we need to add the bone and its
|
||||
# ancestors.
|
||||
elif to_crc(curr_ancestor.name) in scene.skeleton or curr_ancestor.model_type == ModelType.SKIN or curr_ancestor.name in visited_nodes:
|
||||
for potential_bone in potential_bones:
|
||||
required_skeleton_models.append(potential_bone)
|
||||
visited_nodes.add(potential_bone.name)
|
||||
break
|
||||
|
||||
# Add ancestor to potential bones, update next ancestor
|
||||
else:
|
||||
if curr_ancestor.name not in visited_nodes:
|
||||
potential_bones.insert(0, curr_ancestor)
|
||||
curr_ancestor = model_dict[curr_ancestor.parent]
|
||||
|
||||
#stacked_transform = model_transform_to_matrix(curr_ancestor.transform) @ stacked_transform
|
||||
|
||||
return required_skeleton_models
|
|
@ -0,0 +1,374 @@
|
|||
""" Gathers the Blender objects from the current scene and returns them as a list of
|
||||
Model objects. """
|
||||
|
||||
import bpy
|
||||
import bmesh
|
||||
import math
|
||||
|
||||
from enum import Enum
|
||||
from typing import List, Set, Dict, Tuple
|
||||
|
||||
from .msh_scene import Scene
|
||||
from .msh_material_to_blend import *
|
||||
from .msh_model import *
|
||||
from .msh_skeleton_utilities import *
|
||||
from .msh_model_gather import get_is_model_hidden
|
||||
|
||||
|
||||
from .crc import *
|
||||
|
||||
import os
|
||||
|
||||
|
||||
|
||||
# Extracts and applies anims in the scene to the currently selected armature
|
||||
def extract_and_apply_anim(filename : str, scene : Scene):
|
||||
|
||||
arma = bpy.context.view_layer.objects.active
|
||||
|
||||
if not arma or arma.type != 'ARMATURE':
|
||||
raise Exception("Select an armature to attach the imported animation to!")
|
||||
|
||||
if scene.animation is None:
|
||||
raise Exception("No animation found in msh file!")
|
||||
|
||||
else:
|
||||
head, tail = os.path.split(filename)
|
||||
anim_name = tail.split(".")[0]
|
||||
action = bpy.data.actions.new(anim_name)
|
||||
action.use_fake_user = True
|
||||
|
||||
if not arma.animation_data:
|
||||
arma.animation_data_create()
|
||||
|
||||
|
||||
# Record the starting transforms of each bone. Pose space is relative
|
||||
# to bones starting transforms. Starting = in edit mode
|
||||
bone_bind_poses = {}
|
||||
|
||||
bpy.context.view_layer.objects.active = arma
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
for edit_bone in arma.data.edit_bones:
|
||||
if edit_bone.parent:
|
||||
bone_local = edit_bone.parent.matrix.inverted() @ edit_bone.matrix
|
||||
else:
|
||||
bone_local = arma.matrix_local @ edit_bone.matrix
|
||||
|
||||
bone_bind_poses[edit_bone.name] = bone_local.inverted()
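# The inverted rest-pose matrix is applied to each imported keyframe below (bind_mat @ frame transform),
# which re-expresses the .msh keyframes relative to the bone's rest pose, i.e. in Blender pose space.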
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
|
||||
for bone in arma.pose.bones:
|
||||
if to_crc(bone.name) in scene.animation.bone_frames:
|
||||
|
||||
bind_mat = bone_bind_poses[bone.name]
|
||||
|
||||
translation_frames, rotation_frames = scene.animation.bone_frames[to_crc(bone.name)]
|
||||
|
||||
loc_data_path = "pose.bones[\"{}\"].location".format(bone.name)
|
||||
rot_data_path = "pose.bones[\"{}\"].rotation_quaternion".format(bone.name)
|
||||
|
||||
|
||||
fcurve_rot_w = action.fcurves.new(rot_data_path, index=0, action_group=bone.name)
|
||||
fcurve_rot_x = action.fcurves.new(rot_data_path, index=1, action_group=bone.name)
|
||||
fcurve_rot_y = action.fcurves.new(rot_data_path, index=2, action_group=bone.name)
|
||||
fcurve_rot_z = action.fcurves.new(rot_data_path, index=3, action_group=bone.name)
|
||||
|
||||
for frame in rotation_frames:
|
||||
i = frame.index
|
||||
q = (bind_mat @ convert_rotation_space(frame.rotation).to_matrix().to_4x4()).to_quaternion()
|
||||
|
||||
fcurve_rot_w.keyframe_points.insert(i,q.w)
|
||||
fcurve_rot_x.keyframe_points.insert(i,q.x)
|
||||
fcurve_rot_y.keyframe_points.insert(i,q.y)
|
||||
fcurve_rot_z.keyframe_points.insert(i,q.z)
|
||||
|
||||
fcurve_loc_x = action.fcurves.new(loc_data_path, index=0, action_group=bone.name)
|
||||
fcurve_loc_y = action.fcurves.new(loc_data_path, index=1, action_group=bone.name)
|
||||
fcurve_loc_z = action.fcurves.new(loc_data_path, index=2, action_group=bone.name)
|
||||
|
||||
for frame in translation_frames:
|
||||
i = frame.index
|
||||
t = (bind_mat @ Matrix.Translation(convert_vector_space(frame.translation))).translation
|
||||
|
||||
fcurve_loc_x.keyframe_points.insert(i,t.x)
|
||||
fcurve_loc_y.keyframe_points.insert(i,t.y)
|
||||
fcurve_loc_z.keyframe_points.insert(i,t.z)
|
||||
|
||||
arma.animation_data.action = action
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Create the msh hierarchy. Armatures are not created here. Much of this could use some optimization...
|
||||
def extract_models(scene: Scene, materials_map : Dict[str, bpy.types.Material]) -> Dict[str, bpy.types.Object]:
|
||||
|
||||
# This will be filled with model names -> Blender objects and returned
|
||||
model_map : Dict[str, bpy.types.Object] = {}
|
||||
|
||||
sorted_models : List[Model] = sort_by_parent(scene.models)
|
||||
|
||||
for model in sorted_models:
|
||||
new_obj = None
|
||||
|
||||
|
||||
if model.model_type == ModelType.STATIC or model.model_type == ModelType.SKIN or model.model_type == ModelType.SHADOWVOLUME:
|
||||
|
||||
new_mesh = bpy.data.meshes.new(model.name)
|
||||
verts = []
|
||||
faces = []
|
||||
offset = 0
|
||||
|
||||
full_texcoords = []
|
||||
|
||||
weights_offsets = {}
|
||||
|
||||
face_range_to_material_index = []
|
||||
|
||||
if model.geometry:
|
||||
|
||||
#if model.collisionprimitive is None:
|
||||
# print(f"On model: {model.name}")
|
||||
|
||||
for i,seg in enumerate(model.geometry):
|
||||
|
||||
verts += [tuple(convert_vector_space(v)) for v in seg.positions]
|
||||
|
||||
#if model.collisionprimitive is None:
|
||||
# print("Importing segment with material: {} with and {} verts".format(seg.material_name, len(seg.positions)))
|
||||
|
||||
if seg.weights:
|
||||
weights_offsets[offset] = seg.weights
|
||||
|
||||
if seg.texcoords is not None:
|
||||
full_texcoords += seg.texcoords
|
||||
else:
|
||||
full_texcoords += [(0.0,0.0) for _ in range(len(seg.positions))]
|
||||
|
||||
face_range_lower = len(faces)
|
||||
|
||||
if seg.triangles:
|
||||
faces += [tuple([ind + offset for ind in tri]) for tri in seg.triangles]
|
||||
else:
|
||||
for strip in seg.triangle_strips:
|
||||
for tri_start in range(len(strip) - 2):
|
||||
face = tuple([offset + strip[j] for j in range(tri_start, tri_start + 3)])
|
||||
faces.append(face)
|
||||
|
||||
face_range_upper = len(faces)
|
||||
face_range_to_material_index.append((face_range_lower, face_range_upper, i))
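# Each entry records the half-open range of face indices [lower, upper) contributed by this segment
# together with the segment's index, which is later used as the face's material slot index.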
|
||||
|
||||
offset += len(seg.positions)
|
||||
|
||||
new_mesh.from_pydata(verts, [], faces)
|
||||
new_mesh.update()
|
||||
new_mesh.validate()
|
||||
|
||||
|
||||
# If tex coords are present, add material and UV data
|
||||
if full_texcoords:
|
||||
|
||||
edit_mesh = bmesh.new()
|
||||
edit_mesh.from_mesh(new_mesh)
|
||||
|
||||
uvlayer = edit_mesh.loops.layers.uv.verify()
|
||||
|
||||
for edit_mesh_face in edit_mesh.faces:
|
||||
face_index = edit_mesh_face.index
|
||||
mesh_face = faces[face_index]
|
||||
|
||||
for frL, frU, ind in face_range_to_material_index:
|
||||
if face_index >= frL and face_index < frU:
|
||||
edit_mesh_face.material_index = ind
|
||||
|
||||
for i,loop in enumerate(edit_mesh_face.loops):
|
||||
|
||||
texcoord = full_texcoords[mesh_face[i]]
|
||||
loop[uvlayer].uv = tuple([texcoord.x, texcoord.y])
|
||||
|
||||
edit_mesh.to_mesh(new_mesh)
|
||||
edit_mesh.free()
|
||||
|
||||
|
||||
new_obj = bpy.data.objects.new(new_mesh.name, new_mesh)
|
||||
|
||||
|
||||
vertex_groups_indicies = {}
|
||||
|
||||
for offset in weights_offsets:
|
||||
for i, weight_set in enumerate(weights_offsets[offset]):
|
||||
for weight in weight_set:
|
||||
index = weight.bone
|
||||
|
||||
if index not in vertex_groups_indicies:
|
||||
model_name = scene.models[index].name
|
||||
vertex_groups_indicies[index] = new_obj.vertex_groups.new(name=model_name)
|
||||
|
||||
vertex_groups_indicies[index].add([offset + i], weight.weight, 'ADD')
|
||||
|
||||
|
||||
'''
|
||||
Assign Material slots
|
||||
'''
|
||||
if model.geometry:
|
||||
for seg in model.geometry:
|
||||
if seg.material_name:
|
||||
material = materials_map[seg.material_name]
|
||||
new_obj.data.materials.append(material)
|
||||
|
||||
else:
|
||||
|
||||
new_obj = bpy.data.objects.new(model.name, None)
|
||||
new_obj.empty_display_size = 1
|
||||
new_obj.empty_display_type = 'PLAIN_AXES'
|
||||
|
||||
|
||||
model_map[model.name] = new_obj
|
||||
|
||||
if model.parent:
|
||||
new_obj.parent = model_map[model.parent]
|
||||
|
||||
new_obj.location = convert_vector_space(model.transform.translation)
|
||||
new_obj.rotation_mode = "QUATERNION"
|
||||
new_obj.rotation_quaternion = convert_rotation_space(model.transform.rotation)
|
||||
|
||||
if model.collisionprimitive is not None:
|
||||
new_obj.swbf_msh_coll_prim.prim_type = model.collisionprimitive.shape.value
|
||||
|
||||
bpy.context.collection.objects.link(new_obj)
|
||||
|
||||
|
||||
return model_map
|
||||
|
||||
|
||||
# TODO: Add to custom material info struct, maybe some material conversion/import?
|
||||
def extract_materials(folder_path: str, scene: Scene) -> Dict[str, bpy.types.Material]:
|
||||
|
||||
extracted_materials : Dict[str, bpy.types.Material] = {}
|
||||
|
||||
for material_name, material in scene.materials.items():
|
||||
|
||||
new_mat = bpy.data.materials.new(name=material_name)
|
||||
new_mat.use_nodes = True
|
||||
bsdf = new_mat.node_tree.nodes["Principled BSDF"]
|
||||
|
||||
diffuse_texture_path = find_texture_path(folder_path, material.texture0)
|
||||
|
||||
if diffuse_texture_path:
|
||||
texImage = new_mat.node_tree.nodes.new('ShaderNodeTexImage')
|
||||
texImage.image = bpy.data.images.load(diffuse_texture_path)
|
||||
new_mat.node_tree.links.new(bsdf.inputs['Base Color'], texImage.outputs['Color'])
|
||||
|
||||
fill_material_props(material, new_mat.swbf_msh_mat)
|
||||
|
||||
extracted_materials[material_name] = new_mat
|
||||
|
||||
return extracted_materials
|
||||
|
||||
|
||||
|
||||
def extract_scene(filepath: str, scene: Scene):
|
||||
|
||||
folder = os.path.join(os.path.dirname(filepath),"")
|
||||
|
||||
# material_map maps Material names to Blender materials
|
||||
material_map = extract_materials(folder, scene)
|
||||
|
||||
# model_map maps Model names to Blender objects.
|
||||
model_map = extract_models(scene, material_map)
|
||||
|
||||
|
||||
# skel contains all models needed in an armature
|
||||
skel = extract_required_skeleton(scene)
|
||||
|
||||
# Create the armature if skel is non-empty
|
||||
armature = None if not skel else required_skeleton_to_armature(skel, model_map, scene)
|
||||
|
||||
if armature is not None:
|
||||
preserved_skel = armature.data.swbf_msh_skel
|
||||
for model in scene.models:
|
||||
if to_crc(model.name) in scene.skeleton or model.model_type == ModelType.BONE:
|
||||
entry = preserved_skel.add()
|
||||
entry.name = model.name
|
||||
|
||||
|
||||
'''
|
||||
If an armature was created, we need to do a few extra
|
||||
things to ensure the import makes sense in Blender. It can
|
||||
get a bit messy, as XSI + SWBF have very loose requirements
|
||||
when it comes to skin-skeleton parentage.
|
||||
|
||||
If not, we're good.
|
||||
'''
|
||||
if armature is not None:
|
||||
|
||||
has_skin = False
|
||||
|
||||
# Handle armature related parenting
|
||||
for curr_model in scene.models:
|
||||
|
||||
curr_obj = model_map[curr_model.name]
|
||||
|
||||
# Parent all skins to armature
|
||||
if curr_model.model_type == ModelType.SKIN:
|
||||
|
||||
has_skin = True
|
||||
|
||||
curr_obj.parent = armature
|
||||
curr_obj.parent_type = 'ARMATURE'
|
||||
|
||||
# Parent the object to a bone if necessary
|
||||
else:
|
||||
if curr_model.parent in armature.data.bones and curr_model.name not in armature.data.bones:
|
||||
# Not sure what the different mats do, but saving the worldmat and
|
||||
# applying it after clearing the other mats yields correct results...
|
||||
worldmat = curr_obj.matrix_world
|
||||
|
||||
curr_obj.parent = armature
|
||||
curr_obj.parent_type = 'BONE'
|
||||
curr_obj.parent_bone = curr_model.parent
|
||||
# ''
|
||||
curr_obj.matrix_basis = Matrix()
|
||||
curr_obj.matrix_parent_inverse = Matrix()
|
||||
curr_obj.matrix_world = worldmat
|
||||
|
||||
'''
|
||||
Sometimes skins are parented to other skins. We need to find the skin highest in the hierarchy and
|
||||
parent all skins to its parent (armature_reparent_obj).
|
||||
|
||||
If no skin exists, we just reparent the armature to the parent of the highest node in the skeleton.
|
||||
'''
|
||||
armature_reparent_obj = None
|
||||
if has_skin:
|
||||
for model in sort_by_parent(scene.models):
|
||||
if model.model_type == ModelType.SKIN:
|
||||
armature_reparent_obj = None if not model.parent else model_map[model.parent]
|
||||
else:
|
||||
skeleton_parent_name = skel[0].parent
|
||||
for model in scene.models:
|
||||
if model.name == skeleton_parent_name:
|
||||
armature_reparent_obj = None if not skeleton_parent_name else model_map[skeleton_parent_name]
|
||||
|
||||
# Now we reparent the armature to the node (armature_reparent_obj) we just found
|
||||
if armature_reparent_obj is not None and armature.name != armature_reparent_obj.name:
|
||||
armature.parent = armature_reparent_obj
|
||||
|
||||
|
||||
# If a bone exists in the armature, delete its
|
||||
# object counterpart (as created in extract_models)
|
||||
for bone in skel:
|
||||
model_to_remove = model_map[bone.name]
|
||||
if model_to_remove:
|
||||
bpy.data.objects.remove(model_to_remove, do_unlink=True)
|
||||
model_map.pop(bone.name)
|
||||
|
||||
|
||||
# Lastly, hide all that is hidden in the msh scene
|
||||
for model in scene.models:
|
||||
if model.name in model_map:
|
||||
obj = model_map[model.name]
|
||||
if get_is_model_hidden(obj) and len(obj.children) == 0:
|
||||
obj.hide_set(True)
|
|
@ -1,6 +1,14 @@
|
|||
""" Misc utilities. """
|
||||
|
||||
from mathutils import Vector
|
||||
from typing import List
|
||||
|
||||
|
||||
def vec_to_str(vec):
|
||||
return "({:.4},{:.4},{:.4})".format(vec.x,vec.y,vec.z)
|
||||
|
||||
def quat_to_str(quat):
|
||||
return "({:.4},{:.4},{:.4},{:.4})".format(quat.w, quat.x, quat.y, quat.z)
|
||||
|
||||
def add_vec(l: Vector, r: Vector) -> Vector:
|
||||
return Vector(v0 + v1 for v0, v1 in zip(l, r))
|
||||
|
@ -29,3 +37,14 @@ def pack_color(color) -> int:
|
|||
packed |= (int(color[3] * 255.0 + 0.5) << 24)
|
||||
|
||||
return packed
|
||||
|
||||
def unpack_color(color: int) -> List[float]:
|
||||
|
||||
mask = int(0x000000ff)
|
||||
|
||||
r = ((color >> 16) & mask) / 255.0
|
||||
g = ((color >> 8) & mask) / 255.0
|
||||
b = (color & mask) / 255.0
|
||||
a = ((color >> 24) & mask) / 255.0
|
||||
|
||||
return [r,g,b,a]
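# Hedged round-trip sanity check (assuming pack_color above mirrors this channel layout):
# unpack_color(pack_color([1.0, 0.5, 0.25, 1.0])) ≈ [1.0, 0.502, 0.251, 1.0]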
|
||||
|
|
|
@ -0,0 +1,433 @@
|
|||
"""
|
||||
Script for reading zaabin/zaa files and applying the unmunged animation
|
||||
to the currently selected armature.
|
||||
|
||||
As regards decompress_curves, I should really make a separate AnimationSet
|
||||
dataclass instead of returning a convoluted nested dict.
|
||||
"""
|
||||
|
||||
import os
|
||||
import bpy
|
||||
import re
|
||||
|
||||
from .chunked_file_reader import Reader
|
||||
from .crc import *
|
||||
|
||||
from .msh_model import *
|
||||
from .msh_model_utilities import *
|
||||
from .msh_utilities import *
|
||||
|
||||
from typing import List, Set, Dict, Tuple
|
||||
|
||||
|
||||
debug = False
|
||||
|
||||
|
||||
# Returns curves nested as: anims -> bones -> components -> keyframes,
# i.e. {anim_crc: {bone_crc: [per-component {frame_index: value} curves]}}
|
||||
def decompress_curves(input_file) -> Dict[int, Dict[int, List[ Dict[int,float]]]]:
|
||||
|
||||
global debug
|
||||
|
||||
decompressed_anims: Dict[int, Dict[int, List[ Dict[int,float]]]] = {}
|
||||
|
||||
with Reader(input_file, debug=debug) as head:
|
||||
|
||||
# Don't read SMNA as a child, since it has a length field always set to 0...
|
||||
head.skip_until("SMNA")
|
||||
head.skip_bytes(20)
|
||||
num_anims = head.read_u16()
|
||||
|
||||
if debug:
|
||||
print("\nFile contains {} animations\n".format(num_anims))
|
||||
|
||||
head.skip_bytes(2)
|
||||
|
||||
anim_crcs = []
|
||||
anim_metadata = {}
|
||||
|
||||
head.skip_until("MINA")
|
||||
|
||||
# Read metadata (crc, num frames, num bones) for each anim
|
||||
with head.read_child() as mina:
|
||||
|
||||
for i in range(num_anims):
|
||||
|
||||
transBitFlags = mina.read_u32()
|
||||
mina.skip_bytes(4)
|
||||
|
||||
anim_crc = mina.read_u32()
|
||||
anim_crcs.append(anim_crc)
|
||||
|
||||
anim_metadata[anim_crc] = {
|
||||
"num_frames" : mina.read_u16(),
|
||||
"num_bones" : mina.read_u16(),
|
||||
"transBitFlags" : transBitFlags,
|
||||
}
|
||||
|
||||
|
||||
head.skip_until("TNJA")
|
||||
|
||||
# Read TADA offsets and quantization parameters for each rot + loc component, for each bone, for each anim
|
||||
with head.read_child() as tnja:
|
||||
|
||||
for i, anim_crc in enumerate(anim_crcs):
|
||||
|
||||
bone_params = {}
|
||||
bone_list = []
|
||||
|
||||
for _ in range(anim_metadata[anim_crc]["num_bones"]):
|
||||
|
||||
bone_crc = tnja.read_u32()
|
||||
|
||||
bone_list.append(bone_crc)
|
||||
|
||||
bone_params[bone_crc] = {
|
||||
"rot_offsets" : [tnja.read_u32() for _ in range(4)], # Offsets into TADA for rotation
|
||||
"loc_offsets" : [tnja.read_u32() for _ in range(3)], # and translation curves
|
||||
"qparams" : [tnja.read_f32() for _ in range(4)], # Translation quantization parameters, 3 biases, 1 multiplier
|
||||
}
|
||||
|
||||
anim_metadata[anim_crc]["bone_params"] = bone_params
|
||||
anim_metadata[anim_crc]["bone_list"] = bone_list
|
||||
|
||||
head.skip_until("TADA")
|
||||
|
||||
# Decompress/dequantize frame data into discrete per-component curves
|
||||
with head.read_child() as tada:
|
||||
|
||||
for anim_crc in anim_crcs:
|
||||
|
||||
decompressed_anims[anim_crc] = {}
|
||||
|
||||
num_frames = anim_metadata[anim_crc]["num_frames"]
|
||||
num_bones = anim_metadata[anim_crc]["num_bones"]
|
||||
|
||||
transBitFlags = anim_metadata[anim_crc]["transBitFlags"]
|
||||
|
||||
if debug:
|
||||
print("\n\tAnim hash: {} Num frames: {} Num joints: {}".format(hex(anim_crc), num_frames, num_bones))
|
||||
|
||||
for bone_num, bone_crc in enumerate(anim_metadata[anim_crc]["bone_list"]):
|
||||
|
||||
bone_curves = []
|
||||
|
||||
params_bone = anim_metadata[anim_crc]["bone_params"][bone_crc]
|
||||
|
||||
offsets_list = params_bone["rot_offsets"] + params_bone["loc_offsets"]
|
||||
qparams = params_bone["qparams"]
|
||||
|
||||
if debug:
|
||||
print("\n\t\tBone #{} hash: {}".format(bone_num,hex(bone_crc)))
|
||||
print("\n\t\tQParams: {}, {}, {}, {}".format(*qparams))
|
||||
|
||||
for o, start_offset in enumerate(offsets_list):
|
||||
|
||||
# Init curve dict
|
||||
curve : Dict[int,float] = {}
|
||||
|
||||
# Init accumulator
|
||||
accumulator = 0.0
|
||||
|
||||
|
||||
# 2047 = max val of signed 12 bit int, the (overwhelmingly) common compression amount.
|
||||
# This is used for all rotation components in the file, with no offset
|
||||
if o < 4:
|
||||
mult = 1 / 2047
|
||||
bias = 0.0
|
||||
|
||||
# Translations have specific quantization parameters; biases for each component and
|
||||
# a single multiplier for all three
|
||||
else:
|
||||
|
||||
mult = qparams[-1]
|
||||
bias = qparams[o - 4]
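# Illustrative (hypothetical) numbers: a stored i16 key of 100 with bias 0.5 and multiplier 0.01
# dequantizes to 0.5 + 100 * 0.01 = 1.5; rotation components instead use bias 0 and multiplier 1/2047.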
|
||||
|
||||
if debug:
|
||||
print("\n\t\t\tBias = {}, multiplier = {}".format(bias, mult))
|
||||
|
||||
if debug:
|
||||
print("\n\t\t\tOffset {}: {} ({}, {} remaining)".format(o,start_offset, tada.get_current_pos(), tada.how_much_left(tada.get_current_pos())))
|
||||
|
||||
# Skip to start of compressed data for component, as specified in TNJA
|
||||
tada.skip_bytes(start_offset)
|
||||
|
||||
|
||||
j = 0
|
||||
while (j < num_frames):
|
||||
accumulator = bias + mult * tada.read_i16()
|
||||
curve[j if j < num_frames else num_frames] = accumulator
|
||||
|
||||
if debug:
|
||||
print("\t\t\t\t{}: {}".format(j, accumulator))
|
||||
|
||||
j+=1
|
||||
|
||||
while (j < num_frames):
|
||||
|
||||
control = tada.read_i8()
|
||||
|
||||
# Reset the accumulator to next dequantized i16
|
||||
if control == -0x7f:
|
||||
if debug:
|
||||
print("\t\t\t\tControl: READING NEXT FRAME")
|
||||
break
|
||||
|
||||
# RLE: hold current accumulator for the next u8 frames
|
||||
elif control == -0x80:
|
||||
num_skips = tada.read_u8()
|
||||
if debug:
|
||||
print("\t\t\t\tControl: HOLDING FOR {} FRAMES".format(num_skips))
|
||||
j += num_skips
|
||||
|
||||
# If not a special value, increment accumulator by the dequantized i8
|
||||
# The bias is NOT applied here, only for accumulator resets
|
||||
else:
|
||||
accumulator += mult * float(control)
|
||||
curve[j if j < num_frames else num_frames] = accumulator
|
||||
if debug:
|
||||
print("\t\t\t\t{}: {}".format(j, accumulator))
|
||||
j+=1
|
||||
|
||||
curve[num_frames - 1] = accumulator
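# Hedged example of the control stream (hypothetical bytes): after an initial i16 key, the control
# bytes [3, -0x80, 5, -0x7f] mean: add 3 * mult to the accumulator, hold the current value for the
# next 5 frames, then break out to read the next full i16 key.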
|
||||
|
||||
tada.reset_pos()
|
||||
|
||||
bone_curves.append(curve)
|
||||
|
||||
decompressed_anims[anim_crc][bone_crc] = bone_curves
|
||||
|
||||
return decompressed_anims
|
||||
|
||||
|
||||
'''
|
||||
Gets the animation names from the supplied
|
||||
.anims file. Handy since .zaabin files often
|
||||
share a dir with a .anims file.
|
||||
'''
|
||||
|
||||
def read_anims_file(anims_file_path):
|
||||
|
||||
if not os.path.exists(anims_file_path):
|
||||
return []
|
||||
|
||||
with open(anims_file_path, 'r') as file:
|
||||
anims_text = file.read()
|
||||
|
||||
splits = anims_text.split('"')
|
||||
|
||||
if len(splits) > 1:
|
||||
return splits[1:-1:2]
|
||||
|
||||
return []
|
||||
|
||||
|
||||
|
||||
'''
|
||||
Unmunge the .zaa(bin) file and apply the resulting animation
|
||||
to the currently selected armature object.
|
||||
|
||||
Contains some bloated code for calculating the world transforms of each bone,
|
||||
for now this will work ONLY if the model was directly imported from a .msh file.
|
||||
'''
|
||||
|
||||
def extract_and_apply_munged_anim(input_file_path):
|
||||
|
||||
global debug
|
||||
|
||||
with open(input_file_path,"rb") as input_file:
|
||||
animation_set = decompress_curves(input_file)
|
||||
|
||||
anim_names = []
|
||||
if input_file_path.endswith(".zaabin"):
|
||||
anim_names = read_anims_file(input_file_path.replace(".zaabin", ".anims"))
|
||||
|
||||
arma = bpy.context.view_layer.objects.active
|
||||
if arma.type != 'ARMATURE':
|
||||
raise Exception("Select an armature to attach the imported animation to!")
|
||||
|
||||
if arma.animation_data is not None:
|
||||
arma.animation_data_clear()
|
||||
arma.animation_data_create()
|
||||
|
||||
|
||||
|
||||
"""
|
||||
When directly imported from .msh files,
|
||||
all skeleton models are saved as empties, since
|
||||
some are excluded from the actual armature (e.g. effectors and roots).
|
||||
|
||||
bone_bind_poses contains matrices for converting the transforms of
|
||||
bones found in .msh/.zaabin files to ones that'll fit the extracted armature.
|
||||
This will be replaced with the eventual importer release.
|
||||
"""
|
||||
|
||||
animated_bones = set()
|
||||
for anim_crc in animation_set:
|
||||
for bone_crc in animation_set[anim_crc]:
|
||||
animated_bones.add(bone_crc)
|
||||
|
||||
|
||||
bpy.context.view_layer.objects.active = arma
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
bone_bind_poses = {}
|
||||
|
||||
for edit_bone in arma.data.edit_bones:
|
||||
if to_crc(edit_bone.name) not in animated_bones:
|
||||
continue
|
||||
|
||||
curr_ancestor = edit_bone.parent
|
||||
while curr_ancestor is not None and to_crc(curr_ancestor.name) not in animated_bones:
|
||||
curr_ancestor = curr_ancestor.parent
|
||||
|
||||
if curr_ancestor:
|
||||
bind_mat = curr_ancestor.matrix.inverted() @ edit_bone.matrix
|
||||
else:
|
||||
bind_mat = arma.matrix_local @ edit_bone.matrix
|
||||
|
||||
bone_bind_poses[edit_bone.name] = bind_mat.inverted()
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
|
||||
if debug:
|
||||
print("Extracting {} animations from {}:".format(len(animation_set), input_file_path))
|
||||
|
||||
for anim_crc in animation_set:
|
||||
|
||||
found_anim = [anim_name for anim_name in anim_names if to_crc(anim_name) == anim_crc]
|
||||
if found_anim:
|
||||
anim_str = found_anim[0]
|
||||
else:
|
||||
anim_str = str(hex(anim_crc))
|
||||
|
||||
if debug:
|
||||
print("\tExtracting anim {}:".format(anim_str))
|
||||
|
||||
|
||||
#if anim_str in bpy.data.actions:
|
||||
# bpy.data.actions[anim_str].use_fake_user = False
|
||||
# bpy.data.actions.remove(bpy.data.actions[anim_str])
|
||||
|
||||
action = bpy.data.actions.new(anim_str)
|
||||
action.use_fake_user = True
|
||||
|
||||
animation = animation_set[anim_crc]
|
||||
|
||||
bone_crcs_list = [bone_crc_ for bone_crc_ in animation]
|
||||
|
||||
for bone_crc in sorted(bone_crcs_list):
|
||||
|
||||
bone_name = next((bone.name for bone in arma.pose.bones if to_crc(bone.name) == bone_crc), None)
|
||||
|
||||
if bone_name is None:
|
||||
continue
|
||||
|
||||
bone = arma.pose.bones[bone_name]
|
||||
|
||||
bone_crc = to_crc(bone.name)
|
||||
|
||||
if bone_crc not in animation:
|
||||
continue
|
||||
|
||||
bind_mat = bone_bind_poses[bone.name]
|
||||
loc_data_path = "pose.bones[\"{}\"].location".format(bone.name)
|
||||
rot_data_path = "pose.bones[\"{}\"].rotation_quaternion".format(bone.name)
|
||||
|
||||
bone_curves = animation[bone_crc]
|
||||
num_frames = max(bone_curves[0])
|
||||
|
||||
has_translation = bone_curves[4] is not None
|
||||
|
||||
if debug:
|
||||
print("\t\tBone {} has {} frames: ".format(bone_name, num_frames))
|
||||
|
||||
last_values = [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
|
||||
|
||||
def get_quat(index):
|
||||
nonlocal bone_curves, last_values
|
||||
|
||||
q = Quaternion()
|
||||
valmap = [1,2,3,0]
|
||||
|
||||
has_key = False
|
||||
|
||||
for i in range(4):
|
||||
curve = bone_curves[i]
|
||||
if index in curve:
|
||||
has_key = True
|
||||
last_values[i] = curve[index]
|
||||
q[valmap[i]] = last_values[i]
|
||||
|
||||
return q if has_key else None
|
||||
|
||||
def get_vec(index):
|
||||
nonlocal bone_curves, last_values
|
||||
|
||||
v = Vector()
|
||||
has_key = False
|
||||
|
||||
for i in range(4,7):
|
||||
curve = bone_curves[i]
|
||||
if index in curve:
|
||||
has_key = True
|
||||
last_values[i] = curve[index]
|
||||
v[i - 4] = last_values[i]
|
||||
|
||||
return v if has_key else None
|
||||
|
||||
fcurve_rot_w = action.fcurves.new(rot_data_path, index=0, action_group=bone.name)
|
||||
fcurve_rot_x = action.fcurves.new(rot_data_path, index=1, action_group=bone.name)
|
||||
fcurve_rot_y = action.fcurves.new(rot_data_path, index=2, action_group=bone.name)
|
||||
fcurve_rot_z = action.fcurves.new(rot_data_path, index=3, action_group=bone.name)
|
||||
|
||||
if has_translation:
|
||||
fcurve_loc_x = action.fcurves.new(loc_data_path, index=0, action_group=bone.name)
|
||||
fcurve_loc_y = action.fcurves.new(loc_data_path, index=1, action_group=bone.name)
|
||||
fcurve_loc_z = action.fcurves.new(loc_data_path, index=2, action_group=bone.name)
|
||||
|
||||
for frame in range(num_frames):
|
||||
|
||||
q = get_quat(frame)
|
||||
if q is not None:
|
||||
|
||||
if debug:
|
||||
print("\t\t\tRot key: ({}, {})".format(frame, quat_to_str(q)))
|
||||
|
||||
# Very bloated, but works for now
|
||||
q = (bind_mat @ convert_rotation_space(q).to_matrix().to_4x4()).to_quaternion()
|
||||
fcurve_rot_w.keyframe_points.insert(frame,q.w)
|
||||
fcurve_rot_x.keyframe_points.insert(frame,q.x)
|
||||
fcurve_rot_y.keyframe_points.insert(frame,q.y)
|
||||
fcurve_rot_z.keyframe_points.insert(frame,q.z)
|
||||
|
||||
if has_translation:
|
||||
|
||||
t = get_vec(frame)
|
||||
if t is not None:
|
||||
|
||||
if debug:
|
||||
print("\t\t\tPos key: ({}, {})".format(frame, vec_to_str(t)))
|
||||
|
||||
t = (bind_mat @ Matrix.Translation(convert_vector_space(t))).translation
|
||||
|
||||
fcurve_loc_x.keyframe_points.insert(frame,t.x)
|
||||
fcurve_loc_y.keyframe_points.insert(frame,t.y)
|
||||
fcurve_loc_z.keyframe_points.insert(frame,t.z)
|
||||
|
||||
arma.animation_data.action = action
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|