Compare commits

39 Commits

56f6ce6940
f44c7bfdf3
8bf2196991
1252a6d192
9fcdb3dfb7
dd17fe902e
226682de8b
69e959e7a3
188b270ad1
9a344d0652
ea82d24356
6e05bba9e5
9981b64d60
48aabaf8d4
329303e256
4970f974fd
d359975bd6
45782f9a2f
aa8a61175d
5692a60907
7cfa101d42
43b1f9650e
aa8f05dd42
9bd8479e31
8974131550
abd727d39a
f69ed3f143
f88b62c986
13a92e46c6
d6973e9793
5d86a88411
fe1e16a117
4e2f6bf423
bdcd4c4aa9
c3f5f0bed3
091e295649
40343a2f69
a962b4475e
637c3c2afa
@@ -2,7 +2,7 @@ bl_info = {
    'name': 'SWBF .msh Import-Export',
    'author': 'Will Snyder, SleepKiller',
    "version": (1, 0, 0),
    'blender': (3, 0, 0),
    'blender': (2, 80, 0),
    'location': 'File > Import-Export',
    'description': 'Export as SWBF .msh file',
    'warning': '',
@@ -55,14 +55,16 @@ if "bpy" in locals():
import bpy
from bpy_extras.io_utils import ExportHelper, ImportHelper
from bpy.props import BoolProperty, EnumProperty, CollectionProperty
from bpy.types import Operator
from .msh_scene_utilities import create_scene
from bpy.types import Operator, Menu
from .msh_scene_utilities import create_scene, set_scene_animation
from .msh_scene_save import save_scene
from .msh_scene_read import read_scene
from .msh_material_properties import *
from .msh_skeleton_properties import *
from .msh_collision_prim_properties import *
from .msh_to_blend import *
from .msh_material_operators import *
from .msh_scene_to_blend import *
from .msh_anim_to_blend import *
from .zaa_to_blend import *

@@ -103,32 +105,47 @@ class ExportMSH(Operator, ExportHelper):
        default=True
    )

    export_with_animation: BoolProperty(
        name="Export With Animation",
        description="Includes animation data extracted from the action currently set on armature.",
        default=False
    )

    export_as_skeleton: BoolProperty(
        name="Export Objects As Skeleton",
        description="Check if you intend to export skeleton data for consumption by ZenAsset.",
        default=False
    )
    animation_export: EnumProperty(name="Export Animation(s)",
        description="If/how animation data should be exported.",
        items=(
            ('NONE', "None", "Do not include animation data in the export."),
            ('ACTIVE', "Active", "Export animation extracted from the scene's Armature's active Action."),
            ('BATCH', "Batch", "Export a separate animation file for each Action in the scene.")
        ),
        default='NONE')


    def execute(self, context):

        with open(self.filepath, 'wb') as output_file:
            save_scene(
                output_file=output_file,
                scene=create_scene(
        scene, armature_obj = create_scene(
                generate_triangle_strips=self.generate_triangle_strips,
                apply_modifiers=self.apply_modifiers,
                export_target=self.export_target,
                skel_only=self.export_as_skeleton,
                export_anim=self.export_with_animation
                ),
            )
                skel_only=self.animation_export != 'NONE') # Exclude geometry data (except root stuff) if we're doing anims

        if self.animation_export != 'NONE' and not armature_obj:
            raise Exception("Could not find an armature object from which to export animations!")

        def write_scene_to_file(filepath : str, scene_to_write : Scene):
            with open(filepath, 'wb') as output_file:
                save_scene(output_file=output_file, scene=scene_to_write)

        if self.animation_export == 'ACTIVE':
            set_scene_animation(scene, armature_obj)
            write_scene_to_file(self.filepath, scene)

        elif self.animation_export == 'BATCH':
            export_dir = self.filepath if os.path.isdir(self.filepath) else os.path.dirname(self.filepath)

            for action in bpy.data.actions:
                anim_save_path = os.path.join(export_dir, action.name + ".msh")
                armature_obj.animation_data.action = action
                set_scene_animation(scene, armature_obj)
                write_scene_to_file(anim_save_path, scene)
        else:
            write_scene_to_file(self.filepath, scene)

        return {'FINISHED'}
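A minimal sketch, not part of the diff, of what the 'BATCH' branch above produces: one .msh file per Action, named after the Action and written into the directory of the chosen export path. The directory and Action names below are hypothetical.

import os

export_dir = "/tmp/msh_export"           # hypothetical export target directory
actions = ["walk", "run", "death"]       # hypothetical Action names in bpy.data.actions
paths = [os.path.join(export_dir, name + ".msh") for name in actions]
# -> ['/tmp/msh_export/walk.msh', '/tmp/msh_export/run.msh', '/tmp/msh_export/death.msh']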
@@ -139,13 +156,11 @@ def menu_func_export(self, context):


class ImportMSH(Operator, ImportHelper):
    """ Import an SWBF .msh file. """

    bl_idname = "swbf_msh.import"
    bl_label = "Import SWBF .msh File"
    bl_label = "Import SWBF .msh File(s)"
    filename_ext = ".msh"

    files: CollectionProperty(
@@ -160,8 +175,8 @@ class ImportMSH(Operator, ImportHelper):
    )

    animation_only: BoolProperty(
        name="Import Animation Only",
        description="Import animation and append as a new action to currently selected armature.",
        name="Import Animation(s)",
        description="Import one or more animations from the selected files and append each as a new Action to the currently selected Armature.",
        default=False
    )
@@ -208,6 +223,12 @@ def register():
    bpy.types.Material.swbf_msh_mat = bpy.props.PointerProperty(type=MaterialProperties)
    bpy.types.Armature.swbf_msh_skel = bpy.props.CollectionProperty(type=SkeletonProperties)

    bpy.utils.register_class(FillSWBFMaterialProperties)
    bpy.utils.register_class(VIEW3D_MT_SWBF)
    bpy.types.VIEW3D_MT_object_context_menu.append(draw_matfill_menu)

    bpy.utils.register_class(GenerateMaterialNodesFromSWBFProperties)

def unregister():
@@ -225,6 +246,14 @@ def unregister():
    bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)
    bpy.types.TOPBAR_MT_file_import.remove(menu_func_import)

    bpy.utils.unregister_class(FillSWBFMaterialProperties)

    bpy.utils.unregister_class(VIEW3D_MT_SWBF)
    bpy.types.VIEW3D_MT_object_context_menu.remove(draw_matfill_menu)

    bpy.utils.unregister_class(GenerateMaterialNodesFromSWBFProperties)


if __name__ == "__main__":
    register()
@@ -39,7 +39,7 @@ def extract_anim(armature: bpy.types.Armature, root_name: str) -> Animation:
    # If it doesn't have a preserved skeleton, then we add the scene root.
    # If it does have a preserved skeleton, any objects not animatable by blender (i.e. objects above the skeleton, scene root)
    # will be included in the preserved skeleton
    if not has_preserved_skeleton(armature):
    if len(armature.data.swbf_msh_skel):
        keyable_bones.add(root_name)

    # Subset of above bones to key with dummy frames (all bones not in armature)
addons/io_scene_swbf_msh/msh_anim_to_blend.py (new file, 106 lines)
@@ -0,0 +1,106 @@
""" Gathers the Blender objects from the current scene and returns them as a list of
    Model objects. """

import bpy
import bmesh
import math

from enum import Enum
from typing import List, Set, Dict, Tuple

from .msh_scene import Scene
from .msh_material_to_blend import *
from .msh_model import *
from .msh_skeleton_utilities import *
from .msh_skeleton_to_blend import *
from .msh_model_gather import get_is_model_hidden
from .msh_mesh_to_blend import model_to_mesh_object

from .crc import *

import os


# Extracts and applies anims in the scene to the currently selected armature
def extract_and_apply_anim(filename : str, scene : Scene):

    arma = bpy.context.view_layer.objects.active

    if not arma or arma.type != 'ARMATURE':
        raise Exception("Select an armature to attach the imported animation to!")

    if scene.animation is None:
        raise Exception("No animation found in msh file!")

    else:
        head, tail = os.path.split(filename)
        anim_name = tail.split(".")[0]

        if anim_name in bpy.data.actions:
            bpy.data.actions.remove(bpy.data.actions[anim_name], do_unlink=True)

        action = bpy.data.actions.new(anim_name)
        action.use_fake_user = True

        if not arma.animation_data:
            arma.animation_data_create()

        # Record the starting transforms of each bone. Pose space is relative
        # to bones starting transforms. Starting = in edit mode
        bone_bind_poses = {}

        bpy.context.view_layer.objects.active = arma
        bpy.ops.object.mode_set(mode='EDIT')

        for edit_bone in arma.data.edit_bones:
            if edit_bone.parent:
                bone_local = edit_bone.parent.matrix.inverted() @ edit_bone.matrix
            else:
                bone_local = arma.matrix_local @ edit_bone.matrix

            bone_bind_poses[edit_bone.name] = bone_local.inverted()

        bpy.ops.object.mode_set(mode='OBJECT')

        for bone in arma.pose.bones:
            if to_crc(bone.name) in scene.animation.bone_frames:

                bind_mat = bone_bind_poses[bone.name]

                translation_frames, rotation_frames = scene.animation.bone_frames[to_crc(bone.name)]

                loc_data_path = "pose.bones[\"{}\"].location".format(bone.name)
                rot_data_path = "pose.bones[\"{}\"].rotation_quaternion".format(bone.name)

                fcurve_rot_w = action.fcurves.new(rot_data_path, index=0, action_group=bone.name)
                fcurve_rot_x = action.fcurves.new(rot_data_path, index=1, action_group=bone.name)
                fcurve_rot_y = action.fcurves.new(rot_data_path, index=2, action_group=bone.name)
                fcurve_rot_z = action.fcurves.new(rot_data_path, index=3, action_group=bone.name)

                for frame in rotation_frames:
                    i = frame.index
                    q = (bind_mat @ convert_rotation_space(frame.rotation).to_matrix().to_4x4()).to_quaternion()

                    fcurve_rot_w.keyframe_points.insert(i,q.w)
                    fcurve_rot_x.keyframe_points.insert(i,q.x)
                    fcurve_rot_y.keyframe_points.insert(i,q.y)
                    fcurve_rot_z.keyframe_points.insert(i,q.z)

                fcurve_loc_x = action.fcurves.new(loc_data_path, index=0, action_group=bone.name)
                fcurve_loc_y = action.fcurves.new(loc_data_path, index=1, action_group=bone.name)
                fcurve_loc_z = action.fcurves.new(loc_data_path, index=2, action_group=bone.name)

                for frame in translation_frames:
                    i = frame.index
                    t = (bind_mat @ Matrix.Translation(convert_vector_space(frame.translation))).translation

                    fcurve_loc_x.keyframe_points.insert(i,t.x)
                    fcurve_loc_y.keyframe_points.insert(i,t.y)
                    fcurve_loc_z.keyframe_points.insert(i,t.z)

        arma.animation_data.action = action
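A minimal sketch, using assumed rest and frame values, of the pose-space conversion the fcurve code above performs: each imported frame is premultiplied by the inverted bind (rest) matrix of the bone, because Blender keyframes pose bones relative to their rest transforms. The mathutils calls mirror those used in the file; the input values are hypothetical.

from mathutils import Matrix, Quaternion, Vector

bind_local = Matrix.Identity(4)               # hypothetical rest transform of a bone
bind_inv = bind_local.inverted()

frame_rot = Quaternion((1.0, 0.0, 0.0, 0.0))  # hypothetical imported frame rotation (w, x, y, z)
frame_loc = Vector((0.0, 1.0, 0.0))           # hypothetical imported frame translation

pose_quat = (bind_inv @ frame_rot.to_matrix().to_4x4()).to_quaternion()
pose_loc = (bind_inv @ Matrix.Translation(frame_loc)).translation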
@@ -7,6 +7,8 @@ from .msh_material import *

from .msh_material_utilities import _RENDERTYPES_MAPPING

import os

def gather_materials() -> Dict[str, Material]:
    """ Gathers the Blender materials and returns them as
        a dictionary of strings and Material objects. """
@@ -35,16 +37,16 @@ def read_material(blender_material: bpy.types.Material) -> Material:
    result.data = _read_material_props_data(props)

    if "UNSUPPORTED" not in props.rendertype:
        result.texture0 = props.diffuse_map
        result.texture0 = os.path.basename(props.diffuse_map)
        result.texture1 = _read_normal_map_or_distortion_map_texture(props)
        result.texture2 = _read_detail_texture(props)
        result.texture3 = _read_envmap_texture(props)

    else:
        result.texture0 = props.texture_0
        result.texture1 = props.texture_1
        result.texture2 = props.texture_2
        result.texture3 = props.texture_3
        result.texture0 = os.path.basename(props.texture_0)
        result.texture1 = os.path.basename(props.texture_1)
        result.texture2 = os.path.basename(props.texture_2)
        result.texture3 = os.path.basename(props.texture_3)

    return result

@@ -96,11 +98,13 @@ def _read_material_props_data(props) -> Tuple[int, int]:

    return (props.detail_map_tiling_u, props.detail_map_tiling_v)


def _read_normal_map_or_distortion_map_texture(props) -> str:
    if "REFRACTION" in props.rendertype:
        return props.distortion_map
        return os.path.basename(props.distortion_map)
    if "NORMALMAPPED" in props.rendertype:
        return props.normal_map
        return os.path.basename(props.normal_map)

    return ""

@@ -108,10 +112,10 @@ def _read_detail_texture(props) -> str:
    if "REFRACTION" in props.rendertype:
        return ""

    return props.detail_map
    return os.path.basename(props.detail_map)

def _read_envmap_texture(props) -> str:
    if "ENVMAPPED" not in props.rendertype:
        return ""

    return props.environment_map
    return os.path.basename(props.environment_map)
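A minimal sketch of the change above, with a hypothetical path: texture references written to the .msh are reduced to bare filenames, so a material that points at an absolute path inside a Blender project still ends up referencing just the texture name the munge tools expect.

import os

diffuse_map = "C:/projects/side/textures/all_trooper.tga"   # hypothetical absolute path
texture0 = os.path.basename(diffuse_map)                      # -> "all_trooper.tga"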
addons/io_scene_swbf_msh/msh_material_operators.py (new file, 307 lines)
@@ -0,0 +1,307 @@
|
||||
""" Operators for basic emulation and mapping of SWBF material system in Blender.
|
||||
Only relevant if the builtin Eevee renderer is being used! """
|
||||
|
||||
import bpy
|
||||
|
||||
from .msh_material_properties import *
|
||||
|
||||
from math import sqrt
|
||||
|
||||
from bpy.props import BoolProperty, EnumProperty, StringProperty
|
||||
from bpy.types import Operator, Menu
|
||||
|
||||
from .option_file_parser import MungeOptions
|
||||
|
||||
|
||||
import os
|
||||
|
||||
|
||||
# FillSWBFMaterialProperties
|
||||
|
||||
# Iterates through all material slots of all selected
|
||||
# objects and fills basic SWBF material properties
|
||||
# from any Principled BSDF nodes it finds.
|
||||
|
||||
|
||||
class FillSWBFMaterialProperties(bpy.types.Operator):
|
||||
bl_idname = "swbf_msh.fill_mat_props"
|
||||
bl_label = "Fill SWBF Material Properties"
|
||||
bl_description = ("Fill in SWBF properties of all materials used by selected objects.\n"
|
||||
"Only considers materials that use nodes.\n"
|
||||
"Please see 'Materials > Materials Operators' in the docs for more details.")
|
||||
|
||||
def execute(self, context):
|
||||
|
||||
slots = sum([list(ob.material_slots) for ob in bpy.context.selected_objects if ob.type == 'MESH'],[])
|
||||
mats = [slot.material for slot in slots if (slot.material and slot.material.node_tree)]
|
||||
|
||||
mats_visited = set()
|
||||
|
||||
for mat in mats:
|
||||
|
||||
if mat.name in mats_visited or not mat.swbf_msh_mat:
|
||||
continue
|
||||
else:
|
||||
mats_visited.add(mat.name)
|
||||
|
||||
mat.swbf_msh_mat.doublesided = not mat.use_backface_culling
|
||||
mat.swbf_msh_mat.hardedged_transparency = (mat.blend_method == "CLIP")
|
||||
mat.swbf_msh_mat.blended_transparency = (mat.blend_method == "BLEND")
|
||||
|
||||
|
||||
# Below is all for filling the diffuse map/texture_0 fields
|
||||
|
||||
try:
|
||||
for BSDF_node in [n for n in mat.node_tree.nodes if n.type == 'BSDF_PRINCIPLED']:
|
||||
base_col = BSDF_node.inputs['Base Color']
|
||||
|
||||
stack = []
|
||||
|
||||
texture_node = None
|
||||
|
||||
current_socket = base_col
|
||||
if base_col.is_linked:
|
||||
stack.append(base_col.links[0].from_node)
|
||||
|
||||
while stack:
|
||||
|
||||
curr_node = stack.pop()
|
||||
|
||||
if curr_node.type == 'TEX_IMAGE':
|
||||
texture_node = curr_node
|
||||
break
|
||||
else:
|
||||
next_nodes = []
|
||||
for node_input in curr_node.inputs:
|
||||
for link in node_input.links:
|
||||
next_nodes.append(link.from_node)
|
||||
# reversing it so we go from up to down
|
||||
stack += reversed(next_nodes)
|
||||
|
||||
|
||||
if texture_node is not None:
|
||||
|
||||
tex_path = texture_node.image.filepath
|
||||
|
||||
tex_name = os.path.basename(tex_path)
|
||||
|
||||
i = tex_name.find('.')
|
||||
|
||||
# Get rid of trailing number in case one is present
|
||||
if i > 0:
|
||||
tex_name = tex_name[0:i] + ".tga"
|
||||
|
||||
refined_tex_path = os.path.join(os.path.dirname(tex_path), tex_name)
|
||||
|
||||
mat.swbf_msh_mat.diffuse_map = refined_tex_path
|
||||
mat.swbf_msh_mat.texture_0 = refined_tex_path
|
||||
|
||||
break
|
||||
except:
|
||||
# Many chances for null ref exceptions. None if user reads doc section...
|
||||
pass
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
class VIEW3D_MT_SWBF(bpy.types.Menu):
|
||||
bl_label = "SWBF"
|
||||
|
||||
def draw(self, _context):
|
||||
layout = self.layout
|
||||
layout.operator("swbf_msh.fill_mat_props", text="Fill SWBF Material Properties")
|
||||
|
||||
|
||||
def draw_matfill_menu(self, context):
|
||||
layout = self.layout
|
||||
layout.separator()
|
||||
layout.menu("VIEW3D_MT_SWBF")
|
||||
|
||||
|
||||
|
||||
|
||||
# GenerateMaterialNodesFromSWBFProperties
|
||||
|
||||
# Creates shader nodes to emulate SWBF material properties.
|
||||
# Will probably only support for a narrow subset of properties...
|
||||
|
||||
class GenerateMaterialNodesFromSWBFProperties(bpy.types.Operator):
|
||||
|
||||
bl_idname = "swbf_msh.generate_material_nodes"
|
||||
bl_label = "Generate Nodes"
|
||||
bl_description= """Generate Cycles shader nodes from SWBF material properties.
|
||||
|
||||
The nodes generated are meant to give one a general idea
|
||||
of how the material would look ingame. They cannot
|
||||
to provide an exact emulation"""
|
||||
|
||||
|
||||
material_name: StringProperty(
|
||||
name = "Material Name",
|
||||
description = "Name of material whose SWBF properties the generated nodes will emulate."
|
||||
)
|
||||
|
||||
fail_silently: BoolProperty(
|
||||
name = "Fail Silently"
|
||||
)
|
||||
|
||||
|
||||
def execute(self, context):
|
||||
|
||||
material = bpy.data.materials.get(self.material_name, None)
|
||||
|
||||
if not material or not material.swbf_msh_mat:
|
||||
return {'CANCELLED'}
|
||||
|
||||
mat_props = material.swbf_msh_mat
|
||||
|
||||
|
||||
texture_input_nodes = []
|
||||
surface_output_nodes = []
|
||||
|
||||
# Op will give up if no diffuse map is present.
|
||||
# Eventually more nuance will be added for different
|
||||
# rtypes
|
||||
diffuse_texture_path = mat_props.diffuse_map
|
||||
if diffuse_texture_path and os.path.exists(diffuse_texture_path):
|
||||
|
||||
material.use_nodes = True
|
||||
material.node_tree.nodes.clear()
|
||||
|
||||
bsdf = material.node_tree.nodes.new("ShaderNodeBsdfPrincipled")
|
||||
|
||||
|
||||
texImage = material.node_tree.nodes.new('ShaderNodeTexImage')
|
||||
texImage.image = bpy.data.images.load(diffuse_texture_path)
|
||||
texImage.image.alpha_mode = 'CHANNEL_PACKED'
|
||||
material.node_tree.links.new(bsdf.inputs['Base Color'], texImage.outputs['Color'])
|
||||
|
||||
texture_input_nodes.append(texImage)
|
||||
|
||||
bsdf.inputs["Roughness"].default_value = 1.0
|
||||
bsdf.inputs["Specular"].default_value = 0.0
|
||||
|
||||
if mat_props.hardedged_transparency and not mat_props.glow:
|
||||
material.blend_method = "CLIP"
|
||||
material.node_tree.links.new(bsdf.inputs['Alpha'], texImage.outputs['Alpha'])
|
||||
|
||||
material.use_backface_culling = not bool(mat_props.doublesided)
|
||||
|
||||
surface_output_nodes.append(tuple(('BSDF', bsdf)))
|
||||
|
||||
|
||||
# Glow (adds another shader output)
|
||||
if mat_props.glow:
|
||||
|
||||
emission = material.node_tree.nodes.new("ShaderNodeEmission")
|
||||
material.node_tree.links.new(emission.inputs['Color'], texImage.outputs['Color'])
|
||||
|
||||
emission_strength_multiplier = material.node_tree.nodes.new("ShaderNodeMath")
|
||||
emission_strength_multiplier.operation = 'MULTIPLY'
|
||||
emission_strength_multiplier.inputs[1].default_value = 32.0
|
||||
|
||||
material.node_tree.links.new(emission_strength_multiplier.inputs[0], texImage.outputs['Alpha'])
|
||||
|
||||
material.node_tree.links.new(emission.inputs['Strength'], emission_strength_multiplier.outputs[0])
|
||||
|
||||
surface_output_nodes.append(tuple(("Emission", emission)))
|
||||
|
||||
surfaces_output = None
|
||||
if (len(surface_output_nodes) == 1):
|
||||
surfaces_output = surface_output_nodes[0][1]
|
||||
else:
|
||||
mix = material.node_tree.nodes.new("ShaderNodeMixShader")
|
||||
material.node_tree.links.new(mix.inputs[1], surface_output_nodes[0][1].outputs[0])
|
||||
material.node_tree.links.new(mix.inputs[2], surface_output_nodes[1][1].outputs[0])
|
||||
|
||||
surfaces_output = mix
|
||||
|
||||
# Normal/bump mapping (needs more rendertype support!)
|
||||
if "NORMALMAP" in mat_props.rendertype and mat_props.normal_map and os.path.exists(mat_props.normal_map):
|
||||
normalMapTexImage = material.node_tree.nodes.new('ShaderNodeTexImage')
|
||||
normalMapTexImage.image = bpy.data.images.load(mat_props.normal_map)
|
||||
normalMapTexImage.image.alpha_mode = 'CHANNEL_PACKED'
|
||||
normalMapTexImage.image.colorspace_settings.name = 'Non-Color'
|
||||
texture_input_nodes.append(normalMapTexImage)
|
||||
|
||||
options = MungeOptions(mat_props.normal_map + ".option")
|
||||
|
||||
if options.get_bool("bumpmap"):
|
||||
|
||||
# First we must convert the RGB data to brightness
|
||||
rgb_to_bw_node = material.node_tree.nodes.new("ShaderNodeRGBToBW")
|
||||
material.node_tree.links.new(rgb_to_bw_node.inputs["Color"], normalMapTexImage.outputs["Color"])
|
||||
|
||||
# Now create a bump map node (perhaps we could also use this with normals and just plug color into normal input?)
|
||||
bumpMapNode = material.node_tree.nodes.new('ShaderNodeBump')
|
||||
bumpMapNode.inputs["Distance"].default_value = options.get_float("bumpscale", default=1.0)
|
||||
material.node_tree.links.new(bumpMapNode.inputs["Height"], rgb_to_bw_node.outputs["Val"])
|
||||
|
||||
normalsOutputNode = bumpMapNode
|
||||
|
||||
else:
|
||||
|
||||
normalMapNode = material.node_tree.nodes.new('ShaderNodeNormalMap')
|
||||
material.node_tree.links.new(normalMapNode.inputs["Color"], normalMapTexImage.outputs["Color"])
|
||||
|
||||
normalsOutputNode = normalMapNode
|
||||
|
||||
material.node_tree.links.new(bsdf.inputs['Normal'], normalsOutputNode.outputs["Normal"])
|
||||
|
||||
|
||||
|
||||
output = material.node_tree.nodes.new("ShaderNodeOutputMaterial")
|
||||
material.node_tree.links.new(output.inputs['Surface'], surfaces_output.outputs[0])
|
||||
|
||||
|
||||
|
||||
# Scrolling
|
||||
# This approach works 90% of the time, but notably produces very incorrect results
|
||||
# on mus1_bldg_world_1,2,3
|
||||
|
||||
# Clear all anims in all cases
|
||||
if material.node_tree.animation_data:
|
||||
material.node_tree.animation_data.action.fcurves.clear()
|
||||
|
||||
|
||||
if "SCROLL" in mat_props.rendertype:
|
||||
uv_input = material.node_tree.nodes.new("ShaderNodeUVMap")
|
||||
|
||||
vector_add = material.node_tree.nodes.new("ShaderNodeVectorMath")
|
||||
|
||||
# Add keyframes
|
||||
scroll_per_sec_divisor = 255.0
|
||||
frame_step = 60.0
|
||||
fps = bpy.context.scene.render.fps
|
||||
for i in range(2):
|
||||
vector_add.inputs[1].default_value[0] = i * mat_props.scroll_speed_u * frame_step / scroll_per_sec_divisor
|
||||
vector_add.inputs[1].keyframe_insert("default_value", index=0, frame=i * frame_step * fps)
|
||||
|
||||
vector_add.inputs[1].default_value[1] = i * mat_props.scroll_speed_v * frame_step / scroll_per_sec_divisor
|
||||
vector_add.inputs[1].keyframe_insert("default_value", index=1, frame=i * frame_step * fps)
|
||||
|
||||
|
||||
material.node_tree.links.new(vector_add.inputs[0], uv_input.outputs[0])
|
||||
|
||||
for texture_node in texture_input_nodes:
|
||||
material.node_tree.links.new(texture_node.inputs["Vector"], vector_add.outputs[0])
|
||||
|
||||
# Don't know how to set interpolation when adding keyframes
|
||||
# so we must do it after the fact
|
||||
if material.node_tree.animation_data:
|
||||
for fcurve in material.node_tree.animation_data.action.fcurves:
|
||||
for kf in fcurve.keyframe_points.values():
|
||||
kf.interpolation = 'LINEAR'
|
||||
|
||||
'''
|
||||
else:
|
||||
|
||||
# Todo: figure out some way to raise an error but continue operator execution...
|
||||
if self.fail_silently:
|
||||
return {'CANCELLED'}
|
||||
else:
|
||||
raise RuntimeError(f"Diffuse texture at path: '{diffuse_texture_path}' was not found.")
|
||||
'''
|
||||
|
||||
return {'FINISHED'}
|
||||
|
@@ -9,8 +9,12 @@ from .msh_material_ui_strings import *
|
||||
from .msh_material_utilities import _REVERSE_RENDERTYPES_MAPPING
|
||||
|
||||
|
||||
from .msh_material_operators import GenerateMaterialNodesFromSWBFProperties
|
||||
|
||||
|
||||
|
||||
UI_MATERIAL_RENDERTYPES = (
|
||||
('NORMAL_BF2', "00 Normal (SWBF2)", UI_RENDERTYPE_NORMAL_BF2_DESC),
|
||||
('NORMAL_BF2', "00 Standard (SWBF2)", UI_RENDERTYPE_NORMAL_BF2_DESC),
|
||||
('SCROLLING_BF2', "03 Scrolling (SWBF2)", UI_RENDERTYPE_SCROLLING_BF2_DESC),
|
||||
('ENVMAPPED_BF2', "06 Envmapped (SWBF2)", UI_RENDERTYPE_ENVMAPPED_BF2_DESC),
|
||||
('ANIMATED_BF2', "07 Animated (SWBF2)", UI_RENDERTYPE_ANIMATED_BF2_DESC),
|
||||
@@ -167,26 +171,31 @@ class MaterialProperties(PropertyGroup):
|
||||
description="The basic diffuse map for the material. The alpha channel "
|
||||
"is either the Transparency Map, Glow Map or Gloss Map, "
|
||||
"depending on the selected rendertype and flags.",
|
||||
default="white.tga")
|
||||
default="white.tga",
|
||||
subtype='FILE_PATH')
|
||||
|
||||
detail_map: StringProperty(name="Detail Map",
|
||||
description="Detail maps allow you to add in 'detail' to the diffuse "
|
||||
"map at runtime. Or they can be used as fake ambient occlusion "
|
||||
"maps or even wacky emissive maps. See docs for more details.")
|
||||
"maps or even wacky emissive maps. See docs for more details.",
|
||||
subtype='FILE_PATH')
|
||||
|
||||
normal_map: StringProperty(name="Normal Map",
|
||||
description="Normal maps can provide added detail from lighting. "
|
||||
"If Specular is enabled the alpha channel will be "
|
||||
"the Gloss Map.")
|
||||
"the Gloss Map.",
|
||||
subtype='FILE_PATH')
|
||||
|
||||
environment_map: StringProperty(name="Environment Map",
|
||||
description="Environment map for the material. Provides static "
|
||||
"reflections around the surface. Must be a cubemap.")
|
||||
"reflections around the surface. Must be a cubemap.",
|
||||
subtype='FILE_PATH')
|
||||
|
||||
distortion_map: StringProperty(name="Distortion Map",
|
||||
description="Distortion maps control how Refractive materials "
|
||||
"distort the scene behind them. Should be a normal map "
|
||||
"with '-forceformat v8u8' in it's '.tga.option' file.")
|
||||
"with '-forceformat v8u8' in it's '.tga.option' file.",
|
||||
subtype='FILE_PATH')
|
||||
|
||||
# Below props are for yet unsupported render types
|
||||
data_value_0: IntProperty(name="", description="First data value")
|
||||
@@ -194,10 +203,10 @@ class MaterialProperties(PropertyGroup):
|
||||
|
||||
rendertype_value: IntProperty(name="Rendertype Value", description="Raw number value of rendertype.", min=0, max=31)
|
||||
|
||||
texture_0: StringProperty(name="1", description="First texture slot")
|
||||
texture_1: StringProperty(name="2", description="Second texture slot")
|
||||
texture_2: StringProperty(name="3", description="Third texture slot")
|
||||
texture_3: StringProperty(name="4", description="Fourth texture slot")
|
||||
texture_0: StringProperty(name="1", description="First texture slot", subtype='FILE_PATH', default="white.tga")
|
||||
texture_1: StringProperty(name="2", description="Second texture slot", subtype='FILE_PATH')
|
||||
texture_2: StringProperty(name="3", description="Third texture slot", subtype='FILE_PATH')
|
||||
texture_3: StringProperty(name="4", description="Fourth texture slot", subtype='FILE_PATH')
|
||||
|
||||
|
||||
class MaterialPropertiesPanel(bpy.types.Panel):
|
||||
@@ -285,3 +294,8 @@ class MaterialPropertiesPanel(bpy.types.Panel):
|
||||
layout.prop(material_props, "texture_2")
|
||||
layout.prop(material_props, "texture_3")
|
||||
|
||||
|
||||
op_props = layout.operator("swbf_msh.generate_material_nodes", text="Generate Nodes")
|
||||
op_props.material_name = context.material.name
|
||||
op_props.fail_silently = False
|
||||
|
||||
|
@@ -2,9 +2,9 @@
|
||||
|
||||
import bpy
|
||||
from typing import Dict
|
||||
from .msh_material import *
|
||||
from .msh_material_gather import *
|
||||
|
||||
from .msh_material_properties import *
|
||||
from .msh_material import *
|
||||
|
||||
from .msh_material_utilities import _REVERSE_RENDERTYPES_MAPPING
|
||||
|
||||
@@ -14,6 +14,8 @@ import os
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def find_texture_path(folder_path : str, name : str) -> str:
|
||||
|
||||
if not folder_path or not name:
|
||||
@@ -30,11 +32,23 @@ def find_texture_path(folder_path : str, name : str) -> str:
|
||||
if os.path.exists(possible_path):
|
||||
return possible_path
|
||||
|
||||
return ""
|
||||
return name
|
||||
|
||||
|
||||
|
||||
def fill_material_props(material : Material, material_properties):
|
||||
def swbf_material_to_blend(material_name : str, material : Material, folder_path : str) -> bpy.types.Material:
|
||||
|
||||
new_mat = bpy.data.materials.new(name=material_name)
|
||||
|
||||
fill_material_props(material, new_mat.swbf_msh_mat, folder_path)
|
||||
|
||||
bpy.ops.swbf_msh.generate_material_nodes('EXEC_DEFAULT', material_name=new_mat.name, fail_silently=True)
|
||||
|
||||
return new_mat
|
||||
|
||||
|
||||
|
||||
def fill_material_props(material, material_properties, folder_path):
|
||||
""" Fills MaterialProperties from Material instance """
|
||||
|
||||
if material_properties is None or material is None:
|
||||
@@ -47,7 +61,7 @@ def fill_material_props(material : Material, material_properties):
|
||||
_fill_material_props_rendertype(material, material_properties)
|
||||
_fill_material_props_flags(material, material_properties)
|
||||
_fill_material_props_data(material, material_properties)
|
||||
_fill_material_props_texture_maps(material, material_properties)
|
||||
_fill_material_props_texture_maps(material, material_properties, folder_path)
|
||||
|
||||
|
||||
|
||||
@@ -92,7 +106,7 @@ def _fill_material_props_data(material, material_properties):
|
||||
anim_length_index = int(sqrt(material.data[0]))
|
||||
if anim_length_index < 0:
|
||||
anim_length_index = 0
|
||||
elif anim_length_index > len(UI_MATERIAL_ANIMATION_LENGTHS):
|
||||
elif anim_length_index >= len(UI_MATERIAL_ANIMATION_LENGTHS):
|
||||
anim_length_index = len(UI_MATERIAL_ANIMATION_LENGTHS) - 1
|
||||
|
||||
material_properties.animation_length = UI_MATERIAL_ANIMATION_LENGTHS[anim_length_index][0]
|
||||
@@ -102,15 +116,22 @@ def _fill_material_props_data(material, material_properties):
|
||||
material_properties.detail_map_tiling_v = material.data[1]
|
||||
|
||||
|
||||
def _fill_material_props_texture_maps(material, material_properties):
|
||||
def _fill_material_props_texture_maps(material, material_properties, folder_path):
|
||||
|
||||
t0path = find_texture_path(folder_path, material.texture0)
|
||||
t1path = find_texture_path(folder_path, material.texture1)
|
||||
t2path = find_texture_path(folder_path, material.texture2)
|
||||
t3path = find_texture_path(folder_path, material.texture3)
|
||||
|
||||
material_properties.texture_0 = t0path
|
||||
material_properties.texture_1 = t1path
|
||||
material_properties.texture_2 = t2path
|
||||
material_properties.texture_3 = t3path
|
||||
|
||||
material_properties.diffuse_map = t0path
|
||||
material_properties.distortion_map = t1path
|
||||
material_properties.normal_map = t1path
|
||||
material_properties.detail_map = t2path
|
||||
material_properties.environment_map = t3path
|
||||
|
||||
material_properties.texture_0 = material.texture0
|
||||
material_properties.texture_1 = material.texture1
|
||||
material_properties.texture_2 = material.texture2
|
||||
material_properties.texture_3 = material.texture3
|
||||
|
||||
material_properties.diffuse_map = material.texture0
|
||||
material_properties.distortion_map = material.texture1
|
||||
material_properties.normal_map = material.texture1
|
||||
material_properties.detail_map = material.texture2
|
||||
material_properties.environment_map = material.texture3
|
||||
|
@@ -18,19 +18,7 @@ _RENDERTYPES_MAPPING = {
    "NORMALMAPPED_TILED_ENVMAPPED_BF2": Rendertype.NORMALMAPPED_TILED_ENVMAP}


_REVERSE_RENDERTYPES_MAPPING = {
    Rendertype.NORMAL : "NORMAL_BF2",
    Rendertype.SCROLLING : "SCROLLING_BF2",
    Rendertype.ENVMAPPED : "ENVMAPPED_BF2",
    Rendertype.ANIMATED : "ANIMATED_BF2",
    Rendertype.REFRACTION : "REFRACTION_BF2",
    Rendertype.BLINK : "BLINK_BF2",
    Rendertype.NORMALMAPPED_TILED : "NORMALMAPPED_TILED_BF2",
    Rendertype.NORMALMAPPED_ENVMAPPED : "NORMALMAPPED_ENVMAPPED_BF2",
    Rendertype.NORMALMAPPED : "NORMALMAPPED_BF2",
    Rendertype.NORMALMAPPED_TILED_ENVMAP : "NORMALMAPPED_TILED_ENVMAPPED_BF2"}


_REVERSE_RENDERTYPES_MAPPING = {val: key for (key, val) in _RENDERTYPES_MAPPING.items()}


def remove_unused_materials(materials: Dict[str, Material],
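A minimal sketch, with a hypothetical two-entry mapping, of why the hand-written reverse table above could be dropped: the comprehension derives it from _RENDERTYPES_MAPPING, so the two can no longer drift apart. If two UI names ever mapped to the same Rendertype, the last one iterated would win.

_forward = {"NORMAL_BF2": 0, "SCROLLING_BF2": 3}          # hypothetical subset
_reverse = {val: key for (key, val) in _forward.items()}
assert _reverse == {0: "NORMAL_BF2", 3: "SCROLLING_BF2"}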
addons/io_scene_swbf_msh/msh_mesh_to_blend.py (new file, 191 lines)
@@ -0,0 +1,191 @@
|
||||
""" Converts msh meshes to Blender counterparts """
|
||||
|
||||
|
||||
import bpy
|
||||
import bmesh
|
||||
import math
|
||||
|
||||
from enum import Enum
|
||||
from typing import List, Set, Dict, Tuple
|
||||
|
||||
from .msh_scene import Scene
|
||||
from .msh_material_to_blend import *
|
||||
from .msh_model import *
|
||||
from .msh_skeleton_utilities import *
|
||||
from .msh_model_gather import get_is_model_hidden
|
||||
|
||||
|
||||
from .crc import *
|
||||
|
||||
import os
|
||||
|
||||
|
||||
def validate_segment_geometry(segment : GeometrySegment):
|
||||
if not segment.positions:
|
||||
return False
|
||||
if not segment.triangles and not segment.triangle_strips and not segment.polygons:
|
||||
return False
|
||||
if not segment.material_name:
|
||||
return False
|
||||
if not segment.normals:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def model_to_mesh_object(model: Model, scene : Scene, materials_map : Dict[str, bpy.types.Material]) -> bpy.types.Object:
|
||||
|
||||
blender_mesh = bpy.data.meshes.new(model.name)
|
||||
|
||||
# Per vertex data which will eventually be remapped to loops
|
||||
vertex_positions = []
|
||||
vertex_uvs = []
|
||||
vertex_normals = []
|
||||
|
||||
# Keeps track of which vertices each group of weights affects
|
||||
# i.e. maps offset of vertices -> weights that affect them
|
||||
vertex_weights_offsets = {}
|
||||
|
||||
# Since polygons in a msh segment index into the segment's verts,
|
||||
# we must keep an offset to index them into the verts of the whole mesh
|
||||
polygon_index_offset = 0
|
||||
|
||||
# List of tuples of face indices
|
||||
polygons = []
|
||||
|
||||
# Each polygon has an index into the mesh's material list
|
||||
current_material_index = 0
|
||||
polygon_material_indices = []
|
||||
|
||||
|
||||
if model.geometry:
|
||||
|
||||
for segment in model.geometry:
|
||||
|
||||
if not validate_segment_geometry(segment):
|
||||
continue
|
||||
|
||||
blender_mesh.materials.append(materials_map[segment.material_name])
|
||||
|
||||
vertex_positions += [tuple(convert_vector_space(p)) for p in segment.positions]
|
||||
|
||||
if segment.texcoords:
|
||||
vertex_uvs += [tuple(texcoord) for texcoord in segment.texcoords]
|
||||
else:
|
||||
vertex_uvs += [(0.0,0.0) for _ in range(len(segment.positions))]
|
||||
|
||||
if segment.normals:
|
||||
vertex_normals += [tuple(convert_vector_space(n)) for n in segment.normals]
|
||||
|
||||
if segment.weights:
|
||||
vertex_weights_offsets[polygon_index_offset] = segment.weights
|
||||
|
||||
|
||||
segment_polygons = []
|
||||
|
||||
if segment.triangles:
|
||||
segment_polygons = [tuple([ind + polygon_index_offset for ind in tri]) for tri in segment.triangles]
|
||||
elif segment.triangle_strips:
|
||||
winding = [0,1,2]
|
||||
rwinding = [1,0,2]
|
||||
for strip in segment.triangle_strips:
|
||||
for i in range(len(strip) - 2):
|
||||
strip_tri = tuple([polygon_index_offset + strip[i+j] for j in (winding if i % 2 == 0 else rwinding)])
|
||||
segment_polygons.append(strip_tri)
|
||||
elif segment.polygons:
|
||||
segment_polygons = [tuple([ind + polygon_index_offset for ind in polygon]) for polygon in segment.polygons]
|
||||
|
||||
polygon_index_offset += len(segment.positions)
|
||||
|
||||
polygons += segment_polygons
|
||||
|
||||
polygon_material_indices += [current_material_index for _ in segment_polygons]
|
||||
current_material_index += 1
|
||||
|
||||
'''
|
||||
Start building the blender mesh
|
||||
'''
|
||||
|
||||
# VERTICES
|
||||
|
||||
# This is all we have to do for vertices, other attributes are done per-loop
|
||||
blender_mesh.vertices.add(len(vertex_positions))
|
||||
blender_mesh.vertices.foreach_set("co", [component for vertex_position in vertex_positions for component in vertex_position])
|
||||
|
||||
|
||||
# LOOPS
|
||||
|
||||
flat_indices = [index for polygon in polygons for index in polygon]
|
||||
|
||||
blender_mesh.loops.add(len(flat_indices))
|
||||
|
||||
# Position indices
|
||||
blender_mesh.loops.foreach_set("vertex_index", flat_indices)
|
||||
|
||||
# Normals
|
||||
blender_mesh.create_normals_split()
|
||||
blender_mesh.loops.foreach_set("normal", [component for i in flat_indices for component in vertex_normals[i]])
|
||||
|
||||
# UVs
|
||||
blender_mesh.uv_layers.new(do_init=False)
|
||||
blender_mesh.uv_layers[0].data.foreach_set("uv", [component for i in flat_indices for component in vertex_uvs[i]])
|
||||
|
||||
|
||||
|
||||
# POLYGONS/FACES
|
||||
|
||||
blender_mesh.polygons.add(len(polygons))
|
||||
|
||||
# Indices of starting loop for each polygon
|
||||
polygon_loop_start_indices = []
|
||||
current_polygon_start_index = 0
|
||||
|
||||
# Number of loops in this polygon. Polygon i will use
|
||||
# loops from polygon_loop_start_indices[i] to
|
||||
# polygon_loop_start_indices[i] + polygon_loop_totals[i]
|
||||
polygon_loop_totals = []
|
||||
|
||||
for polygon in polygons:
|
||||
polygon_loop_start_indices.append(current_polygon_start_index)
|
||||
|
||||
current_polygon_length = len(polygon)
|
||||
current_polygon_start_index += current_polygon_length
|
||||
|
||||
polygon_loop_totals.append(current_polygon_length)
|
||||
|
||||
blender_mesh.polygons.foreach_set("loop_start", polygon_loop_start_indices)
|
||||
blender_mesh.polygons.foreach_set("loop_total", polygon_loop_totals)
|
||||
blender_mesh.polygons.foreach_set("material_index", polygon_material_indices)
|
||||
blender_mesh.polygons.foreach_set("use_smooth", [True for _ in polygons])
|
||||
|
||||
blender_mesh.validate(clean_customdata=False)
|
||||
blender_mesh.update()
|
||||
|
||||
|
||||
# Reset custom normals after calling update/validate
|
||||
reset_normals = [0.0] * (len(blender_mesh.loops) * 3)
|
||||
blender_mesh.loops.foreach_get("normal", reset_normals)
|
||||
blender_mesh.normals_split_custom_set(tuple(zip(*(iter(reset_normals),) * 3)))
|
||||
blender_mesh.use_auto_smooth = True
|
||||
|
||||
|
||||
blender_mesh_object = bpy.data.objects.new(model.name, blender_mesh)
|
||||
|
||||
|
||||
# VERTEX GROUPS
|
||||
|
||||
vertex_groups_indicies = {}
|
||||
|
||||
for offset in vertex_weights_offsets:
|
||||
for i, weight_set in enumerate(vertex_weights_offsets[offset]):
|
||||
for weight in weight_set:
|
||||
index = weight.bone
|
||||
|
||||
if index not in vertex_groups_indicies:
|
||||
model_name = scene.models[index].name
|
||||
vertex_groups_indicies[index] = blender_mesh_object.vertex_groups.new(name=model_name)
|
||||
|
||||
vertex_groups_indicies[index].add([offset + i], weight.weight, 'ADD')
|
||||
|
||||
|
||||
return blender_mesh_object
|
||||
|
@@ -24,24 +24,37 @@ def gather_models(apply_modifiers: bool, export_target: str, skeleton_only: bool
|
||||
|
||||
models_list: List[Model] = []
|
||||
|
||||
# Composite bones are bones which have geometry.
|
||||
# If a child object has the same name, it will take said child's geometry.
|
||||
|
||||
# Pure bones are just bones and after all objects are explored the only
|
||||
# entries remaining in this dict will be bones without geometry.
|
||||
pure_bones_from_armature = {}
|
||||
armature_found = None
|
||||
|
||||
for uneval_obj in select_objects(export_target):
|
||||
if uneval_obj.type in SKIPPED_OBJECT_TYPES and uneval_obj.name not in parents:
|
||||
objects_to_export = select_objects(export_target)
|
||||
|
||||
for uneval_obj in objects_to_export:
|
||||
if uneval_obj.type == "ARMATURE":
|
||||
armature_found = uneval_obj.evaluated_get(depsgraph) if apply_modifiers else uneval_obj
|
||||
pure_bones_from_armature = expand_armature(armature_found)
|
||||
break
|
||||
|
||||
for uneval_obj in objects_to_export:
|
||||
if uneval_obj.type == "ARMATURE" or (uneval_obj.type in SKIPPED_OBJECT_TYPES and uneval_obj.name not in parents):
|
||||
continue
|
||||
|
||||
if apply_modifiers:
|
||||
obj = uneval_obj.evaluated_get(depsgraph)
|
||||
else:
|
||||
obj = uneval_obj
|
||||
obj = uneval_obj.evaluated_get(depsgraph) if apply_modifiers else uneval_obj
|
||||
|
||||
check_for_bad_lod_suffix(obj)
|
||||
|
||||
if obj.type == "ARMATURE":
|
||||
models_list += expand_armature(obj)
|
||||
armature_found = obj
|
||||
continue
|
||||
|
||||
# Test for a mesh object that is actually a BONE (shares name with bone_parent)
|
||||
# If so, we inject geometry into the BONE while not modifying it's transform/name
|
||||
if obj.parent_bone and obj.parent_bone in pure_bones_from_armature:
|
||||
model = pure_bones_from_armature[obj.parent_bone]
|
||||
# Since we found a composite bone, removed it from the dict of pure bones
|
||||
pure_bones_from_armature.pop(obj.parent_bone)
|
||||
else:
|
||||
model = Model()
|
||||
model.name = obj.name
|
||||
model.model_type = get_model_type(obj, skeleton_only)
|
||||
@@ -92,9 +105,11 @@ def gather_models(apply_modifiers: bool, export_target: str, skeleton_only: bool
|
||||
if get_is_collision_primitive(obj):
|
||||
model.collisionprimitive = get_collision_primitive(obj)
|
||||
|
||||
|
||||
models_list.append(model)
|
||||
|
||||
# We removed all composite bones after looking through the objects,
|
||||
# so the bones left are all pure and we add them all here.
|
||||
models_list += pure_bones_from_armature.values()
|
||||
|
||||
return (models_list, armature_found)
|
||||
|
||||
@@ -116,7 +131,7 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, has_weights: bool) -> List[Geomet
|
||||
""" Creates a list of GeometrySegment objects from a Blender mesh.
|
||||
Does NOT create triangle strips in the GeometrySegment however. """
|
||||
|
||||
if mesh.has_custom_normals:
|
||||
# We have to do this for all meshes to account for sharp edges
|
||||
mesh.calc_normals_split()
|
||||
|
||||
mesh.validate_material_indices()
|
||||
@@ -140,7 +155,7 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, has_weights: bool) -> List[Geomet
|
||||
for segment, material in zip(segments, mesh.materials):
|
||||
segment.material_name = material.name
|
||||
|
||||
def add_vertex(material_index: int, vertex_index: int, loop_index: int, use_smooth_normal: bool, face_normal: Vector) -> int:
|
||||
def add_vertex(material_index: int, vertex_index: int, loop_index: int) -> int:
|
||||
nonlocal segments, vertex_remap
|
||||
|
||||
vertex_cache_miss_index = -1
|
||||
@@ -148,15 +163,8 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, has_weights: bool) -> List[Geomet
|
||||
cache = vertex_cache[material_index]
|
||||
remap = vertex_remap[material_index]
|
||||
|
||||
vertex_normal: Vector
|
||||
|
||||
if use_smooth_normal or mesh.use_auto_smooth:
|
||||
if mesh.has_custom_normals:
|
||||
# always use loop normals since we always calculate a custom split set
|
||||
vertex_normal = Vector( mesh.loops[loop_index].normal )
|
||||
else:
|
||||
vertex_normal = Vector( mesh.vertices[vertex_index].normal )
|
||||
else:
|
||||
vertex_normal = Vector(face_normal)
|
||||
|
||||
def get_cache_vertex():
|
||||
yield mesh.vertices[vertex_index].co.x
|
||||
@@ -213,9 +221,9 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, has_weights: bool) -> List[Geomet
|
||||
for tri in mesh.loop_triangles:
|
||||
polygons[tri.material_index].add(tri.polygon_index)
|
||||
segments[tri.material_index].triangles.append([
|
||||
add_vertex(tri.material_index, tri.vertices[0], tri.loops[0], tri.use_smooth, tri.normal),
|
||||
add_vertex(tri.material_index, tri.vertices[1], tri.loops[1], tri.use_smooth, tri.normal),
|
||||
add_vertex(tri.material_index, tri.vertices[2], tri.loops[2], tri.use_smooth, tri.normal)])
|
||||
add_vertex(tri.material_index, tri.vertices[0], tri.loops[0]),
|
||||
add_vertex(tri.material_index, tri.vertices[1], tri.loops[1]),
|
||||
add_vertex(tri.material_index, tri.vertices[2], tri.loops[2])])
|
||||
|
||||
for segment, remap, polys in zip(segments, vertex_remap, polygons):
|
||||
for poly_index in polys:
|
||||
@@ -287,10 +295,7 @@ def get_collision_primitive(obj: bpy.types.Object) -> CollisionPrimitive:
|
||||
|
||||
primitive.radius = max(obj.dimensions[0], obj.dimensions[1], obj.dimensions[2]) * 0.5
|
||||
elif primitive.shape == CollisionPrimitiveShape.CYLINDER:
|
||||
if not math.isclose(obj.dimensions[0], obj.dimensions[1], rel_tol=0.001):
|
||||
raise RuntimeError(f"Object '{obj.name}' is being used as a cylinder collision "
|
||||
f"primitive but it's X and Y dimensions are not uniform!")
|
||||
primitive.radius = obj.dimensions[0] * 0.5
|
||||
primitive.radius = max(obj.dimensions[0], obj.dimensions[1]) * 0.5
|
||||
primitive.height = obj.dimensions[2]
|
||||
elif primitive.shape == CollisionPrimitiveShape.BOX:
|
||||
primitive.radius = obj.dimensions[0] * 0.5
|
||||
@@ -381,11 +386,17 @@ def select_objects(export_target: str) -> List[bpy.types.Object]:
|
||||
|
||||
|
||||
|
||||
def expand_armature(armature: bpy.types.Object) -> List[Model]:
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def expand_armature(armature: bpy.types.Object) -> Dict[str, Model]:
|
||||
|
||||
proper_BONES = get_real_BONES(armature)
|
||||
|
||||
bones: List[Model] = []
|
||||
bones: Dict[str, Model] = {}
|
||||
|
||||
for bone in armature.data.bones:
|
||||
model = Model()
|
||||
@@ -399,12 +410,26 @@ def expand_armature(armature: bpy.types.Object) -> List[Model]:
|
||||
# set model parent to SKIN object if there is one
|
||||
# set model parent to armature parent if there is one
|
||||
else:
|
||||
for child_obj in armature.children:
|
||||
|
||||
bone_world_matrix = get_bone_world_matrix(armature, bone.name)
|
||||
parent_obj = None
|
||||
|
||||
for child_obj in armature.original.children:
|
||||
if child_obj.vertex_groups and not get_is_model_hidden(child_obj) and not child_obj.parent_bone:
|
||||
model.parent = child_obj.name
|
||||
#model.parent = child_obj.name
|
||||
parent_obj = child_obj
|
||||
break
|
||||
if not model.parent and armature.parent:
|
||||
|
||||
if parent_obj:
|
||||
transform = parent_obj.matrix_world.inverted() @ bone_world_matrix
|
||||
model.parent = parent_obj.name
|
||||
elif not parent_obj and armature.parent:
|
||||
transform = armature.parent.matrix_world.inverted() @ bone_world_matrix
|
||||
model.parent = armature.parent.name
|
||||
else:
|
||||
transform = bone_world_matrix
|
||||
model.parent = ""
|
||||
|
||||
|
||||
|
||||
local_translation, local_rotation, _ = transform.decompose()
|
||||
@@ -414,6 +439,6 @@ def expand_armature(armature: bpy.types.Object) -> List[Model]:
|
||||
model.transform.rotation = convert_rotation_space(local_rotation)
|
||||
model.transform.translation = convert_vector_space(local_translation)
|
||||
|
||||
bones.append(model)
|
||||
bones[bone.name] = model
|
||||
|
||||
return bones
|
||||
|
@@ -323,18 +323,23 @@ def _read_segm(segm: Reader, materials_list: List[Material]) -> GeometrySegment:
            for _ in range(num_texcoords):
                geometry_seg.texcoords.append(Vector(uv0l.read_f32(2)))

        # TODO: Can't remember exact issue here...

        # TODO: Can't remember exact issue here, but this chunk sometimes fails
        elif next_header == "NDXL":

            with segm.read_child() as ndxl:
                pass
                '''

                try:
                    num_polygons = ndxl.read_u32()

                    for _ in range(num_polygons):
                        polygon = ndxl.read_u16(ndxl.read_u16())
                        num_inds = ndxl.read_u16()
                        polygon = ndxl.read_u16(num_inds)
                        geometry_seg.polygons.append(polygon)
                '''
                except:
                    print("Failed to read polygon list!")
                    geometry_seg.polygons = []


        elif next_header == "NDXT":
            with segm.read_child() as ndxt:
@@ -343,46 +348,37 @@ def _read_segm(segm: Reader, materials_list: List[Material]) -> GeometrySegment:
                for _ in range(num_tris):
                    geometry_seg.triangles.append(ndxt.read_u16(3))

        # There could be major issues with this, so far it hasn't failed but its inelegance irks me
        # Try catch for safety's sake
        elif next_header == "STRP":
            strips : List[List[int]] = []

            with segm.read_child() as strp:

                try:
                    num_indicies = strp.read_u32()

                    num_indicies_read = 0
                    indices = strp.read_u16(num_indicies)

                    curr_strip = []
                    previous_flag = False
                    strip_indices = []

                    if num_indicies > 0:
                        index, index1 = strp.read_u16(2)
                        curr_strip = [index & 0x7fff, index1 & 0x7fff]
                        num_indicies_read += 2
                    for i in range(num_indicies - 1):
                        if indices[i] & 0x8000 > 0 and indices[i+1] & 0x8000 > 0:
                            strip_indices.append(i)

                    for i in range(num_indicies - 2):
                        index = strp.read_u16(1)
                    strip_indices.append(num_indicies)

                        if index & 0x8000 > 0:
                            index = index & 0x7fff
                    for i in range(len(strip_indices) - 1):
                        start = strip_indices[i]
                        end = strip_indices[i+1]

                            if previous_flag:
                                previous_flag = False
                                curr_strip.append(index)
                                strips.append(curr_strip[:-2])
                                curr_strip = curr_strip[-2:]
                                continue
                            else:
                                previous_flag = True

                        else:
                            previous_flag = False

                        curr_strip.append(index)
                        strips.append(list([indices[start] & 0x7fff, indices[start+1] & 0x7fff]) + list(indices[start+2 : end]))
                except:
                    print("Failed to read triangle strips")
                    geometry_seg.triangle_strips = []

            geometry_seg.triangle_strips = strips

            # TODO: Dont know how to handle trailing 0 bug yet: https://schlechtwetterfront.github.io/ze_filetypes/msh.html#STRP
            # TODO: Dont know if/how to handle trailing 0 bug yet: https://schlechtwetterfront.github.io/ze_filetypes/msh.html#STRP
            #if segm.read_u16 != 0:
            # segm.skip_bytes(-2)
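A minimal sketch of the STRP decoding logic rewritten above: in the .msh format a new triangle strip is announced by two consecutive indices with the high bit (0x8000) set, and masking with 0x7fff recovers the real vertex indices. The index stream below is hypothetical.

def split_strips(indices):
    # Indices where a strip starts (this index and the next both have the high bit set)
    starts = [i for i in range(len(indices) - 1)
              if indices[i] & 0x8000 and indices[i + 1] & 0x8000]
    starts.append(len(indices))

    strips = []
    for start, end in zip(starts, starts[1:]):
        strips.append([indices[start] & 0x7fff, indices[start + 1] & 0x7fff]
                      + list(indices[start + 2:end]))
    return strips

print(split_strips([0x8000, 0x8001, 2, 3, 0x8004, 0x8005, 6]))
# -> [[0, 1, 2, 3], [4, 5, 6]]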
addons/io_scene_swbf_msh/msh_scene_to_blend.py (new file, 200 lines)
@@ -0,0 +1,200 @@
|
||||
""" Gathers the Blender objects from the current scene and returns them as a list of
|
||||
Model objects. """
|
||||
|
||||
import bpy
|
||||
import bmesh
|
||||
import math
|
||||
|
||||
from enum import Enum
|
||||
from typing import List, Set, Dict, Tuple
|
||||
|
||||
from .msh_scene import Scene
|
||||
from .msh_material_to_blend import *
|
||||
from .msh_model import *
|
||||
from .msh_skeleton_utilities import *
|
||||
from .msh_skeleton_to_blend import *
|
||||
from .msh_model_gather import get_is_model_hidden
|
||||
from .msh_mesh_to_blend import model_to_mesh_object
|
||||
|
||||
|
||||
from .crc import *
|
||||
|
||||
import os
|
||||
|
||||
|
||||
|
||||
# Create the msh hierachy. Armatures are not created here.
|
||||
def extract_models(scene: Scene, materials_map : Dict[str, bpy.types.Material]) -> Dict[str, bpy.types.Object]:
|
||||
|
||||
# This will be filled with model names -> Blender objects and returned
|
||||
model_map : Dict[str, bpy.types.Object] = {}
|
||||
|
||||
sorted_models : List[Model] = sort_by_parent(scene.models)
|
||||
|
||||
for model in sorted_models:
|
||||
|
||||
new_obj = None
|
||||
|
||||
if model.geometry:
|
||||
|
||||
new_obj = model_to_mesh_object(model, scene, materials_map)
|
||||
|
||||
else:
|
||||
|
||||
new_obj = bpy.data.objects.new(model.name, None)
|
||||
new_obj.empty_display_size = 1
|
||||
new_obj.empty_display_type = 'PLAIN_AXES'
|
||||
|
||||
|
||||
model_map[model.name] = new_obj
|
||||
new_obj.name = model.name
|
||||
|
||||
if model.parent:
|
||||
new_obj.parent = model_map[model.parent]
|
||||
|
||||
new_obj.location = convert_vector_space(model.transform.translation)
|
||||
new_obj.rotation_mode = "QUATERNION"
|
||||
new_obj.rotation_quaternion = convert_rotation_space(model.transform.rotation)
|
||||
|
||||
if model.collisionprimitive is not None:
|
||||
new_obj.swbf_msh_coll_prim.prim_type = model.collisionprimitive.shape.value
|
||||
|
||||
bpy.context.collection.objects.link(new_obj)
|
||||
|
||||
|
||||
return model_map
|
||||
|
||||
|
||||
# TODO: Add to custom material info struct, maybe some material conversion/import?
|
||||
def extract_materials(folder_path: str, scene: Scene) -> Dict[str, bpy.types.Material]:
|
||||
|
||||
extracted_materials : Dict[str, bpy.types.Material] = {}
|
||||
|
||||
for material_name, material in scene.materials.items():
|
||||
|
||||
extracted_materials[material_name] = swbf_material_to_blend(material_name, material, folder_path)
|
||||
|
||||
return extracted_materials
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def extract_scene(filepath: str, scene: Scene):
|
||||
|
||||
folder = os.path.join(os.path.dirname(filepath),"")
|
||||
|
||||
# material_map mapes Material names to Blender materials
|
||||
material_map = extract_materials(folder, scene)
|
||||
|
||||
# model_map maps Model names to Blender objects.
|
||||
model_map = extract_models(scene, material_map)
|
||||
|
||||
|
||||
# skel contains all models needed in an armature
|
||||
skel = extract_required_skeleton(scene)
|
||||
|
||||
# Create the armature if skel is non-empty
|
||||
armature = None if not skel else required_skeleton_to_armature(skel, model_map, scene)
|
||||
|
||||
if armature is not None:
|
||||
preserved_skel = armature.data.swbf_msh_skel
|
||||
for model in scene.models:
|
||||
if to_crc(model.name) in scene.skeleton or model.model_type == ModelType.BONE:
|
||||
entry = preserved_skel.add()
|
||||
entry.name = model.name
|
||||
|
||||
|
||||
'''
|
||||
If an armature was created, we need to do a few extra
|
||||
things to ensure the import makes sense in Blender. It can
|
||||
get a bit messy, as XSI + SWBF have very loose requirements
|
||||
when it comes to skin-skeleton parentage.
|
||||
|
||||
If not, we're good.
|
||||
'''
|
||||
if armature is not None:
|
||||
|
||||
has_skin = False
|
||||
|
||||
# Handle armature related parenting
|
||||
for curr_model in scene.models:
|
||||
|
||||
curr_obj = model_map[curr_model.name]
|
||||
|
||||
# Parent all skins to armature
|
||||
if curr_model.model_type == ModelType.SKIN:
|
||||
|
||||
has_skin = True
|
||||
|
||||
worldmat = curr_obj.matrix_world
|
||||
curr_obj.parent = armature
|
||||
curr_obj.parent_type = 'ARMATURE'
|
||||
curr_obj.matrix_world = worldmat
|
||||
|
||||
# Parent the object to a bone if necessary
|
||||
else:
|
||||
parent_bone_name = ""
|
||||
if curr_model.name in armature.data.bones and curr_model.geometry:
|
||||
parent_bone_name = curr_model.name
|
||||
elif curr_model.parent in armature.data.bones and curr_model.name not in armature.data.bones:
|
||||
parent_bone_name = curr_model.parent
|
||||
|
||||
if parent_bone_name:
|
||||
# Not sure what the different mats do, but saving the worldmat and
|
||||
# applying it after clearing the other mats yields correct results...
|
||||
worldmat = curr_obj.matrix_world
|
||||
|
||||
curr_obj.parent = armature
|
||||
curr_obj.parent_type = 'BONE'
|
||||
curr_obj.parent_bone = parent_bone_name
|
||||
# ''
|
||||
curr_obj.matrix_basis = Matrix()
|
||||
curr_obj.matrix_parent_inverse = Matrix()
|
||||
curr_obj.matrix_world = worldmat
|
||||
|
||||
|
||||
'''
|
||||
Sometimes skins are parented to other skins. We need to find the skin highest in the hierarchy and
|
||||
parent all skins to its parent (armature_reparent_obj).
|
||||
|
||||
If no skin exists, we just reparent the armature to the parent of the highest node in the skeleton
|
||||
'''
|
||||
armature_reparent_obj = None
|
||||
if has_skin:
|
||||
for model in sort_by_parent(scene.models):
|
||||
if model.model_type == ModelType.SKIN:
|
||||
armature_reparent_obj = None if not model.parent else model_map[model.parent]
|
||||
else:
|
||||
skeleton_parent_name = skel[0].parent
|
||||
for model in scene.models:
|
||||
if model.name == skeleton_parent_name:
|
||||
armature_reparent_obj = None if not skeleton_parent_name else model_map[skeleton_parent_name]
|
||||
|
||||
# Now we reparent the armature to the node (armature_reparent_obj) we just found
|
||||
if armature_reparent_obj is not None and armature.name != armature_reparent_obj.name:
|
||||
world_tx = armature.matrix_world
|
||||
armature.parent = armature_reparent_obj
|
||||
armature.matrix_basis = Matrix()
|
||||
armature.matrix_parent_inverse = Matrix()
|
||||
armature.matrix_world = Matrix.Identity(4)
|
||||
|
||||
|
||||
# If a bone exists in the armature, delete its
|
||||
# object counterpart (as created in extract_models)
|
||||
for bone in skel:
|
||||
model_to_remove = model_map[bone.name]
|
||||
if model_to_remove and model_to_remove.parent_bone == "":
|
||||
bpy.data.objects.remove(model_to_remove, do_unlink=True)
|
||||
model_map.pop(bone.name)
|
||||
|
||||
armature.matrix_world = Matrix.Identity(4)
|
||||
|
||||
|
||||
# Lastly, hide all that is hidden in the msh scene
|
||||
for model in scene.models:
|
||||
if model.name in model_map:
|
||||
obj = model_map[model.name]
|
||||
if get_is_model_hidden(obj) and len(obj.children) == 0:
|
||||
obj.hide_set(True)
|
||||
|
@@ -19,7 +19,21 @@ from .msh_anim_gather import extract_anim
|
||||
|
||||
|
||||
|
||||
def create_scene(generate_triangle_strips: bool, apply_modifiers: bool, export_target: str, skel_only: bool, export_anim: bool) -> Scene:
|
||||
def set_scene_animation(scene : Scene, armature_obj : bpy.types.Object):
|
||||
|
||||
if not scene or not armature_obj:
|
||||
return
|
||||
|
||||
root = scene.models[0]
|
||||
scene.animation = extract_anim(armature_obj, root.name)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def create_scene(generate_triangle_strips: bool, apply_modifiers: bool, export_target: str, skel_only: bool) -> Tuple[Scene, bpy.types.Object]:
|
||||
""" Create a msh Scene from the active Blender scene. """
|
||||
|
||||
scene = Scene()
|
||||
@@ -47,17 +61,11 @@ def create_scene(generate_triangle_strips: bool, apply_modifiers: bool, export_t
|
||||
|
||||
root = scene.models[0]
|
||||
|
||||
if export_anim:
|
||||
if armature_obj is not None:
|
||||
scene.animation = extract_anim(armature_obj, root.name)
|
||||
else:
|
||||
raise Exception("Export Error: Could not find an armature object from which to export an animation!")
|
||||
|
||||
if skel_only and root.model_type == ModelType.NULL:
|
||||
if skel_only and (root.model_type == ModelType.NULL or root.model_type == ModelType.BONE):
|
||||
# For ZenAsset
|
||||
inject_dummy_data(root)
|
||||
|
||||
return scene
|
||||
return scene, armature_obj
|
||||
|
||||
|
||||
def create_scene_aabb(scene: Scene) -> SceneAABB:
|
||||
|
178 addons/io_scene_swbf_msh/msh_skeleton_to_blend.py (Normal file)
@@ -0,0 +1,178 @@
|
||||
""" SWBF skeleton-armature mapping functions. By skeleton, we simply
|
||||
mean models that will end up in an armature. Literal SWBF skeletons (zafbins)
|
||||
are not relevant as of now. """
|
||||
|
||||
import bpy
|
||||
import math
|
||||
|
||||
from typing import List, Set, Dict, Tuple
|
||||
|
||||
from .msh_scene import Scene
|
||||
from .msh_model import *
|
||||
from .msh_model_utilities import *
|
||||
|
||||
from .crc import *
|
||||
|
||||
|
||||
'''
|
||||
Creates armature from the required nodes.
|
||||
Assumes the required_skeleton is already sorted by parent.
|
||||
|
||||
Uses model_map to get the world matrix of each bone (hacky, see NOTE)
|
||||
'''
|
||||
def required_skeleton_to_armature(required_skeleton : List[Model], model_map : Dict[str, bpy.types.Object], msh_scene : Scene) -> bpy.types.Object:
|
||||
|
||||
armature = bpy.data.armatures.new("skeleton")
|
||||
armature_obj = bpy.data.objects.new("skeleton", armature)
|
||||
armature_obj.matrix_world = Matrix.Identity(4)
|
||||
bpy.context.view_layer.active_layer_collection.collection.objects.link(armature_obj)
|
||||
|
||||
|
||||
bones_set = set([model.name for model in required_skeleton])
|
||||
|
||||
armature_obj.select_set(True)
|
||||
bpy.context.view_layer.objects.active = armature_obj
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
for bone in required_skeleton:
|
||||
|
||||
edit_bone = armature.edit_bones.new(bone.name)
|
||||
|
||||
if bone.parent and bone.parent in bones_set:
|
||||
edit_bone.parent = armature.edit_bones[bone.parent]
|
||||
|
||||
'''
|
||||
NOTE: I recall there being some rare issue with the get_world_matrix utility func.
|
||||
Never bothered to figure it out and referencing the bone object's world mat always works.
|
||||
Bone objects will be deleted later.
|
||||
'''
|
||||
bone_obj = model_map[bone.name]
|
||||
|
||||
# TODO: This will lead to mistranslated bones when armature is reparented!
|
||||
edit_bone.matrix = bone_obj.matrix_world
|
||||
edit_bone.tail = bone_obj.matrix_world @ Vector((0.0,1.0,0.0))
|
||||
|
||||
bone_children = [b for b in get_model_children(bone, required_skeleton)]
|
||||
|
||||
'''
|
||||
Perhaps we'll add an option for importing bones tip-to-tail, but that would
|
||||
require preserving their original transforms as changing the tail position
|
||||
changes the bones' transform...
|
||||
'''
|
||||
tail_pos = Vector()
|
||||
if bone_children:
|
||||
for bone_child in bone_children:
|
||||
tail_pos += bone_obj.matrix_world.translation
|
||||
tail_pos = tail_pos / len(bone_children)
|
||||
edit_bone.length = .5 #(tail_pos - edit_bone.head).magnitude
|
||||
else:
|
||||
bone_length = .5# edit_bone.parent.length if edit_bone.parent is not None else .5
|
||||
edit_bone.tail = bone_obj.matrix_world @ Vector((0.0,bone_length,0.0))
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
armature_obj.select_set(True)
|
||||
bpy.context.view_layer.update()
|
||||
|
||||
return armature_obj
|
||||
|
||||
|
||||
|
||||
|
||||
'''
|
||||
Ok, so this method is crucial. What this does is:
|
||||
1) Find all nodes that are weighted to by skinned segments.
|
||||
2) A node must be included in the armature if it:
|
||||
- is in SKL2 and is not the scene root
|
||||
- has model_type == BONE
|
||||
- is weighted to
|
||||
- has a parent and child that must be in the armature
|
||||
|
||||
This may need a lot of adjustments; I don't think I can prove its validity, but it has worked very well
|
||||
and handles all stock + ZETools + Pandemic XSI exporter models I've tested
|
||||
'''
|
||||
def extract_required_skeleton(scene: Scene) -> List[Model]:
|
||||
|
||||
# Will map Model names to Models in scene, for convenience
|
||||
model_dict : Dict[str, Model] = {}
|
||||
|
||||
'''
|
||||
Will contain hashes of all models that definitely need to be in the skeleton/armature.
|
||||
We initialize it with the contents of SKL2 i.e. the nodes that are animated.
|
||||
For now this includes the scene root, but that'll be excluded later.
|
||||
'''
|
||||
skeleton_hashes = set(scene.skeleton)
|
||||
|
||||
'''
|
||||
We also need to add all nodes that are weighted to. These are not necessarily in
|
||||
SKL2, as SKL2 seems to only reference nodes that are keyframed.
|
||||
However, sometimes SKL2 is not included when it should be, but it can be mostly recovered
|
||||
by checking which models are BONEs.
|
||||
'''
|
||||
for model in scene.models:
|
||||
model_dict[model.name] = model
|
||||
|
||||
if model.model_type == ModelType.BONE:
|
||||
skeleton_hashes.add(to_crc(model.name))
|
||||
|
||||
elif model.geometry:
|
||||
for seg in model.geometry:
|
||||
if seg.weights:
|
||||
for weight_set in seg.weights:
|
||||
for weight in weight_set:
|
||||
model_weighted_to = scene.models[weight.bone]
|
||||
|
||||
if to_crc(model_weighted_to.name) not in skeleton_hashes:
|
||||
skeleton_hashes.add(to_crc(model_weighted_to.name))
|
||||
|
||||
# The result of this function (to be sorted by parent)
|
||||
required_skeleton_models = []
|
||||
|
||||
# Set of nodes to be included in required skeleton/were visited
|
||||
visited_nodes = set()
|
||||
|
||||
'''
|
||||
Here we add all skeleton nodes (except root) and any necessary ancestors to the armature.
|
||||
- e.g. in bone_x/eff_x/eff_y, the effectors do not have to be in armature, as they are not ancestors of a bone
|
||||
- but in bone_x/eff_x/eff_y/bone_y, they do.
|
||||
'''
|
||||
for bone in sort_by_parent(scene.models):
|
||||
|
||||
# make sure we exclude the scene root and any nodes irrelevant to the armature
|
||||
if not bone.parent or to_crc(bone.name) not in skeleton_hashes:
|
||||
continue
|
||||
|
||||
potential_bones = [bone]
|
||||
visited_nodes.add(bone.name)
|
||||
|
||||
# Stacked transform will be needed if we decide to include an option for excluding effectors/roots or
|
||||
# connecting bones tip-to-tail
|
||||
#stacked_transform = model_transform_to_matrix(bone.transform)
|
||||
|
||||
curr_ancestor = model_dict[bone.parent]
|
||||
|
||||
while True:
|
||||
|
||||
# If we hit a non-skin scene root, that means we just add the bone we started with, no ancestors.
|
||||
if not curr_ancestor.parent and curr_ancestor.model_type != ModelType.SKIN:
|
||||
required_skeleton_models.append(bone)
|
||||
visited_nodes.add(bone.name)
|
||||
break
|
||||
|
||||
# If we encounter another bone, a skin, or a previously visited object, we need to add the bone and its
|
||||
# ancestors.
|
||||
elif to_crc(curr_ancestor.name) in scene.skeleton or curr_ancestor.model_type == ModelType.SKIN or curr_ancestor.name in visited_nodes:
|
||||
for potential_bone in potential_bones:
|
||||
required_skeleton_models.append(potential_bone)
|
||||
visited_nodes.add(potential_bone.name)
|
||||
break
|
||||
|
||||
# Add ancestor to potential bones, update next ancestor
|
||||
else:
|
||||
if curr_ancestor.name not in visited_nodes:
|
||||
potential_bones.insert(0, curr_ancestor)
|
||||
curr_ancestor = model_dict[curr_ancestor.parent]
|
||||
|
||||
#stacked_transform = model_transform_to_matrix(curr_ancestor.transform) @ stacked_transform
|
||||
|
||||
return required_skeleton_models
|
||||
|
@@ -1,4 +1,4 @@
|
||||
""" Helpers for SWBF skeleton-armature mapping """
|
||||
""" Armature -> SWBF skeleton mapping functions """
|
||||
|
||||
import bpy
|
||||
import math
|
||||
@@ -12,9 +12,11 @@ from .msh_model_utilities import *
|
||||
from .crc import *
|
||||
|
||||
|
||||
def has_preserved_skeleton(armature : bpy.types.Armature):
|
||||
return len(armature.data.swbf_msh_skel) > 0
|
||||
|
||||
def get_bone_world_matrix(armature: bpy.types.Object, bone_name: str) -> Matrix:
|
||||
if bone_name in armature.data.bones:
|
||||
return armature.matrix_world @ armature.data.bones[bone_name].matrix_local
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
'''Returns all bones that should be marked as BONE'''
|
||||
@@ -33,179 +35,13 @@ def get_real_BONES(armature: bpy.types.Armature) -> Set[str]:
|
||||
|
||||
if len(skel_props) > 0:
|
||||
for bone in skel_props:
|
||||
#print(f"{bone.name} is a real BONE")
|
||||
real_bones.add(bone.name)
|
||||
elif action:
|
||||
if action:
|
||||
for group in armature.animation_data.action.groups:
|
||||
#print(f"{group.name} is a real BONE")
|
||||
real_bones.add(group.name)
|
||||
else:
|
||||
|
||||
if len(skel_props) == 0 and action is None:
|
||||
for bone in armature.data.bones:
|
||||
#print(f"{bone.name} is a real BONE")
|
||||
real_bones.add(bone.name)
|
||||
|
||||
return real_bones
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
'''
|
||||
Creates armature from the required nodes.
|
||||
Assumes the required_skeleton is already sorted by parent.
|
||||
|
||||
Uses model_map to get the world matrix of each bone (hacky, see NOTE)
|
||||
'''
|
||||
def required_skeleton_to_armature(required_skeleton : List[Model], model_map : Dict[str, bpy.types.Object], msh_scene : Scene) -> bpy.types.Object:
|
||||
|
||||
armature = bpy.data.armatures.new("skeleton")
|
||||
armature_obj = bpy.data.objects.new("skeleton", armature)
|
||||
bpy.context.view_layer.active_layer_collection.collection.objects.link(armature_obj)
|
||||
|
||||
|
||||
bones_set = set([model.name for model in required_skeleton])
|
||||
|
||||
armature_obj.select_set(True)
|
||||
bpy.context.view_layer.objects.active = armature_obj
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
for bone in required_skeleton:
|
||||
|
||||
edit_bone = armature.edit_bones.new(bone.name)
|
||||
|
||||
if bone.parent and bone.parent in bones_set:
|
||||
edit_bone.parent = armature.edit_bones[bone.parent]
|
||||
|
||||
'''
|
||||
NOTE: I recall there being some rare issue with the get_world_matrix utility func.
|
||||
Never bothered to figure it out and referencing the bone object's world mat always works.
|
||||
Bone objects will be deleted later.
|
||||
'''
|
||||
bone_obj = model_map[bone.name]
|
||||
|
||||
edit_bone.matrix = bone_obj.matrix_world
|
||||
edit_bone.tail = bone_obj.matrix_world @ Vector((0.0,1.0,0.0))
|
||||
|
||||
bone_children = [b for b in get_model_children(bone, required_skeleton)]
|
||||
|
||||
'''
|
||||
Perhaps we'll add an option for importing bones tip-to-tail, but that would
|
||||
require preserving their original transforms as changing the tail position
|
||||
changes the bones' transform...
|
||||
'''
|
||||
tail_pos = Vector()
|
||||
if bone_children:
|
||||
for bone_child in bone_children:
|
||||
tail_pos += bone_obj.matrix_world.translation
|
||||
tail_pos = tail_pos / len(bone_children)
|
||||
edit_bone.length = .5 #(tail_pos - edit_bone.head).magnitude
|
||||
else:
|
||||
bone_length = .5# edit_bone.parent.length if edit_bone.parent is not None else .5
|
||||
edit_bone.tail = bone_obj.matrix_world @ Vector((0.0,bone_length,0.0))
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
armature_obj.select_set(True)
|
||||
bpy.context.view_layer.update()
|
||||
|
||||
return armature_obj
|
||||
|
||||
|
||||
|
||||
|
||||
'''
|
||||
Ok, so this method is crucial. What this does is:
|
||||
1) Find all nodes that are weighted to by skinned segments.
|
||||
2) A node must be included in the armature if it:
|
||||
- is in SKL2 and is not the scene root
|
||||
- has model_type == BONE
|
||||
- is weighted to
|
||||
- has a parent and child that must be in the armature
|
||||
|
||||
This may need a lot of adjustments; I don't think I can prove its validity, but it has worked very well
|
||||
and handles all stock + ZETools + Pandemic XSI exporter models I've tested
|
||||
'''
|
||||
def extract_required_skeleton(scene: Scene) -> List[Model]:
|
||||
|
||||
# Will map Model names to Models in scene, for convenience
|
||||
model_dict : Dict[str, Model] = {}
|
||||
|
||||
'''
|
||||
Will contain hashes of all models that definitely need to be in the skeleton/armature.
|
||||
We initialize it with the contents of SKL2 i.e. the nodes that are animated.
|
||||
For now this includes the scene root, but that'll be excluded later.
|
||||
'''
|
||||
skeleton_hashes = set(scene.skeleton)
|
||||
|
||||
'''
|
||||
We also need to add all nodes that are weighted to. These are not necessarily in
|
||||
SKL2, as SKL2 seems to only reference nodes that are keyframed.
|
||||
However, sometimes SKL2 is not included when it should be, but it can be mostly recovered
|
||||
by checking which models are BONEs.
|
||||
'''
|
||||
for model in scene.models:
|
||||
model_dict[model.name] = model
|
||||
|
||||
if model.model_type == ModelType.BONE:
|
||||
skeleton_hashes.add(to_crc(model.name))
|
||||
|
||||
elif model.geometry:
|
||||
for seg in model.geometry:
|
||||
if seg.weights:
|
||||
for weight_set in seg.weights:
|
||||
for weight in weight_set:
|
||||
model_weighted_to = scene.models[weight.bone]
|
||||
|
||||
if to_crc(model_weighted_to.name) not in skeleton_hashes:
|
||||
skeleton_hashes.add(to_crc(model_weighted_to.name))
|
||||
|
||||
# The result of this function (to be sorted by parent)
|
||||
required_skeleton_models = []
|
||||
|
||||
# Set of nodes to be included in required skeleton/were visited
|
||||
visited_nodes = set()
|
||||
|
||||
'''
|
||||
Here we add all skeleton nodes (except root) and any necessary ancestors to the armature.
|
||||
- e.g. in bone_x/eff_x/eff_y, the effectors do not have to be in armature, as they are not ancestors of a bone
|
||||
- but in bone_x/eff_x/eff_y/bone_y, they do.
|
||||
'''
|
||||
for bone in sort_by_parent(scene.models):
|
||||
|
||||
# make sure we exclude the scene root and any nodes irrelevant to the armature
|
||||
if not bone.parent or to_crc(bone.name) not in skeleton_hashes:
|
||||
continue
|
||||
|
||||
potential_bones = [bone]
|
||||
visited_nodes.add(bone.name)
|
||||
|
||||
# Stacked transform will be needed if we decide to include an option for excluding effectors/roots or
|
||||
# connecting bones tip-to-tail
|
||||
#stacked_transform = model_transform_to_matrix(bone.transform)
|
||||
|
||||
curr_ancestor = model_dict[bone.parent]
|
||||
|
||||
while True:
|
||||
|
||||
# If we hit a non-skin scene root, that means we just add the bone we started with, no ancestors.
|
||||
if not curr_ancestor.parent and curr_ancestor.model_type != ModelType.SKIN:
|
||||
required_skeleton_models.append(bone)
|
||||
visited_nodes.add(bone.name)
|
||||
break
|
||||
|
||||
# If we encounter another bone, a skin, or a previously visited object, we need to add the bone and its
|
||||
# ancestors.
|
||||
elif to_crc(curr_ancestor.name) in scene.skeleton or curr_ancestor.model_type == ModelType.SKIN or curr_ancestor.name in visited_nodes:
|
||||
for potential_bone in potential_bones:
|
||||
required_skeleton_models.append(potential_bone)
|
||||
visited_nodes.add(potential_bone.name)
|
||||
break
|
||||
|
||||
# Add ancestor to potential bones, update next ancestor
|
||||
else:
|
||||
if curr_ancestor.name not in visited_nodes:
|
||||
potential_bones.insert(0, curr_ancestor)
|
||||
curr_ancestor = model_dict[curr_ancestor.parent]
|
||||
|
||||
#stacked_transform = model_transform_to_matrix(curr_ancestor.transform) @ stacked_transform
|
||||
|
||||
return required_skeleton_models
|
||||
|
@@ -1,374 +0,0 @@
|
||||
""" Gathers the Blender objects from the current scene and returns them as a list of
|
||||
Model objects. """
|
||||
|
||||
import bpy
|
||||
import bmesh
|
||||
import math
|
||||
|
||||
from enum import Enum
|
||||
from typing import List, Set, Dict, Tuple
|
||||
|
||||
from .msh_scene import Scene
|
||||
from .msh_material_to_blend import *
|
||||
from .msh_model import *
|
||||
from .msh_skeleton_utilities import *
|
||||
from .msh_model_gather import get_is_model_hidden
|
||||
|
||||
|
||||
from .crc import *
|
||||
|
||||
import os
|
||||
|
||||
|
||||
|
||||
# Extracts and applies anims in the scene to the currently selected armature
|
||||
def extract_and_apply_anim(filename : str, scene : Scene):
|
||||
|
||||
arma = bpy.context.view_layer.objects.active
|
||||
|
||||
if not arma or arma.type != 'ARMATURE':
|
||||
raise Exception("Select an armature to attach the imported animation to!")
|
||||
|
||||
if scene.animation is None:
|
||||
raise Exception("No animation found in msh file!")
|
||||
|
||||
else:
|
||||
head, tail = os.path.split(filename)
|
||||
anim_name = tail.split(".")[0]
|
||||
action = bpy.data.actions.new(anim_name)
|
||||
action.use_fake_user = True
|
||||
|
||||
if not arma.animation_data:
|
||||
arma.animation_data_create()
|
||||
|
||||
|
||||
# Record the starting transforms of each bone. Pose space is relative
|
||||
# to the bones' starting transforms, i.e. their edit-mode transforms.
|
||||
bone_bind_poses = {}
|
||||
|
||||
bpy.context.view_layer.objects.active = arma
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
for edit_bone in arma.data.edit_bones:
|
||||
if edit_bone.parent:
|
||||
bone_local = edit_bone.parent.matrix.inverted() @ edit_bone.matrix
|
||||
else:
|
||||
bone_local = arma.matrix_local @ edit_bone.matrix
|
||||
|
||||
bone_bind_poses[edit_bone.name] = bone_local.inverted()
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
|
||||
for bone in arma.pose.bones:
|
||||
if to_crc(bone.name) in scene.animation.bone_frames:
|
||||
|
||||
bind_mat = bone_bind_poses[bone.name]
|
||||
|
||||
translation_frames, rotation_frames = scene.animation.bone_frames[to_crc(bone.name)]
|
||||
|
||||
loc_data_path = "pose.bones[\"{}\"].location".format(bone.name)
|
||||
rot_data_path = "pose.bones[\"{}\"].rotation_quaternion".format(bone.name)
|
||||
|
||||
|
||||
fcurve_rot_w = action.fcurves.new(rot_data_path, index=0, action_group=bone.name)
|
||||
fcurve_rot_x = action.fcurves.new(rot_data_path, index=1, action_group=bone.name)
|
||||
fcurve_rot_y = action.fcurves.new(rot_data_path, index=2, action_group=bone.name)
|
||||
fcurve_rot_z = action.fcurves.new(rot_data_path, index=3, action_group=bone.name)
|
||||
|
||||
for frame in rotation_frames:
|
||||
i = frame.index
|
||||
q = (bind_mat @ convert_rotation_space(frame.rotation).to_matrix().to_4x4()).to_quaternion()
|
||||
|
||||
fcurve_rot_w.keyframe_points.insert(i,q.w)
|
||||
fcurve_rot_x.keyframe_points.insert(i,q.x)
|
||||
fcurve_rot_y.keyframe_points.insert(i,q.y)
|
||||
fcurve_rot_z.keyframe_points.insert(i,q.z)
|
||||
|
||||
fcurve_loc_x = action.fcurves.new(loc_data_path, index=0, action_group=bone.name)
|
||||
fcurve_loc_y = action.fcurves.new(loc_data_path, index=1, action_group=bone.name)
|
||||
fcurve_loc_z = action.fcurves.new(loc_data_path, index=2, action_group=bone.name)
|
||||
|
||||
for frame in translation_frames:
|
||||
i = frame.index
|
||||
t = (bind_mat @ Matrix.Translation(convert_vector_space(frame.translation))).translation
|
||||
|
||||
fcurve_loc_x.keyframe_points.insert(i,t.x)
|
||||
fcurve_loc_y.keyframe_points.insert(i,t.y)
|
||||
fcurve_loc_z.keyframe_points.insert(i,t.z)
|
||||
|
||||
arma.animation_data.action = action
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Create the msh hierarchy. Armatures are not created here. Much of this could use some optimization...
|
||||
def extract_models(scene: Scene, materials_map : Dict[str, bpy.types.Material]) -> Dict[str, bpy.types.Object]:
|
||||
|
||||
# This will be filled with model names -> Blender objects and returned
|
||||
model_map : Dict[str, bpy.types.Object] = {}
|
||||
|
||||
sorted_models : List[Model] = sort_by_parent(scene.models)
|
||||
|
||||
for model in sorted_models:
|
||||
new_obj = None
|
||||
|
||||
|
||||
if model.model_type == ModelType.STATIC or model.model_type == ModelType.SKIN or model.model_type == ModelType.SHADOWVOLUME:
|
||||
|
||||
new_mesh = bpy.data.meshes.new(model.name)
|
||||
verts = []
|
||||
faces = []
|
||||
offset = 0
|
||||
|
||||
full_texcoords = []
|
||||
|
||||
weights_offsets = {}
|
||||
|
||||
face_range_to_material_index = []
|
||||
|
||||
if model.geometry:
|
||||
|
||||
#if model.collisionprimitive is None:
|
||||
# print(f"On model: {model.name}")
|
||||
|
||||
for i,seg in enumerate(model.geometry):
|
||||
|
||||
verts += [tuple(convert_vector_space(v)) for v in seg.positions]
|
||||
|
||||
#if model.collisionprimitive is None:
|
||||
# print("Importing segment with material: {} with and {} verts".format(seg.material_name, len(seg.positions)))
|
||||
|
||||
if seg.weights:
|
||||
weights_offsets[offset] = seg.weights
|
||||
|
||||
if seg.texcoords is not None:
|
||||
full_texcoords += seg.texcoords
|
||||
else:
|
||||
full_texcoords += [(0.0,0.0) for _ in range(len(seg.positions))]
|
||||
|
||||
face_range_lower = len(faces)
|
||||
|
||||
if seg.triangles:
|
||||
faces += [tuple([ind + offset for ind in tri]) for tri in seg.triangles]
|
||||
else:
|
||||
for strip in seg.triangle_strips:
|
||||
for i in range(len(strip) - 2):
|
||||
face = tuple([offset + strip[j] for j in range(i,i+3)])
|
||||
faces.append(face)
|
||||
|
||||
face_range_upper = len(faces)
|
||||
face_range_to_material_index.append((face_range_lower, face_range_upper, i))
|
||||
|
||||
offset += len(seg.positions)
|
||||
|
||||
new_mesh.from_pydata(verts, [], faces)
|
||||
new_mesh.update()
|
||||
new_mesh.validate()
|
||||
|
||||
|
||||
# If tex coords are present, add material and UV data
|
||||
if full_texcoords:
|
||||
|
||||
edit_mesh = bmesh.new()
|
||||
edit_mesh.from_mesh(new_mesh)
|
||||
|
||||
uvlayer = edit_mesh.loops.layers.uv.verify()
|
||||
|
||||
for edit_mesh_face in edit_mesh.faces:
|
||||
face_index = edit_mesh_face.index
|
||||
mesh_face = faces[face_index]
|
||||
|
||||
for frL, frU, ind in face_range_to_material_index:
|
||||
if face_index >= frL and face_index < frU:
|
||||
edit_mesh_face.material_index = ind
|
||||
|
||||
for i,loop in enumerate(edit_mesh_face.loops):
|
||||
|
||||
texcoord = full_texcoords[mesh_face[i]]
|
||||
loop[uvlayer].uv = tuple([texcoord.x, texcoord.y])
|
||||
|
||||
edit_mesh.to_mesh(new_mesh)
|
||||
edit_mesh.free()
|
||||
|
||||
|
||||
new_obj = bpy.data.objects.new(new_mesh.name, new_mesh)
|
||||
|
||||
|
||||
vertex_groups_indicies = {}
|
||||
|
||||
for offset in weights_offsets:
|
||||
for i, weight_set in enumerate(weights_offsets[offset]):
|
||||
for weight in weight_set:
|
||||
index = weight.bone
|
||||
|
||||
if index not in vertex_groups_indicies:
|
||||
model_name = scene.models[index].name
|
||||
vertex_groups_indicies[index] = new_obj.vertex_groups.new(name=model_name)
|
||||
|
||||
vertex_groups_indicies[index].add([offset + i], weight.weight, 'ADD')
|
||||
|
||||
|
||||
'''
|
||||
Assign Material slots
|
||||
'''
|
||||
if model.geometry:
|
||||
for seg in model.geometry:
|
||||
if seg.material_name:
|
||||
material = materials_map[seg.material_name]
|
||||
new_obj.data.materials.append(material)
|
||||
|
||||
else:
|
||||
|
||||
new_obj = bpy.data.objects.new(model.name, None)
|
||||
new_obj.empty_display_size = 1
|
||||
new_obj.empty_display_type = 'PLAIN_AXES'
|
||||
|
||||
|
||||
model_map[model.name] = new_obj
|
||||
|
||||
if model.parent:
|
||||
new_obj.parent = model_map[model.parent]
|
||||
|
||||
new_obj.location = convert_vector_space(model.transform.translation)
|
||||
new_obj.rotation_mode = "QUATERNION"
|
||||
new_obj.rotation_quaternion = convert_rotation_space(model.transform.rotation)
|
||||
|
||||
if model.collisionprimitive is not None:
|
||||
new_obj.swbf_msh_coll_prim.prim_type = model.collisionprimitive.shape.value
|
||||
|
||||
bpy.context.collection.objects.link(new_obj)
|
||||
|
||||
|
||||
return model_map
|
||||
|
||||
|
||||
# TODO: Add to custom material info struct, maybe some material conversion/import?
|
||||
def extract_materials(folder_path: str, scene: Scene) -> Dict[str, bpy.types.Material]:
|
||||
|
||||
extracted_materials : Dict[str, bpy.types.Material] = {}
|
||||
|
||||
for material_name, material in scene.materials.items():
|
||||
|
||||
new_mat = bpy.data.materials.new(name=material_name)
|
||||
new_mat.use_nodes = True
|
||||
bsdf = new_mat.node_tree.nodes["Principled BSDF"]
|
||||
|
||||
diffuse_texture_path = find_texture_path(folder_path, material.texture0)
|
||||
|
||||
if diffuse_texture_path:
|
||||
texImage = new_mat.node_tree.nodes.new('ShaderNodeTexImage')
|
||||
texImage.image = bpy.data.images.load(diffuse_texture_path)
|
||||
new_mat.node_tree.links.new(bsdf.inputs['Base Color'], texImage.outputs['Color'])
|
||||
|
||||
fill_material_props(material, new_mat.swbf_msh_mat)
|
||||
|
||||
extracted_materials[material_name] = new_mat
|
||||
|
||||
return extracted_materials
|
||||
|
||||
|
||||
|
||||
def extract_scene(filepath: str, scene: Scene):
|
||||
|
||||
folder = os.path.join(os.path.dirname(filepath),"")
|
||||
|
||||
# material_map maps Material names to Blender materials
|
||||
material_map = extract_materials(folder, scene)
|
||||
|
||||
# model_map maps Model names to Blender objects.
|
||||
model_map = extract_models(scene, material_map)
|
||||
|
||||
|
||||
# skel contains all models needed in an armature
|
||||
skel = extract_required_skeleton(scene)
|
||||
|
||||
# Create the armature if skel is non-empty
|
||||
armature = None if not skel else required_skeleton_to_armature(skel, model_map, scene)
|
||||
|
||||
if armature is not None:
|
||||
preserved_skel = armature.data.swbf_msh_skel
|
||||
for model in scene.models:
|
||||
if to_crc(model.name) in scene.skeleton or model.model_type == ModelType.BONE:
|
||||
entry = preserved_skel.add()
|
||||
entry.name = model.name
|
||||
|
||||
|
||||
'''
|
||||
If an armature was created, we need to do a few extra
|
||||
things to ensure the import makes sense in Blender. It can
|
||||
get a bit messy, as XSI + SWBF have very loose requirements
|
||||
when it comes to skin-skeleton parentage.
|
||||
|
||||
If not, we're good.
|
||||
'''
|
||||
if armature is not None:
|
||||
|
||||
has_skin = False
|
||||
|
||||
# Handle armature related parenting
|
||||
for curr_model in scene.models:
|
||||
|
||||
curr_obj = model_map[curr_model.name]
|
||||
|
||||
# Parent all skins to armature
|
||||
if curr_model.model_type == ModelType.SKIN:
|
||||
|
||||
has_skin = True
|
||||
|
||||
curr_obj.parent = armature
|
||||
curr_obj.parent_type = 'ARMATURE'
|
||||
|
||||
# Parent the object to a bone if necessary
|
||||
else:
|
||||
if curr_model.parent in armature.data.bones and curr_model.name not in armature.data.bones:
|
||||
# Not sure what the different mats do, but saving the worldmat and
|
||||
# applying it after clearing the other mats yields correct results...
|
||||
worldmat = curr_obj.matrix_world
|
||||
|
||||
curr_obj.parent = armature
|
||||
curr_obj.parent_type = 'BONE'
|
||||
curr_obj.parent_bone = curr_model.parent
|
||||
# ''
|
||||
curr_obj.matrix_basis = Matrix()
|
||||
curr_obj.matrix_parent_inverse = Matrix()
|
||||
curr_obj.matrix_world = worldmat
|
||||
|
||||
'''
|
||||
Sometimes skins are parented to other skins. We need to find the skin highest in the hierarchy and
|
||||
parent all skins to its parent (armature_reparent_obj).
|
||||
|
||||
If no skin exists, we just reparent the armature to the parent of the highest node in the skeleton
|
||||
'''
|
||||
armature_reparent_obj = None
|
||||
if has_skin:
|
||||
for model in sort_by_parent(scene.models):
|
||||
if model.model_type == ModelType.SKIN:
|
||||
armature_reparent_obj = None if not model.parent else model_map[model.parent]
|
||||
else:
|
||||
skeleton_parent_name = skel[0].parent
|
||||
for model in scene.models:
|
||||
if model.name == skeleton_parent_name:
|
||||
armature_reparent_obj = None if not skeleton_parent_name else model_map[skeleton_parent_name]
|
||||
|
||||
# Now we reparent the armature to the node (armature_reparent_obj) we just found
|
||||
if armature_reparent_obj is not None and armature.name != armature_reparent_obj.name:
|
||||
armature.parent = armature_reparent_obj
|
||||
|
||||
|
||||
# If a bone exists in the armature, delete its
|
||||
# object counterpart (as created in extract_models)
|
||||
for bone in skel:
|
||||
model_to_remove = model_map[bone.name]
|
||||
if model_to_remove:
|
||||
bpy.data.objects.remove(model_to_remove, do_unlink=True)
|
||||
model_map.pop(bone.name)
|
||||
|
||||
|
||||
# Lastly, hide all that is hidden in the msh scene
|
||||
for model in scene.models:
|
||||
if model.name in model_map:
|
||||
obj = model_map[model.name]
|
||||
if get_is_model_hidden(obj) and len(obj.children) == 0:
|
||||
obj.hide_set(True)
|
46 addons/io_scene_swbf_msh/option_file_parser.py (Normal file)
@@ -0,0 +1,46 @@
|
||||
""" Parses .tga.option and .msh.option files. Only used with the former as of now. """
|
||||
|
||||
import os
|
||||
|
||||
|
||||
class MungeOptions:
|
||||
|
||||
def __init__(self, path_to_option_file):
|
||||
self.options = {}
|
||||
|
||||
if os.path.exists(path_to_option_file):
|
||||
with open(path_to_option_file, 'r') as option_file:
|
||||
option_text = option_file.read()
|
||||
|
||||
option_parts = option_text.split()
|
||||
|
||||
current_parameter = ""
|
||||
|
||||
for part in option_parts:
|
||||
if part.startswith("-"):
|
||||
current_parameter = part[1:]
|
||||
self.options[current_parameter] = ""
|
||||
elif current_parameter:
|
||||
current_value = self.options[current_parameter]
|
||||
# Keep adding to value in case there are vector options
|
||||
self.options[current_parameter] += part if not current_value else (" " + part)
|
||||
|
||||
def is_option_present(self, param):
|
||||
return param in self.options
|
||||
|
||||
def get_bool(self, param, default=False):
|
||||
return True if param in self.options else default
|
||||
|
||||
def get_float(self, param, default=0.0):
|
||||
if param in self.options:
|
||||
try:
|
||||
result = float(self.options[param])
|
||||
except:
|
||||
result = default
|
||||
finally:
|
||||
return result
|
||||
else:
|
||||
return default
|
||||
|
||||
def get_string(self, param, default=""):
|
||||
return self.options.get(param, default)
|
BIN docs/images/mat_fill_op.png (Normal file; binary file not shown; 375 KiB)
@@ -24,14 +24,15 @@
|
||||
+ [Materials.Flags](#materialsflags)
|
||||
+ [Materials.Data](#materialsdata)
|
||||
+ [Materials.Texture Maps](#materialstexture-maps)
|
||||
+ [Materials Operators](#materials-operators)
|
||||
- [Skeletons and Skinning](#skeletons-and-skinning)
|
||||
+ [XSI vs Blender](#xsi-vs-blender)
|
||||
+ [Example Skin Hierarchy](#example-skin-hierarchy)
|
||||
+ [Example Bone-Parent Hierarchy](#example-bone-parent-hierarchy)
|
||||
+ [Skeleton Notes](#skeleton-notes)
|
||||
+ [Skinning Notes](#skinning-notes)
|
||||
- [Animation](#animation)
|
||||
+ [Actions and Animations](#actions-and-animations)
|
||||
+ [Exporter Animation Options](#exporter-animation-options)
|
||||
+ [Animation Notes](#animation-notes)
|
||||
- [Appendices](#appendices)
|
||||
+ [Appendix Detail Map Blending](#appendix-detail-map-blending)
|
||||
@@ -70,15 +71,21 @@ Controls what to export from Blender.
|
||||
#### Apply Modifiers
|
||||
Whether to apply [Modifiers](https://docs.blender.org/manual/en/latest/modeling/modifiers/index.html) during export or not.
|
||||
|
||||
#### Export As Skeleton
|
||||
#### Export Animation(s)
|
||||
|
||||
| | |
|
||||
| ---------------------- | ---------------------------------------------------------------------- |
|
||||
| None | Export the current active scene without animation data. |
|
||||
| Active | Export the current active scene with animation data extracted from the active Action on the scene's Armature. To save space, the exporter will exclude geometry data from the resulting .msh file but will ensure the root object has some geometry and a material for munge compatibility. |
|
||||
| Batch | Export the current active scene with animation data, producing a separate .msh file for, and named after, each Action in the scene. Exported files will be placed in the selected directory. If a file is selected, they will be placed in that file's directory. This option essentially repeats the export behavior of "Active" for each Action in the current Scene. Be sure to remove an Action from the scene if you do not want it exported! |
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Excludes geometry data from the exported .msh file, BUT ensures that the scene root has dummy geometry to satisfy the animation munger.
|
||||
|
||||
#### Export With Animation
|
||||
|
||||
Convert the active Action on the scene's armature to an SWBF animation and include it in the exported file.
|
||||
|
||||
(Please see [Exporter Animation Options](#exporter-animation-options) for more details on the previous two parameters)
|
||||
|
||||
|
||||
### Export Failures
|
||||
@@ -131,6 +138,12 @@ This error indicates that an object in your scene ends with what looks like an L
|
||||
|
||||
To solve this error consult the [LOD Models](#lod-models) section and rename the problematic objects to use the correct LOD suffix.
|
||||
|
||||
#### "RuntimeError: Could not find an Armature object from which to export animations!"
|
||||
This error is thrown when you intend to export one or more animations but no Armature is found among the objects to be exported.
|
||||
|
||||
|
||||
|
||||
|
||||
### Export Behaviour to Know About
|
||||
|
||||
#### Materials for .msh files must be managed through the UI panel named "SWBF .msh Properties" added under the Material context.
|
||||
@@ -186,15 +199,18 @@ Can't imagine this coming up much (Maybe if your model is just for collisions
|
||||
#### Meshes without any materials will be assigned the first material in the .msh file.
|
||||
This shouldn't be relevant as any mesh that you haven't assigned a material to is likely to just be collision geometry or shadow geometry.
|
||||
|
||||
#### Dummy frames for the scene root will be included when exporting an animation.
|
||||
If the scene root is not keyed in the Action(s) to be exported, dummy frames for the scene root with no translation or rotation will be added to the exported animation.
|
||||
|
||||
## Importer
|
||||
|
||||
This plugin can import .msh files as well as .zaa_ and .zaabin files. .msh files can be imported as models or animations.
|
||||
This plugin can import one or more .msh files as well as .zaabin files. .msh files can be imported as models or animations.
|
||||
|
||||
### Import Properties
|
||||
|
||||
#### Import Animation Only
|
||||
#### Import Animation(s)
|
||||
|
||||
If you wish to import an .msh or zaa_/zaabin file as an animation, check this box. This will only work so long as you have preselected an armature. The imported animation will then be added to the armature as an Action. If an Action with the same name already exists, the importer will replace it.
|
||||
If you wish to import animation data from one or more .msh files or a single .zaabin file, check this box. This will only work so long as you have preselected an Armature! The imported animations will then be added to the Armature as Actions. If an Action with the same name already exists, the importer will replace it.
|
||||
|
||||
### Import Failures
|
||||
|
||||
@@ -561,6 +577,23 @@ Environment map for the material. Used to provide static reflections for the mod
|
||||
Distortion maps control how Refractive materials distort the scene behind them. Should be a Normal Map with '-forceformat v8u8' in its '.tga.option' file. See Appendix .tga.option Files.
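For reference, a minimal sketch of such an option file (the texture name here is made up) would be a `water_distortion.tga.option` file sitting next to `water_distortion.tga` and containing just:

```
-forceformat v8u8
```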
|
||||
|
||||
|
||||
### Materials Operators
|
||||
|
||||
#### Fill SWBF Properties
|
||||
Fills in SWBF properties of each material used by all selected objects. This operator will only work with materials that have ```Use Nodes``` enabled and will just fill in the Diffuse Map property with the name of the image used by the material's Principled BSDF node.
|
||||
|
||||
It is used by selecting the relevant objects and choosing `SWBF` > `Fill SWBF Material Properties` in the `Object Context` menu:
|
||||
<img src="https://raw.githubusercontent.com/SleepKiller/SWBF-msh-Blender-IO/master/docs/images/mat_fill_op.png" width="400" height="400"/>
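For the curious, the operator's effect is roughly equivalent to the following sketch. This is simplified and is not the addon's actual code; it assumes the diffuse map is exposed as `swbf_msh_mat.diffuse_map` on the material:

```python
import bpy

for obj in bpy.context.selected_objects:
    for slot in obj.material_slots:
        mat = slot.material
        if mat is None or not mat.use_nodes:
            continue  # the operator only works on node-based materials
        bsdf = mat.node_tree.nodes.get("Principled BSDF")
        if bsdf is None:
            continue
        # Copy the name of the image feeding Base Color into the SWBF Diffuse Map property
        for link in bsdf.inputs["Base Color"].links:
            if link.from_node.type == 'TEX_IMAGE' and link.from_node.image:
                mat.swbf_msh_mat.diffuse_map = link.from_node.image.name
```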
|
||||
|
||||
#### Generate SWBF Nodes
|
||||
Generates shader nodes that attempt to emulate the SWBF properties of a selected material. Call this operator by clicking the `Generate Nodes` button found at the bottom of the selected material's SWBF properties panel. Only transparency settings and diffuse texture mapping are currently supported. When importing a .msh file, this operator is automatically called on each material extracted from the file.
|
||||
|
||||
You must click the `Generate Nodes` button every time you edit the material properties and wish to see the results. The generated nodes will not automatically update when the SWBF properties are changed.
|
||||
|
||||
It is not necessary to call this operator for materials to correctly export.
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -614,6 +647,8 @@ The same goes for objects that are children of an armature, but are parented dir
|
||||
* bone_doorright
|
||||
* right_door_mesh
|
||||
|
||||
### Skeleton Notes
|
||||
1. Animated msh bones can have geometry. To accomplish this, create a bone in the scene's armature and a mesh object with the same name as the bone. Parent the mesh object to the bone and make sure their origins are equal (see the sketch below). When exported, the bone and the mesh object will be merged into one. The inverse process occurs when importing bones with geometry.
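As a rough script-level sketch of those steps (the armature, bone, and object names are hypothetical; the parenting pattern mirrors what the importer itself does when attaching objects to bones):

```python
import bpy
from mathutils import Matrix

arma = bpy.data.objects["Armature"]        # the scene's armature object
mesh_obj = bpy.data.objects["bone_door"]   # mesh object named after the bone

# Make the origins match: use the bone's rest transform in world space.
bone_world = arma.matrix_world @ arma.data.bones["bone_door"].matrix_local

mesh_obj.parent = arma
mesh_obj.parent_type = 'BONE'
mesh_obj.parent_bone = "bone_door"

# Clear the local/parent-inverse matrices, then set the world transform so the
# mesh object sits exactly at the bone's origin.
mesh_obj.matrix_basis = Matrix()
mesh_obj.matrix_parent_inverse = Matrix()
mesh_obj.matrix_world = bone_world
```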
|
||||
|
||||
### Skinning Notes
|
||||
|
||||
@@ -640,17 +675,6 @@ When exporting an Action, all frames between and including the first and last *k
|
||||
If you have armature bones that are weighted to by a skinned object, but you do not wish for them to be exported as part of the animated skeleton, don't keyframe them. The exported animation will only include bones that are explicitly keyframed at least once in the Action.
|
||||
|
||||
|
||||
### Exporter Animation Options
|
||||
|
||||
#### ```Export As Skeleton```
|
||||
|
||||
Excludes geometry data from the exported .msh file, since ```zenasset``` ignores it. Skins and static meshes will be exported as nulls. However, since ```zenasset``` does mandate that the root object have some material and geometry data, this option will add dummy geometry and a material to the .msh file's scene root. This isn't necessary for exporting animations, but is highly recommended to avoid writing unnecessary data and to ensure the root object is acceptable to ```zenasset```.
|
||||
|
||||
#### ```Export With Animation```
|
||||
|
||||
If checked, the action currently attached to the scene's armature will be included in the exported msh file as an animation. Dummy frames are also included for the scene root to satisfy ```zenasset```. You do not have to explicitly animate the scene root!
|
||||
|
||||
So, if you wish to export an animation to be munged, it is best to select both ```Export As Skeleton``` and ```Export With Animation.```
|
||||
|
||||
### Animation notes:
|
||||
|
||||
|