Compare commits

master...v1.0.1

No commits in common. "master" and "v1.0.1" have entirely different histories.

14 changed files with 113 additions and 451 deletions

View File

@@ -1,7 +1,7 @@
 bl_info = {
     'name': 'SWBF .msh Import-Export',
-    'author': 'Will Snyder, PrismaticFlower',
-    "version": (1, 3, 0),
+    'author': 'Will Snyder, SleepKiller',
+    "version": (1, 0, 0),
     'blender': (2, 80, 0),
     'location': 'File > Import-Export',
     'description': 'Export as SWBF .msh file',
@@ -13,9 +13,9 @@ bl_info = {
 }
 # Taken from glTF-Blender-IO, because I do not understand Python that well
 # (this is the first thing of substance I've created in it) and just wanted
 # script reloading to work.
 #
 # https://github.com/KhronosGroup/glTF-Blender-IO
 #
 # Copyright 2018-2019 The glTF-Blender-IO authors.
@@ -118,13 +118,8 @@ class ExportMSH(Operator, ExportHelper):
     def execute(self, context):
-        if 'SELECTED' in self.export_target and len(bpy.context.selected_objects) == 0:
-            raise Exception("{} was chosen, but you have not selected any objects. "
-                            " Don't forget to unhide all the objects you wish to select!".format(self.export_target))
         scene, armature_obj = create_scene(
                 generate_triangle_strips=self.generate_triangle_strips,
                 apply_modifiers=self.apply_modifiers,
                 export_target=self.export_target,
                 skel_only=self.animation_export != 'NONE') # Exclude geometry data (except root stuff) if we're doing anims
@@ -141,7 +136,7 @@ class ExportMSH(Operator, ExportHelper):
             set_scene_animation(scene, armature_obj)
             write_scene_to_file(self.filepath, scene)
         elif self.animation_export == 'BATCH':
             export_dir = self.filepath if os.path.isdir(self.filepath) else os.path.dirname(self.filepath)
             for action in bpy.data.actions:
@@ -162,14 +157,14 @@ def menu_func_export(self, context):
 class ImportMSH(Operator, ImportHelper):
-    """ Import SWBF .msh file(s). """
+    """ Import an SWBF .msh file. """
     bl_idname = "swbf_msh.import"
     bl_label = "Import SWBF .msh File(s)"
     filename_ext = ".msh"
     files: CollectionProperty(
-            name="File Path(s)",
+            name="File Path",
             type=bpy.types.OperatorFileListElement,
             )
@@ -181,7 +176,7 @@ class ImportMSH(Operator, ImportHelper):
     animation_only: BoolProperty(
         name="Import Animation(s)",
-        description="Import one or more animations from the selected files and append each as a new Action to currently selected Armature.",
+        description="Import on or more animations from the selected files and append each as a new Action to currently selected Armature.",
         default=False
     )
@@ -193,9 +188,9 @@ class ImportMSH(Operator, ImportHelper):
             if filepath.endswith(".zaabin") or filepath.endswith(".zaa"):
                 extract_and_apply_munged_anim(filepath)
             else:
                 with open(filepath, 'rb') as input_file:
                     scene = read_scene(input_file, self.animation_only)
                 if not self.animation_only:
                     extract_scene(filepath, scene)
                 else:

View File

@@ -138,9 +138,6 @@ class Reader:
     def how_much_left(self, pos):
         return self.end_pos - pos
-    def bytes_remaining(self):
-        return self.end_pos - self.file.tell()
     def skip_until(self, header):
         while (self.could_have_child() and header not in self.peak_next_header()):
             self.skip_bytes(1)

View File

@@ -32,6 +32,7 @@ def extract_and_apply_anim(filename : str, scene : Scene):
     if scene.animation is None:
         raise Exception("No animation found in msh file!")
     else:
         head, tail = os.path.split(filename)
         anim_name = tail.split(".")[0]
@@ -39,10 +40,6 @@ def extract_and_apply_anim(filename : str, scene : Scene):
         if anim_name in bpy.data.actions:
             bpy.data.actions.remove(bpy.data.actions[anim_name], do_unlink=True)
-        for nt in arma.animation_data.nla_tracks:
-            if anim_name == nt.strips[0].name:
-                arma.animation_data.nla_tracks.remove(nt)
         action = bpy.data.actions.new(anim_name)
         action.use_fake_user = True
@@ -50,7 +47,7 @@ def extract_and_apply_anim(filename : str, scene : Scene):
         arma.animation_data_create()
         # Record the starting transforms of each bone. Pose space is relative
         # to bones starting transforms. Starting = in edit mode
         bone_bind_poses = {}
@@ -59,7 +56,7 @@ def extract_and_apply_anim(filename : str, scene : Scene):
         for edit_bone in arma.data.edit_bones:
             if edit_bone.parent:
                 bone_local = edit_bone.parent.matrix.inverted() @ edit_bone.matrix
             else:
                 bone_local = arma.matrix_local @ edit_bone.matrix
@@ -75,8 +72,8 @@ def extract_and_apply_anim(filename : str, scene : Scene):
             translation_frames, rotation_frames = scene.animation.bone_frames[to_crc(bone.name)]
             loc_data_path = "pose.bones[\"{}\"].location".format(bone.name)
             rot_data_path = "pose.bones[\"{}\"].rotation_quaternion".format(bone.name)
             fcurve_rot_w = action.fcurves.new(rot_data_path, index=0, action_group=bone.name)
@@ -106,5 +103,4 @@ def extract_and_apply_anim(filename : str, scene : Scene):
                 fcurve_loc_z.keyframe_points.insert(i,t.z)
         arma.animation_data.action = action
-        track = arma.animation_data.nla_tracks.new()
-        track.strips.new(action.name, action.frame_range[0], action)

View File

@@ -10,8 +10,6 @@ from math import sqrt
 from bpy.props import BoolProperty, EnumProperty, StringProperty
 from bpy.types import Operator, Menu
-from .option_file_parser import MungeOptions
 import os
@@ -35,69 +33,28 @@ class FillSWBFMaterialProperties(bpy.types.Operator):
         slots = sum([list(ob.material_slots) for ob in bpy.context.selected_objects if ob.type == 'MESH'],[])
         mats = [slot.material for slot in slots if (slot.material and slot.material.node_tree)]
-        mats_visited = set()
         for mat in mats:
-            if mat.name in mats_visited or not mat.swbf_msh_mat:
-                continue
-            else:
-                mats_visited.add(mat.name)
-            mat.swbf_msh_mat.doublesided = not mat.use_backface_culling
-            mat.swbf_msh_mat.hardedged_transparency = (mat.blend_method == "CLIP")
-            mat.swbf_msh_mat.blended_transparency = (mat.blend_method == "BLEND")
-            mat.swbf_msh_mat.additive_transparency = (mat.blend_method == "ADDITIVE")
-            # Below is all for filling the diffuse map/texture_0 fields
             try:
                 for BSDF_node in [n for n in mat.node_tree.nodes if n.type == 'BSDF_PRINCIPLED']:
                     base_col = BSDF_node.inputs['Base Color']
-                    stack = []
-                    texture_node = None
-                    current_socket = base_col
-                    if base_col.is_linked:
-                        stack.append(base_col.links[0].from_node)
-                    while stack:
-                        curr_node = stack.pop()
-                        if curr_node.type == 'TEX_IMAGE':
-                            texture_node = curr_node
-                            break
-                        else:
-                            # Crude but good for now
-                            next_nodes = []
-                            for node_input in curr_node.inputs:
-                                for link in node_input.links:
-                                    next_nodes.append(link.from_node)
-                            # reversing it so we go from up to down
-                            stack += reversed(next_nodes)
-                    if texture_node is not None:
-                        tex_path = texture_node.image.filepath
-                        tex_name = os.path.basename(tex_path)
-                        i = tex_name.find('.')
+                    for link in base_col.links :
+                        link_node = link.from_node
+                        if link_node.type != 'TEX_IMAGE':
+                            continue
+                        tex_name = link_node.image.filepath
+                        print(tex_name)
+                        i = tex_name.find(".tga")
                         # Get rid of trailing number in case one is present
                         if i > 0:
-                            tex_name = tex_name[0:i] + ".tga"
-                        refined_tex_path = os.path.join(os.path.dirname(tex_path), tex_name)
-                        mat.swbf_msh_mat.diffuse_map = refined_tex_path
-                        mat.swbf_msh_mat.texture_0 = refined_tex_path
+                            tex_name = tex_name[0:i+4]
+                        mat.swbf_msh_mat.rendertype = 'NORMAL_BF2'
+                        mat.swbf_msh_mat.diffuse_map = tex_name
                         break
             except:
                 # Many chances for null ref exceptions. None if user reads doc section...
@@ -126,7 +83,6 @@ def draw_matfill_menu(self, context):
 # Creates shader nodes to emulate SWBF material properties.
 # Will probably only support for a narrow subset of properties...
-# So much fun to write this, will probably do all render types by end of October
 class GenerateMaterialNodesFromSWBFProperties(bpy.types.Operator):
@@ -151,172 +107,45 @@ to provide an exact emulation"""
     def execute(self, context):
-        material = bpy.data.materials.get(self.material_name, None)
-        if not material or not material.swbf_msh_mat:
-            return {'CANCELLED'}
+        material = bpy.data.materials[self.material_name]
+        if material and material.swbf_msh_mat:
         mat_props = material.swbf_msh_mat
-        texture_input_nodes = []
-        surface_output_nodes = []
-        # Op will give up if no diffuse map is present.
-        # Eventually more nuance will be added for different
-        # rtypes
-        diffuse_texture_path = mat_props.diffuse_map
-        if diffuse_texture_path and os.path.exists(diffuse_texture_path):
-            material.use_nodes = True
-            material.node_tree.nodes.clear()
-            bsdf = material.node_tree.nodes.new("ShaderNodeBsdfPrincipled")
-            texImage = material.node_tree.nodes.new('ShaderNodeTexImage')
-            texImage.image = bpy.data.images.load(diffuse_texture_path)
-            texImage.image.alpha_mode = 'CHANNEL_PACKED'
-            material.node_tree.links.new(bsdf.inputs['Base Color'], texImage.outputs['Color'])
-            texture_input_nodes.append(texImage)
-            bsdf.inputs["Roughness"].default_value = 1.0
-            bsdf.inputs["Specular"].default_value = 0.0
-            material.use_backface_culling = not bool(mat_props.doublesided)
-            surface_output_nodes.append(('BSDF', bsdf))
-            if not mat_props.glow:
+            diffuse_texture_path = mat_props.diffuse_map
+            if diffuse_texture_path and os.path.exists(diffuse_texture_path):
+                material.use_nodes = True
+                material.node_tree.nodes.clear()
+                bsdf = material.node_tree.nodes.new("ShaderNodeBsdfPrincipled")
+                texImage = material.node_tree.nodes.new('ShaderNodeTexImage')
+                texImage.image = bpy.data.images.load(diffuse_texture_path)
+                texImage.image.alpha_mode = 'CHANNEL_PACKED'
+                material.node_tree.links.new(bsdf.inputs['Base Color'], texImage.outputs['Color'])
+                bsdf.inputs["Roughness"].default_value = 1.0
+                bsdf.inputs["Specular"].default_value = 0.0
                 if mat_props.hardedged_transparency:
                     material.blend_method = "CLIP"
                     material.node_tree.links.new(bsdf.inputs['Alpha'], texImage.outputs['Alpha'])
-                elif mat_props.blended_transparency:
-                    material.blend_method = "BLEND"
-                    material.node_tree.links.new(bsdf.inputs['Alpha'], texImage.outputs['Alpha'])
-                elif mat_props.additive_transparency:
-                    # most complex
-                    transparent_bsdf = material.node_tree.nodes.new("ShaderNodeBsdfTransparent")
-                    add_shader = material.node_tree.nodes.new("ShaderNodeAddShader")
-                    material.node_tree.links.new(add_shader.inputs[0], bsdf.outputs["BSDF"])
-                    material.node_tree.links.new(add_shader.inputs[1], transparent_bsdf.outputs["BSDF"])
-                    surface_output_nodes[0] = ('Shader', add_shader)
-            # Glow (adds another shader output)
+                material.use_backface_culling = not bool(mat_props.doublesided)
+                output = material.node_tree.nodes.new("ShaderNodeOutputMaterial")
+                material.node_tree.links.new(output.inputs['Surface'], bsdf.outputs['BSDF'])
             else:
-                emission = material.node_tree.nodes.new("ShaderNodeEmission")
-                material.node_tree.links.new(emission.inputs['Color'], texImage.outputs['Color'])
-                emission_strength_multiplier = material.node_tree.nodes.new("ShaderNodeMath")
-                emission_strength_multiplier.operation = 'MULTIPLY'
-                emission_strength_multiplier.inputs[1].default_value = 32.0
-                material.node_tree.links.new(emission_strength_multiplier.inputs[0], texImage.outputs['Alpha'])
-                material.node_tree.links.new(emission.inputs['Strength'], emission_strength_multiplier.outputs[0])
-                surface_output_nodes.append(("Emission", emission))
-            surfaces_output = None
-            if (len(surface_output_nodes) == 1):
-                surfaces_output = surface_output_nodes[0][1]
-            else:
-                mix = material.node_tree.nodes.new("ShaderNodeMixShader")
-                material.node_tree.links.new(mix.inputs[1], surface_output_nodes[0][1].outputs[0])
-                material.node_tree.links.new(mix.inputs[2], surface_output_nodes[1][1].outputs[0])
-                surfaces_output = mix
-            # Normal/bump mapping (needs more rendertype support!)
-            if "NORMALMAP" in mat_props.rendertype and mat_props.normal_map and os.path.exists(mat_props.normal_map):
-                normalMapTexImage = material.node_tree.nodes.new('ShaderNodeTexImage')
-                normalMapTexImage.image = bpy.data.images.load(mat_props.normal_map)
-                normalMapTexImage.image.alpha_mode = 'CHANNEL_PACKED'
-                normalMapTexImage.image.colorspace_settings.name = 'Non-Color'
-                texture_input_nodes.append(normalMapTexImage)
-                options = MungeOptions(mat_props.normal_map + ".option")
-                if options.get_bool("bumpmap"):
-                    # First we must convert the RGB data to brightness
-                    rgb_to_bw_node = material.node_tree.nodes.new("ShaderNodeRGBToBW")
-                    material.node_tree.links.new(rgb_to_bw_node.inputs["Color"], normalMapTexImage.outputs["Color"])
-                    # Now create a bump map node (perhaps we could also use this with normals and just plug color into normal input?)
-                    bumpMapNode = material.node_tree.nodes.new('ShaderNodeBump')
-                    bumpMapNode.inputs["Distance"].default_value = options.get_float("bumpscale", default=1.0)
-                    material.node_tree.links.new(bumpMapNode.inputs["Height"], rgb_to_bw_node.outputs["Val"])
-                    normalsOutputNode = bumpMapNode
+                # Todo: figure out some way to raise an error but continue operator execution...
+                if self.fail_silently:
+                    return {'CANCELLED'}
                 else:
-                    normalMapNode = material.node_tree.nodes.new('ShaderNodeNormalMap')
-                    material.node_tree.links.new(normalMapNode.inputs["Color"], normalMapTexImage.outputs["Color"])
-                    normalsOutputNode = normalMapNode
-                material.node_tree.links.new(bsdf.inputs['Normal'], normalsOutputNode.outputs["Normal"])
-            output = material.node_tree.nodes.new("ShaderNodeOutputMaterial")
-            material.node_tree.links.new(output.inputs['Surface'], surfaces_output.outputs[0])
-            # Scrolling
-            # This approach works 90% of the time, but notably produces very incorrect results
-            # on mus1_bldg_world_1,2,3
-            # Clear all anims in all cases
-            if material.node_tree.animation_data:
-                material.node_tree.animation_data_clear()
-            if "SCROLL" in mat_props.rendertype:
-                uv_input = material.node_tree.nodes.new("ShaderNodeUVMap")
-                vector_add = material.node_tree.nodes.new("ShaderNodeVectorMath")
-                # Add keyframes
-                scroll_per_sec_divisor = 255.0
-                frame_step = 60.0
-                fps = bpy.context.scene.render.fps
-                for i in range(2):
-                    vector_add.inputs[1].default_value[0] = i * mat_props.scroll_speed_u * frame_step / scroll_per_sec_divisor
-                    vector_add.inputs[1].keyframe_insert("default_value", index=0, frame=i * frame_step * fps)
-                    vector_add.inputs[1].default_value[1] = i * mat_props.scroll_speed_v * frame_step / scroll_per_sec_divisor
-                    vector_add.inputs[1].keyframe_insert("default_value", index=1, frame=i * frame_step * fps)
-                material.node_tree.links.new(vector_add.inputs[0], uv_input.outputs[0])
-                for texture_node in texture_input_nodes:
-                    material.node_tree.links.new(texture_node.inputs["Vector"], vector_add.outputs[0])
-                # Don't know how to set interpolation when adding keyframes
-                # so we must do it after the fact
-                if material.node_tree.animation_data and material.node_tree.animation_data.action:
-                    for fcurve in material.node_tree.animation_data.action.fcurves:
-                        for kf in fcurve.keyframe_points.values():
-                            kf.interpolation = 'LINEAR'
-        '''
-        else:
-            # Todo: figure out some way to raise an error but continue operator execution...
-            if self.fail_silently:
-                return {'CANCELLED'}
-            else:
-                raise RuntimeError(f"Diffuse texture at path: '{diffuse_texture_path}' was not found.")
-        '''
+                    raise RuntimeError(f"Diffuse texture at path: '{diffuse_texture_path}' was not found.")
         return {'FINISHED'}

View File

@@ -2,9 +2,10 @@
 import bpy
 from typing import Dict
+from .msh_material_properties import *
 from .msh_material import *
-from .msh_material_gather import *
-from .msh_material_properties import *
-from .msh_material_operators import *
 from .msh_material_utilities import _REVERSE_RENDERTYPES_MAPPING
@@ -14,8 +15,6 @@ import os
 def find_texture_path(folder_path : str, name : str) -> str:
     if not folder_path or not name:
@@ -106,7 +105,7 @@ def _fill_material_props_data(material, material_properties):
     anim_length_index = int(sqrt(material.data[0]))
     if anim_length_index < 0:
         anim_length_index = 0
-    elif anim_length_index >= len(UI_MATERIAL_ANIMATION_LENGTHS):
+    elif anim_length_index > len(UI_MATERIAL_ANIMATION_LENGTHS):
         anim_length_index = len(UI_MATERIAL_ANIMATION_LENGTHS) - 1
     material_properties.animation_length = UI_MATERIAL_ANIMATION_LENGTHS[anim_length_index][0]

View File

@@ -40,7 +40,6 @@ def model_to_mesh_object(model: Model, scene : Scene, materials_map : Dict[str,
     vertex_positions = []
     vertex_uvs = []
     vertex_normals = []
-    vertex_colors = []
     # Keeps track of which vertices each group of weights affects
     # i.e. maps offset of vertices -> weights that affect them
@@ -59,7 +58,6 @@ def model_to_mesh_object(model: Model, scene : Scene, materials_map : Dict[str,
     if model.geometry:
-        geometry_has_colors = any(segment.colors for segment in model.geometry)
         for segment in model.geometry:
@@ -78,11 +76,6 @@ def model_to_mesh_object(model: Model, scene : Scene, materials_map : Dict[str,
             if segment.normals:
                 vertex_normals += [tuple(convert_vector_space(n)) for n in segment.normals]
-            if segment.colors:
-                vertex_colors.extend(segment.colors)
-            elif geometry_has_colors:
-                [vertex_colors.extend([0.0, 0.0, 0.0, 1.0]) for _ in range(len(segment.positions))]
             if segment.weights:
                 vertex_weights_offsets[polygon_index_offset] = segment.weights
@@ -118,6 +111,7 @@ def model_to_mesh_object(model: Model, scene : Scene, materials_map : Dict[str,
     blender_mesh.vertices.add(len(vertex_positions))
     blender_mesh.vertices.foreach_set("co", [component for vertex_position in vertex_positions for component in vertex_position])
     # LOOPS
     flat_indices = [index for polygon in polygons for index in polygon]
@@ -135,10 +129,6 @@ def model_to_mesh_object(model: Model, scene : Scene, materials_map : Dict[str,
     blender_mesh.uv_layers.new(do_init=False)
     blender_mesh.uv_layers[0].data.foreach_set("uv", [component for i in flat_indices for component in vertex_uvs[i]])
-    # Colors
-    if geometry_has_colors:
-        blender_mesh.color_attributes.new("COLOR0", "FLOAT_COLOR", "POINT")
-        blender_mesh.color_attributes[0].data.foreach_set("color", vertex_colors)
     # POLYGONS/FACES

View File

@@ -73,7 +73,7 @@ class Model:
     name: str = "Model"
     parent: str = ""
     model_type: ModelType = ModelType.NULL
-    hidden: bool = False
+    hidden: bool = True
     transform: ModelTransform = field(default_factory=ModelTransform)

View File

@@ -32,62 +32,33 @@ def gather_models(apply_modifiers: bool, export_target: str, skeleton_only: bool
     pure_bones_from_armature = {}
     armature_found = None
-    # Non-bone objects that will be exported
-    blender_objects_to_export = []
-    # This must be seperate from the list above,
-    # since exported objects will contain Blender objects as well as bones
-    # Here we just keep track of all names, regardless of origin
-    exported_object_names: Set[str] = set()
-    # Me must keep track of hidden objects separately because
-    # evaluated_get clears hidden status
-    blender_objects_to_hide: Set[str] = set()
-    # Armature must be processed before everything else!
-    # In this loop we also build a set of names of all objects
-    # that will be exported. This is necessary so we can prune vertex
-    # groups that do not reference exported objects in the main
-    # model building loop below this one.
-    for uneval_obj in select_objects(export_target):
-        if get_is_model_hidden(uneval_obj):
-            blender_objects_to_hide.add(uneval_obj.name)
-        if uneval_obj.type == "ARMATURE" and not armature_found:
-            # Keep track of the armature, we don't want to process > 1!
+    objects_to_export = select_objects(export_target)
+    for uneval_obj in objects_to_export:
+        if uneval_obj.type == "ARMATURE":
             armature_found = uneval_obj.evaluated_get(depsgraph) if apply_modifiers else uneval_obj
-            # Get all bones in a separate list. While we iterate through
-            # objects we removed bones with geometry from this dict. After iteration
-            # is done, we add the remaining bones to the models from exported
-            # scene objects.
             pure_bones_from_armature = expand_armature(armature_found)
-            # All bones to set
-            exported_object_names.update(pure_bones_from_armature.keys())
-        elif not (uneval_obj.type in SKIPPED_OBJECT_TYPES and uneval_obj.name not in parents):
-            exported_object_names.add(uneval_obj.name)
-            blender_objects_to_export.append(uneval_obj)
-        else:
-            pass
-    for uneval_obj in blender_objects_to_export:
+            break
+    for uneval_obj in objects_to_export:
+        if uneval_obj.type == "ARMATURE" or (uneval_obj.type in SKIPPED_OBJECT_TYPES and uneval_obj.name not in parents):
+            continue
         obj = uneval_obj.evaluated_get(depsgraph) if apply_modifiers else uneval_obj
         check_for_bad_lod_suffix(obj)
-        # Test for a mesh object that should be a BONE on export.
+        # Test for a mesh object that is actually a BONE (shares name with bone_parent)
         # If so, we inject geometry into the BONE while not modifying it's transform/name
-        # and remove it from the set of BONES without geometry (pure).
-        if obj.name in pure_bones_from_armature:
-            model = pure_bones_from_armature.pop(obj.name)
+        if obj.parent_bone and obj.parent_bone in pure_bones_from_armature:
+            model = pure_bones_from_armature[obj.parent_bone]
+            # Since we found a composite bone, removed it from the dict of pure bones
+            pure_bones_from_armature.pop(obj.parent_bone)
         else:
            model = Model()
            model.name = obj.name
-        model.model_type = ModelType.NULL if skeleton_only else get_model_type(obj, armature_found)
+        model.model_type = get_model_type(obj, skeleton_only)
+        model.hidden = get_is_model_hidden(obj)
        transform = obj.matrix_local
@@ -111,19 +82,10 @@ def gather_models(apply_modifiers: bool, export_target: str, skeleton_only: bool
         model.transform.rotation = convert_rotation_space(local_rotation)
         model.transform.translation = convert_vector_space(local_translation)
-        if obj.type in MESH_OBJECT_TYPES and not skeleton_only:
-            # Vertex groups are often used for purposes other than skinning.
-            # Here we gather all vgroups and select the ones that reference
-            # objects included in the export.
-            valid_vgroup_indices : Set[int] = set()
-            if model.model_type == ModelType.SKIN:
-                valid_vgroups = [group for group in obj.vertex_groups if group.name in exported_object_names]
-                valid_vgroup_indices = { group.index for group in valid_vgroups }
-                model.bone_map = [ group.name for group in valid_vgroups ]
+        if obj.type in MESH_OBJECT_TYPES:
             mesh = obj.to_mesh()
-            model.geometry = create_mesh_geometry(mesh, valid_vgroup_indices)
+            model.geometry = create_mesh_geometry(mesh, obj.vertex_groups)
             obj.to_mesh_clear()
@@ -136,17 +98,20 @@ def gather_models(apply_modifiers: bool, export_target: str, skeleton_only: bool
                     raise RuntimeError(f"Object '{obj.name}' has resulted in a .msh geometry segment that has "
                                        f"more than {MAX_MSH_VERTEX_COUNT} vertices! Split the object's mesh up "
                                        f"and try again!")
+            if obj.vertex_groups:
+                model.bone_map = [group.name for group in obj.vertex_groups]
         if get_is_collision_primitive(obj):
             model.collisionprimitive = get_collision_primitive(obj)
-        model.hidden = model.name in blender_objects_to_hide
         models_list.append(model)
     # We removed all composite bones after looking through the objects,
     # so the bones left are all pure and we add them all here.
-    return (models_list + list(pure_bones_from_armature.values()), armature_found)
+    models_list += pure_bones_from_armature.values()
+    return (models_list, armature_found)
@@ -162,7 +127,7 @@ def create_parents_set() -> Set[str]:
     return parents
-def create_mesh_geometry(mesh: bpy.types.Mesh, valid_vgroup_indices: Set[int]) -> List[GeometrySegment]:
+def create_mesh_geometry(mesh: bpy.types.Mesh, has_weights: bool) -> List[GeometrySegment]:
     """ Creates a list of GeometrySegment objects from a Blender mesh.
        Does NOT create triangle strips in the GeometrySegment however. """
@@ -179,11 +144,11 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, valid_vgroup_indices: Set[int]) -
     vertex_remap: List[Dict[Tuple[int, int], int]] = [dict() for i in range(material_count)]
     polygons: List[Set[int]] = [set() for i in range(material_count)]
-    if mesh.color_attributes.active_color is not None:
+    if mesh.vertex_colors.active is not None:
         for segment in segments:
             segment.colors = []
-    if valid_vgroup_indices:
+    if has_weights:
         for segment in segments:
             segment.weights = []
@@ -215,16 +180,13 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, valid_vgroup_indices: Set[int]) -
                 yield mesh.uv_layers.active.data[loop_index].uv.y
             if segment.colors is not None:
-                data_type = mesh.color_attributes.active_color.data_type
-                if data_type == "FLOAT_COLOR" or data_type == "BYTE_COLOR":
-                    for v in mesh.color_attributes.active_color.data[vertex_index].color:
-                        yield v
+                for v in mesh.vertex_colors.active.data[loop_index].color:
+                    yield v
             if segment.weights is not None:
                 for v in mesh.vertices[vertex_index].groups:
-                    if v.group in valid_vgroup_indices:
-                        yield v.group
-                        yield v.weight
+                    yield v.group
+                    yield v.weight
             vertex_cache_entry = tuple(get_cache_vertex())
             cached_vertex_index = cache.get(vertex_cache_entry, vertex_cache_miss_index)
@@ -247,13 +209,12 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, valid_vgroup_indices: Set[int]) -
             segment.texcoords.append(mesh.uv_layers.active.data[loop_index].uv.copy())
             if segment.colors is not None:
-                data_type = mesh.color_attributes.active_color.data_type
-                if data_type == "FLOAT_COLOR" or data_type == "BYTE_COLOR":
-                    segment.colors.append(list(mesh.color_attributes.active_color.data[vertex_index].color))
+                segment.colors.append(list(mesh.vertex_colors.active.data[loop_index].color))
             if segment.weights is not None:
                 groups = mesh.vertices[vertex_index].groups
-                segment.weights.append([VertexWeight(v.weight, v.group) for v in groups if v.group in valid_vgroup_indices])
+                segment.weights.append([VertexWeight(v.weight, v.group) for v in groups])
             return new_index
@@ -272,29 +233,12 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, valid_vgroup_indices: Set[int]) -
     return segments
-def get_model_type(obj: bpy.types.Object, armature_found: bpy.types.Object) -> ModelType:
+def get_model_type(obj: bpy.types.Object, skel_only: bool) -> ModelType:
     """ Get the ModelType for a Blender object. """
-    if obj.type in MESH_OBJECT_TYPES:
-        # Objects can have vgroups for non-skinning purposes.
-        # If we can find one vgroup that shares a name with a bone in the
-        # armature, we know the vgroup is for weighting purposes and thus
-        # the object is a skin. Otherwise, interpret it as a static mesh.
-        # We must also check that an armature included in the export
-        # and that it is the same one this potential skin is weighting to.
-        # If we failed to do this, a user could export a selected object
-        # that is a skin, but the weight data in the export would reference
-        # nonexistent models!
-        if (obj.vertex_groups and armature_found and
-            obj.parent and obj.parent.name == armature_found.name):
-            for vgroup in obj.vertex_groups:
-                if vgroup.name in armature_found.data.bones:
-                    return ModelType.SKIN
-            return ModelType.STATIC
+    if obj.type in MESH_OBJECT_TYPES and not skel_only:
+        if obj.vertex_groups:
+            return ModelType.SKIN
         else:
             return ModelType.STATIC
@@ -303,9 +247,6 @@ def get_model_type(obj: bpy.types.Object, armature_found: bpy.types.Object) -> M
 def get_is_model_hidden(obj: bpy.types.Object) -> bool:
     """ Gets if a Blender object should be marked as hidden in the .msh file. """
-    if obj.hide_get():
-        return True
     name = obj.name.lower()
     if name.startswith("c_"):
@@ -370,14 +311,6 @@ def get_collision_primitive_shape(obj: bpy.types.Object) -> CollisionPrimitiveSh
     """ Gets the CollisionPrimitiveShape of an object or raises an error if
        it can't. """
-    # arc170 fighter has examples of box colliders without proper naming
-    # and cis_hover_aat has a cylinder which is named p_vehiclesphere.
-    # To export these properly we must check the collision_prim property
-    # that was assigned on import BEFORE looking at the name.
-    prim_type = obj.swbf_msh_coll_prim.prim_type
-    if prim_type in [item.value for item in CollisionPrimitiveShape]:
-        return CollisionPrimitiveShape(prim_type)
     name = obj.name.lower()
     if "sphere" in name or "sphr" in name or "spr" in name:
@@ -387,6 +320,11 @@ def get_collision_primitive_shape(obj: bpy.types.Object) -> CollisionPrimitiveSh
     if "box" in name or "cube" in name or "cuboid" in name:
         return CollisionPrimitiveShape.BOX
+    # arc170 fighter has examples of box colliders without proper naming
+    prim_type = obj.swbf_msh_coll_prim.prim_type
+    if prim_type in [item.value for item in CollisionPrimitiveShape]:
+        return CollisionPrimitiveShape(prim_type)
     raise RuntimeError(f"Object '{obj.name}' has no primitive type specified in it's name!")
@@ -498,7 +436,6 @@ def expand_armature(armature: bpy.types.Object) -> Dict[str, Model]:
         model.model_type = ModelType.BONE if bone.name in proper_BONES else ModelType.NULL
         model.name = bone.name
-        model.hidden = True
         model.transform.rotation = convert_rotation_space(local_rotation)
         model.transform.translation = convert_vector_space(local_translation)

View File

@@ -8,25 +8,6 @@ import math
 from mathutils import Vector, Matrix
-# Convert model with geometry to null.
-# Currently not used, but could be necessary in the future.
-def make_null(model : Model):
-    model.model_type = ModelType.NULL
-    bone_map = None
-    geometry = None
-# I think this is all we need to check for to avoid
-# common ZE/ZETools crashes...
-def validate_geometry_segment(segment : GeometrySegment) -> bool:
-    if not segment.positions or not segment.triangle_strips:
-        return False
-    else:
-        return True
 def inject_dummy_data(model : Model):
     """ Adds a triangle and material to the model (scene root). Needed to export zenasst-compatible skeletons. """
     model.hidden = True

View File

@@ -165,23 +165,19 @@ def _read_matd(matd: Reader) -> Material:
         elif next_header == "TX0D":
             with matd.read_child() as tx0d:
-                if tx0d.bytes_remaining() > 0:
-                    mat.texture0 = tx0d.read_string()
+                mat.texture0 = tx0d.read_string()
         elif next_header == "TX1D":
             with matd.read_child() as tx1d:
-                if tx1d.bytes_remaining() > 0:
-                    mat.texture1 = tx1d.read_string()
+                mat.texture1 = tx1d.read_string()
         elif next_header == "TX2D":
             with matd.read_child() as tx2d:
-                if tx2d.bytes_remaining() > 0:
-                    mat.texture2 = tx2d.read_string()
+                mat.texture2 = tx2d.read_string()
         elif next_header == "TX3D":
             with matd.read_child() as tx3d:
-                if tx3d.bytes_remaining() > 0:
-                    mat.texture3 = tx3d.read_string()
+                mat.texture3 = tx3d.read_string()
         else:
             matd.skip_bytes(1)
@@ -207,9 +203,7 @@ def _read_modl(modl: Reader, materials_list: List[Material]) -> Model:
     global model_counter
     global mndx_remap
-    if index not in mndx_remap:
-        mndx_remap[index] = model_counter
+    mndx_remap[index] = model_counter
     model_counter += 1

View File

@@ -195,5 +195,6 @@ def extract_scene(filepath: str, scene: Scene):
     for model in scene.models:
         if model.name in model_map:
             obj = model_map[model.name]
-            obj.hide_set(model.hidden or get_is_model_hidden(obj))
+            if get_is_model_hidden(obj) and len(obj.children) == 0:
+                obj.hide_set(True)

View File

@@ -9,7 +9,7 @@ from mathutils import Vector
 from .msh_model import Model, Animation, ModelType
 from .msh_scene import Scene, SceneAABB
 from .msh_model_gather import gather_models
-from .msh_model_utilities import make_null, validate_geometry_segment, sort_by_parent, has_multiple_root_models, reparent_model_roots, get_model_world_matrix, inject_dummy_data
+from .msh_model_utilities import sort_by_parent, has_multiple_root_models, reparent_model_roots, get_model_world_matrix, inject_dummy_data
 from .msh_model_triangle_strips import create_models_triangle_strips
 from .msh_material import *
 from .msh_material_gather import gather_materials
@@ -53,20 +53,6 @@ def create_scene(generate_triangle_strips: bool, apply_modifiers: bool, export_t
             for segment in model.geometry:
                 segment.triangle_strips = segment.triangles
-    # After generating triangle strips we must prune any segments that don't have
-    # them, or else ZE and most versions of ZETools will crash.
-    # We could also make models with no valid segments nulls, since they might as well be,
-    # but that could have unforseeable consequences further down the modding pipeline
-    # and is not necessary to avoid the aforementioned crashes...
-    for model in scene.models:
-        if model.geometry is not None:
-            # Doing this in msh_model_gather would be messy and the presence/absence
-            # of triangle strips is required for a validity check.
-            model.geometry = [segment for segment in model.geometry if validate_geometry_segment(segment)]
-            #if not model.geometry:
-            #    make_null(model)
     if has_multiple_root_models(scene.models):
         scene.models = reparent_model_roots(scene.models)

View File

@@ -39,9 +39,12 @@ def pack_color(color) -> int:
     return packed
 def unpack_color(color: int) -> List[float]:
-    r = (color >> 16 & 0xFF) / 255.0
-    g = (color >> 8 & 0xFF) / 255.0
-    b = (color >> 0 & 0xFF) / 255.0
-    a = (color >> 24 & 0xFF) / 255.0
+    mask = int(0x000000ff)
+    r = (color & (mask << 16)) / 255.0
+    g = (color & (mask << 8)) / 255.0
+    b = (color & mask) / 255.0
+    a = (color & (mask << 24)) / 255.0
     return [r,g,b,a]

View File

@@ -1,46 +0,0 @@
-""" Parses .tga.option and .msh.option files. Only used with the former as of now. """
-
-import os
-
-class MungeOptions:
-
-    def __init__(self, path_to_option_file):
-        self.options = {}
-        if os.path.exists(path_to_option_file):
-            with open(path_to_option_file, 'r') as option_file:
-                option_text = option_file.read()
-            option_parts = option_text.split()
-            current_parameter = ""
-            for part in option_parts:
-                if part.startswith("-"):
-                    current_parameter = part[1:]
-                    self.options[current_parameter] = ""
-                elif current_parameter:
-                    current_value = self.options[current_parameter]
-                    # Keep adding to value in case there are vector options
-                    self.options[current_parameter] += part if not current_value else (" " + part)
-
-    def is_option_present(self, param):
-        return param in self.options
-
-    def get_bool(self, param, default=False):
-        return True if param in self.options else default
-
-    def get_float(self, param, default=0.0):
-        if param in self.options:
-            try:
-                result = float(self.options[param])
-            except:
-                result = default
-            finally:
-                return result
-        else:
-            return default
-
-    def get_string(self, param, default=""):
-        return self.options.get(param, default)
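Illustrative usage sketch for the parser above; the file name and option values are hypothetical:

# A file such as "texture.tga.option" containing "-bumpmap -bumpscale 1.5"
# would be parsed into {"bumpmap": "", "bumpscale": "1.5"}.
opts = MungeOptions("texture.tga.option")
if opts.get_bool("bumpmap"):
    bump_scale = opts.get_float("bumpscale", default=1.0)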