Compare commits
No commits in common. "master" and "v1.0.0" have entirely different histories.
@@ -1,7 +1,7 @@
bl_info = {
    'name': 'SWBF .msh Import-Export',
    'author': 'Will Snyder, PrismaticFlower',
    "version": (1, 3, 0),
    'author': 'Will Snyder, SleepKiller',
    "version": (1, 0, 0),
    'blender': (2, 80, 0),
    'location': 'File > Import-Export',
    'description': 'Export as SWBF .msh file',

@@ -64,7 +63,6 @@ from .msh_skeleton_properties import *
from .msh_collision_prim_properties import *
from .msh_material_operators import *
from .msh_scene_to_blend import *
from .msh_anim_to_blend import *
from .zaa_to_blend import *

@@ -118,11 +117,6 @@ class ExportMSH(Operator, ExportHelper):

    def execute(self, context):

        if 'SELECTED' in self.export_target and len(bpy.context.selected_objects) == 0:
            raise Exception("{} was chosen, but you have not selected any objects. "
                            " Don't forget to unhide all the objects you wish to select!".format(self.export_target))

        scene, armature_obj = create_scene(
            generate_triangle_strips=self.generate_triangle_strips,
            apply_modifiers=self.apply_modifiers,

@@ -162,14 +156,14 @@ def menu_func_export(self, context):

class ImportMSH(Operator, ImportHelper):
    """ Import SWBF .msh file(s). """
    """ Import an SWBF .msh file. """

    bl_idname = "swbf_msh.import"
    bl_label = "Import SWBF .msh File(s)"
    filename_ext = ".msh"

    files: CollectionProperty(
        name="File Path(s)",
        name="File Path",
        type=bpy.types.OperatorFileListElement,
    )

@@ -181,7 +175,7 @@ class ImportMSH(Operator, ImportHelper):

    animation_only: BoolProperty(
        name="Import Animation(s)",
        description="Import one or more animations from the selected files and append each as a new Action to currently selected Armature.",
        description="Import on or more animations from the selected files and append each as a new Action to currently selected Armature.",
        default=False
    )
@@ -138,9 +138,6 @@ class Reader:
    def how_much_left(self, pos):
        return self.end_pos - pos

    def bytes_remaining(self):
        return self.end_pos - self.file.tell()

    def skip_until(self, header):
        while (self.could_have_child() and header not in self.peak_next_header()):
            self.skip_bytes(1)
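A minimal usage sketch (illustrative, not code from either branch) of how the helpers above combine when scanning a parent chunk for one child: skip_until advances until the wanted header is next or no children remain, and bytes_remaining guards against reading an empty chunk. `reader` is assumed to be a Reader positioned inside a parent chunk, and the method names mirror those visible in this diff.

def read_first_texture_name(reader):
    reader.skip_until("TX0D")                 # advance until a TX0D child is next, if any
    if reader.could_have_child() and reader.bytes_remaining() > 0:
        with reader.read_child() as tx0d:     # enter the child chunk
            if tx0d.bytes_remaining() > 0:    # empty TX0D chunks carry no string
                return tx0d.read_string()
    return None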
@@ -1,110 +0,0 @@
""" Gathers the Blender objects from the current scene and returns them as a list of
    Model objects. """

import bpy
import bmesh
import math

from enum import Enum
from typing import List, Set, Dict, Tuple

from .msh_scene import Scene
from .msh_material_to_blend import *
from .msh_model import *
from .msh_skeleton_utilities import *
from .msh_skeleton_to_blend import *
from .msh_model_gather import get_is_model_hidden
from .msh_mesh_to_blend import model_to_mesh_object

from .crc import *

import os


# Extracts and applies anims in the scene to the currently selected armature
def extract_and_apply_anim(filename : str, scene : Scene):

    arma = bpy.context.view_layer.objects.active

    if not arma or arma.type != 'ARMATURE':
        raise Exception("Select an armature to attach the imported animation to!")

    if scene.animation is None:
        raise Exception("No animation found in msh file!")
    else:
        head, tail = os.path.split(filename)
        anim_name = tail.split(".")[0]

        if anim_name in bpy.data.actions:
            bpy.data.actions.remove(bpy.data.actions[anim_name], do_unlink=True)

            for nt in arma.animation_data.nla_tracks:
                if anim_name == nt.strips[0].name:
                    arma.animation_data.nla_tracks.remove(nt)

        action = bpy.data.actions.new(anim_name)
        action.use_fake_user = True

        if not arma.animation_data:
            arma.animation_data_create()

        # Record the starting transforms of each bone. Pose space is relative
        # to bones starting transforms. Starting = in edit mode
        bone_bind_poses = {}

        bpy.context.view_layer.objects.active = arma
        bpy.ops.object.mode_set(mode='EDIT')

        for edit_bone in arma.data.edit_bones:
            if edit_bone.parent:
                bone_local = edit_bone.parent.matrix.inverted() @ edit_bone.matrix
            else:
                bone_local = arma.matrix_local @ edit_bone.matrix

            bone_bind_poses[edit_bone.name] = bone_local.inverted()

        bpy.ops.object.mode_set(mode='OBJECT')

        for bone in arma.pose.bones:
            if to_crc(bone.name) in scene.animation.bone_frames:

                bind_mat = bone_bind_poses[bone.name]

                translation_frames, rotation_frames = scene.animation.bone_frames[to_crc(bone.name)]

                loc_data_path = "pose.bones[\"{}\"].location".format(bone.name)
                rot_data_path = "pose.bones[\"{}\"].rotation_quaternion".format(bone.name)

                fcurve_rot_w = action.fcurves.new(rot_data_path, index=0, action_group=bone.name)
                fcurve_rot_x = action.fcurves.new(rot_data_path, index=1, action_group=bone.name)
                fcurve_rot_y = action.fcurves.new(rot_data_path, index=2, action_group=bone.name)
                fcurve_rot_z = action.fcurves.new(rot_data_path, index=3, action_group=bone.name)

                for frame in rotation_frames:
                    i = frame.index
                    q = (bind_mat @ convert_rotation_space(frame.rotation).to_matrix().to_4x4()).to_quaternion()

                    fcurve_rot_w.keyframe_points.insert(i,q.w)
                    fcurve_rot_x.keyframe_points.insert(i,q.x)
                    fcurve_rot_y.keyframe_points.insert(i,q.y)
                    fcurve_rot_z.keyframe_points.insert(i,q.z)

                fcurve_loc_x = action.fcurves.new(loc_data_path, index=0, action_group=bone.name)
                fcurve_loc_y = action.fcurves.new(loc_data_path, index=1, action_group=bone.name)
                fcurve_loc_z = action.fcurves.new(loc_data_path, index=2, action_group=bone.name)

                for frame in translation_frames:
                    i = frame.index
                    t = (bind_mat @ Matrix.Translation(convert_vector_space(frame.translation))).translation

                    fcurve_loc_x.keyframe_points.insert(i,t.x)
                    fcurve_loc_y.keyframe_points.insert(i,t.y)
                    fcurve_loc_z.keyframe_points.insert(i,t.z)

        arma.animation_data.action = action
        track = arma.animation_data.nla_tracks.new()
        track.strips.new(action.name, action.frame_range[0], action)
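A compact sketch of the bind-pose math the deleted module applies before keyframing (assuming Blender's mathutils types; not code from either branch): pose-bone channels are expressed relative to the bone's rest (edit-mode) matrix, so each imported frame is premultiplied by the inverted bind matrix, exactly as bind_mat is used above.

from mathutils import Matrix, Quaternion, Vector

def frame_to_pose_space(bind_mat_inv: Matrix, rotation: Quaternion, translation: Vector):
    # bind_mat_inv corresponds to bone_bind_poses[bone.name] above (already inverted)
    pose_rot = (bind_mat_inv @ rotation.to_matrix().to_4x4()).to_quaternion()
    pose_loc = (bind_mat_inv @ Matrix.Translation(translation)).translation
    return pose_rot, pose_loc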
@@ -10,8 +10,6 @@ from math import sqrt
from bpy.props import BoolProperty, EnumProperty, StringProperty
from bpy.types import Operator, Menu

from .option_file_parser import MungeOptions

import os

@@ -35,69 +33,28 @@ class FillSWBFMaterialProperties(bpy.types.Operator):
        slots = sum([list(ob.material_slots) for ob in bpy.context.selected_objects if ob.type == 'MESH'],[])
        mats = [slot.material for slot in slots if (slot.material and slot.material.node_tree)]

        mats_visited = set()

        for mat in mats:

            if mat.name in mats_visited or not mat.swbf_msh_mat:
                continue
            else:
                mats_visited.add(mat.name)

            mat.swbf_msh_mat.doublesided = not mat.use_backface_culling
            mat.swbf_msh_mat.hardedged_transparency = (mat.blend_method == "CLIP")
            mat.swbf_msh_mat.blended_transparency = (mat.blend_method == "BLEND")
            mat.swbf_msh_mat.additive_transparency = (mat.blend_method == "ADDITIVE")

            # Below is all for filling the diffuse map/texture_0 fields

            try:
                for BSDF_node in [n for n in mat.node_tree.nodes if n.type == 'BSDF_PRINCIPLED']:
                    base_col = BSDF_node.inputs['Base Color']

                    stack = []
                    for link in base_col.links :
                        link_node = link.from_node

                        texture_node = None
                        if link_node.type != 'TEX_IMAGE':
                            continue

                        current_socket = base_col
                        if base_col.is_linked:
                            stack.append(base_col.links[0].from_node)
                        tex_name = link_node.image.filepath
                        print(tex_name)

                        while stack:

                            curr_node = stack.pop()

                            if curr_node.type == 'TEX_IMAGE':
                                texture_node = curr_node
                                break
                            else:
                                # Crude but good for now
                                next_nodes = []
                                for node_input in curr_node.inputs:
                                    for link in node_input.links:
                                        next_nodes.append(link.from_node)
                                # reversing it so we go from up to down
                                stack += reversed(next_nodes)

                        if texture_node is not None:

                            tex_path = texture_node.image.filepath

                            tex_name = os.path.basename(tex_path)

                            i = tex_name.find('.')
                            i = tex_name.find(".tga")

                            # Get rid of trailing number in case one is present
                            if i > 0:
                                tex_name = tex_name[0:i] + ".tga"

                            refined_tex_path = os.path.join(os.path.dirname(tex_path), tex_name)

                            mat.swbf_msh_mat.diffuse_map = refined_tex_path
                            mat.swbf_msh_mat.texture_0 = refined_tex_path
                            tex_name = tex_name[0:i+4]

                            mat.swbf_msh_mat.rendertype = 'NORMAL_BF2'
                            mat.swbf_msh_mat.diffuse_map = tex_name
                        break
            except:
                # Many chances for null ref exceptions. None if user reads doc section...

@@ -126,7 +83,6 @@ def draw_matfill_menu(self, context):

# Creates shader nodes to emulate SWBF material properties.
# Will probably only support for a narrow subset of properties...
# So much fun to write this, will probably do all render types by end of October

class GenerateMaterialNodesFromSWBFProperties(bpy.types.Operator):

@@ -151,20 +107,12 @@ to provide an exact emulation"""

    def execute(self, context):

        material = bpy.data.materials.get(self.material_name, None)
        material = bpy.data.materials[self.material_name]

        if not material or not material.swbf_msh_mat:
            return {'CANCELLED'}
        if material and material.swbf_msh_mat:

            mat_props = material.swbf_msh_mat

            texture_input_nodes = []
            surface_output_nodes = []

            # Op will give up if no diffuse map is present.
            # Eventually more nuance will be added for different
            # rtypes
            diffuse_texture_path = mat_props.diffuse_map
            if diffuse_texture_path and os.path.exists(diffuse_texture_path):

@@ -178,137 +126,18 @@ to provide an exact emulation"""
                texImage.image.alpha_mode = 'CHANNEL_PACKED'
                material.node_tree.links.new(bsdf.inputs['Base Color'], texImage.outputs['Color'])

                texture_input_nodes.append(texImage)

                bsdf.inputs["Roughness"].default_value = 1.0
                bsdf.inputs["Specular"].default_value = 0.0

                material.use_backface_culling = not bool(mat_props.doublesided)

                surface_output_nodes.append(('BSDF', bsdf))

                if not mat_props.glow:
                    if mat_props.hardedged_transparency:
                        material.blend_method = "CLIP"
                        material.node_tree.links.new(bsdf.inputs['Alpha'], texImage.outputs['Alpha'])
                    elif mat_props.blended_transparency:
                        material.blend_method = "BLEND"
                        material.node_tree.links.new(bsdf.inputs['Alpha'], texImage.outputs['Alpha'])
                    elif mat_props.additive_transparency:

                        # most complex
                        transparent_bsdf = material.node_tree.nodes.new("ShaderNodeBsdfTransparent")
                        add_shader = material.node_tree.nodes.new("ShaderNodeAddShader")

                        material.node_tree.links.new(add_shader.inputs[0], bsdf.outputs["BSDF"])
                        material.node_tree.links.new(add_shader.inputs[1], transparent_bsdf.outputs["BSDF"])

                        surface_output_nodes[0] = ('Shader', add_shader)

                # Glow (adds another shader output)
                else:

                    emission = material.node_tree.nodes.new("ShaderNodeEmission")
                    material.node_tree.links.new(emission.inputs['Color'], texImage.outputs['Color'])

                    emission_strength_multiplier = material.node_tree.nodes.new("ShaderNodeMath")
                    emission_strength_multiplier.operation = 'MULTIPLY'
                    emission_strength_multiplier.inputs[1].default_value = 32.0

                    material.node_tree.links.new(emission_strength_multiplier.inputs[0], texImage.outputs['Alpha'])

                    material.node_tree.links.new(emission.inputs['Strength'], emission_strength_multiplier.outputs[0])

                    surface_output_nodes.append(("Emission", emission))

                surfaces_output = None
                if (len(surface_output_nodes) == 1):
                    surfaces_output = surface_output_nodes[0][1]
                else:
                    mix = material.node_tree.nodes.new("ShaderNodeMixShader")
                    material.node_tree.links.new(mix.inputs[1], surface_output_nodes[0][1].outputs[0])
                    material.node_tree.links.new(mix.inputs[2], surface_output_nodes[1][1].outputs[0])

                    surfaces_output = mix

                # Normal/bump mapping (needs more rendertype support!)
                if "NORMALMAP" in mat_props.rendertype and mat_props.normal_map and os.path.exists(mat_props.normal_map):
                    normalMapTexImage = material.node_tree.nodes.new('ShaderNodeTexImage')
                    normalMapTexImage.image = bpy.data.images.load(mat_props.normal_map)
                    normalMapTexImage.image.alpha_mode = 'CHANNEL_PACKED'
                    normalMapTexImage.image.colorspace_settings.name = 'Non-Color'
                    texture_input_nodes.append(normalMapTexImage)

                    options = MungeOptions(mat_props.normal_map + ".option")

                    if options.get_bool("bumpmap"):

                        # First we must convert the RGB data to brightness
                        rgb_to_bw_node = material.node_tree.nodes.new("ShaderNodeRGBToBW")
                        material.node_tree.links.new(rgb_to_bw_node.inputs["Color"], normalMapTexImage.outputs["Color"])

                        # Now create a bump map node (perhaps we could also use this with normals and just plug color into normal input?)
                        bumpMapNode = material.node_tree.nodes.new('ShaderNodeBump')
                        bumpMapNode.inputs["Distance"].default_value = options.get_float("bumpscale", default=1.0)
                        material.node_tree.links.new(bumpMapNode.inputs["Height"], rgb_to_bw_node.outputs["Val"])

                        normalsOutputNode = bumpMapNode

                    else:

                        normalMapNode = material.node_tree.nodes.new('ShaderNodeNormalMap')
                        material.node_tree.links.new(normalMapNode.inputs["Color"], normalMapTexImage.outputs["Color"])

                        normalsOutputNode = normalMapNode

                    material.node_tree.links.new(bsdf.inputs['Normal'], normalsOutputNode.outputs["Normal"])

                material.use_backface_culling = not bool(mat_props.doublesided)

                output = material.node_tree.nodes.new("ShaderNodeOutputMaterial")
                material.node_tree.links.new(output.inputs['Surface'], surfaces_output.outputs[0])
                material.node_tree.links.new(output.inputs['Surface'], bsdf.outputs['BSDF'])

                # Scrolling
                # This approach works 90% of the time, but notably produces very incorrect results
                # on mus1_bldg_world_1,2,3

                # Clear all anims in all cases
                if material.node_tree.animation_data:
                    material.node_tree.animation_data_clear()

                if "SCROLL" in mat_props.rendertype:
                    uv_input = material.node_tree.nodes.new("ShaderNodeUVMap")

                    vector_add = material.node_tree.nodes.new("ShaderNodeVectorMath")

                    # Add keyframes
                    scroll_per_sec_divisor = 255.0
                    frame_step = 60.0
                    fps = bpy.context.scene.render.fps
                    for i in range(2):
                        vector_add.inputs[1].default_value[0] = i * mat_props.scroll_speed_u * frame_step / scroll_per_sec_divisor
                        vector_add.inputs[1].keyframe_insert("default_value", index=0, frame=i * frame_step * fps)

                        vector_add.inputs[1].default_value[1] = i * mat_props.scroll_speed_v * frame_step / scroll_per_sec_divisor
                        vector_add.inputs[1].keyframe_insert("default_value", index=1, frame=i * frame_step * fps)

                    material.node_tree.links.new(vector_add.inputs[0], uv_input.outputs[0])

                    for texture_node in texture_input_nodes:
                        material.node_tree.links.new(texture_node.inputs["Vector"], vector_add.outputs[0])

                # Don't know how to set interpolation when adding keyframes
                # so we must do it after the fact
                if material.node_tree.animation_data and material.node_tree.animation_data.action:
                    for fcurve in material.node_tree.animation_data.action.fcurves:
                        for kf in fcurve.keyframe_points.values():
                            kf.interpolation = 'LINEAR'

                '''
                else:

                # Todo: figure out some way to raise an error but continue operator execution...

@@ -316,7 +145,7 @@ to provide an exact emulation"""
                    return {'CANCELLED'}
                else:
                    raise RuntimeError(f"Diffuse texture at path: '{diffuse_texture_path}' was not found.")
                '''

        return {'FINISHED'}
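A self-contained sketch of the upstream walk FillSWBFMaterialProperties performs (illustrative, not the operator itself): starting from a shader socket, follow links backwards with an explicit stack until an Image Texture node is found. `socket` is assumed to be something like principled_bsdf.inputs['Base Color'] on a material that uses nodes.

def find_upstream_image_node(socket):
    stack = [link.from_node for link in socket.links]
    while stack:
        node = stack.pop()
        if node.type == 'TEX_IMAGE':
            return node
        # push every node feeding this one; reversed so traversal runs top-down
        next_nodes = [l.from_node for node_input in node.inputs for l in node_input.links]
        stack += reversed(next_nodes)
    return None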
@@ -2,9 +2,10 @@

import bpy
from typing import Dict

from .msh_material_properties import *
from .msh_material import *
from .msh_material_gather import *
from .msh_material_properties import *
from .msh_material_operators import *

from .msh_material_utilities import _REVERSE_RENDERTYPES_MAPPING

@@ -14,8 +15,6 @@ import os

def find_texture_path(folder_path : str, name : str) -> str:

    if not folder_path or not name:

@@ -106,7 +105,7 @@ def _fill_material_props_data(material, material_properties):
    anim_length_index = int(sqrt(material.data[0]))
    if anim_length_index < 0:
        anim_length_index = 0
    elif anim_length_index >= len(UI_MATERIAL_ANIMATION_LENGTHS):
    elif anim_length_index > len(UI_MATERIAL_ANIMATION_LENGTHS):
        anim_length_index = len(UI_MATERIAL_ANIMATION_LENGTHS) - 1

    material_properties.animation_length = UI_MATERIAL_ANIMATION_LENGTHS[anim_length_index][0]
@@ -18,7 +18,19 @@ _RENDERTYPES_MAPPING = {
    "NORMALMAPPED_TILED_ENVMAPPED_BF2": Rendertype.NORMALMAPPED_TILED_ENVMAP}


_REVERSE_RENDERTYPES_MAPPING = {val: key for (key, val) in _RENDERTYPES_MAPPING.items()}
_REVERSE_RENDERTYPES_MAPPING = {
    Rendertype.NORMAL : "NORMAL_BF2",
    Rendertype.SCROLLING : "SCROLLING_BF2",
    Rendertype.ENVMAPPED : "ENVMAPPED_BF2",
    Rendertype.ANIMATED : "ANIMATED_BF2",
    Rendertype.REFRACTION : "REFRACTION_BF2",
    Rendertype.BLINK : "BLINK_BF2",
    Rendertype.NORMALMAPPED_TILED : "NORMALMAPPED_TILED_BF2",
    Rendertype.NORMALMAPPED_ENVMAPPED : "NORMALMAPPED_ENVMAPPED_BF2",
    Rendertype.NORMALMAPPED : "NORMALMAPPED_BF2",
    Rendertype.NORMALMAPPED_TILED_ENVMAP : "NORMALMAPPED_TILED_ENVMAPPED_BF2"}


def remove_unused_materials(materials: Dict[str, Material],
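An illustrative equivalence check (stand-in enum, not the addon's) for the master-side change above: building the reverse table with a comprehension gives the same pairs as the hand-written dict whenever the forward mapping is one-to-one.

from enum import Enum

class Rendertype(Enum):      # hypothetical stand-in for the addon's Rendertype
    NORMAL = 0
    SCROLLING = 3

forward = {"NORMAL_BF2": Rendertype.NORMAL, "SCROLLING_BF2": Rendertype.SCROLLING}
reverse = {val: key for (key, val) in forward.items()}
assert reverse[Rendertype.NORMAL] == "NORMAL_BF2"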
@@ -40,7 +40,6 @@ def model_to_mesh_object(model: Model, scene : Scene, materials_map : Dict[str,
    vertex_positions = []
    vertex_uvs = []
    vertex_normals = []
    vertex_colors = []

    # Keeps track of which vertices each group of weights affects
    # i.e. maps offset of vertices -> weights that affect them

@@ -59,7 +58,6 @@ def model_to_mesh_object(model: Model, scene : Scene, materials_map : Dict[str,

    if model.geometry:
        geometry_has_colors = any(segment.colors for segment in model.geometry)

        for segment in model.geometry:

@@ -78,11 +76,6 @@ def model_to_mesh_object(model: Model, scene : Scene, materials_map : Dict[str,
            if segment.normals:
                vertex_normals += [tuple(convert_vector_space(n)) for n in segment.normals]

            if segment.colors:
                vertex_colors.extend(segment.colors)
            elif geometry_has_colors:
                [vertex_colors.extend([0.0, 0.0, 0.0, 1.0]) for _ in range(len(segment.positions))]

            if segment.weights:
                vertex_weights_offsets[polygon_index_offset] = segment.weights

@@ -118,6 +111,7 @@ def model_to_mesh_object(model: Model, scene : Scene, materials_map : Dict[str,
    blender_mesh.vertices.add(len(vertex_positions))
    blender_mesh.vertices.foreach_set("co", [component for vertex_position in vertex_positions for component in vertex_position])

    # LOOPS

    flat_indices = [index for polygon in polygons for index in polygon]

@@ -135,10 +129,6 @@ def model_to_mesh_object(model: Model, scene : Scene, materials_map : Dict[str,
    blender_mesh.uv_layers.new(do_init=False)
    blender_mesh.uv_layers[0].data.foreach_set("uv", [component for i in flat_indices for component in vertex_uvs[i]])

    # Colors
    if geometry_has_colors:
        blender_mesh.color_attributes.new("COLOR0", "FLOAT_COLOR", "POINT")
        blender_mesh.color_attributes[0].data.foreach_set("color", vertex_colors)

    # POLYGONS/FACES
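A hedged sketch of the API difference visible in these hunks: the v1.0.0 side reads per-loop colors through mesh.vertex_colors, while master reads per-point colors through the newer color_attributes API, which is why the index switches from loop_index to vertex_index. A reader that accepts either form might look like this (illustrative only):

def read_vertex_color(mesh, loop_index, vertex_index):
    if getattr(mesh, "color_attributes", None) and mesh.color_attributes.active_color:
        return list(mesh.color_attributes.active_color.data[vertex_index].color)
    if mesh.vertex_colors.active:
        return list(mesh.vertex_colors.active.data[loop_index].color)
    return None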
@@ -73,7 +73,7 @@ class Model:
    name: str = "Model"
    parent: str = ""
    model_type: ModelType = ModelType.NULL
    hidden: bool = False
    hidden: bool = True

    transform: ModelTransform = field(default_factory=ModelTransform)
@@ -30,64 +30,34 @@ def gather_models(apply_modifiers: bool, export_target: str, skeleton_only: bool
    # Pure bones are just bones and after all objects are explored the only
    # entries remaining in this dict will be bones without geometry.
    pure_bones_from_armature = {}
    armature_found = None

    # Non-bone objects that will be exported
    blender_objects_to_export = []
    objects_to_export = select_objects(export_target)

    # This must be seperate from the list above,
    # since exported objects will contain Blender objects as well as bones
    # Here we just keep track of all names, regardless of origin
    exported_object_names: Set[str] = set()

    # Me must keep track of hidden objects separately because
    # evaluated_get clears hidden status
    blender_objects_to_hide: Set[str] = set()

    # Armature must be processed before everything else!

    # In this loop we also build a set of names of all objects
    # that will be exported. This is necessary so we can prune vertex
    # groups that do not reference exported objects in the main
    # model building loop below this one.
    for uneval_obj in select_objects(export_target):

        if get_is_model_hidden(uneval_obj):
            blender_objects_to_hide.add(uneval_obj.name)

        if uneval_obj.type == "ARMATURE" and not armature_found:
            # Keep track of the armature, we don't want to process > 1!
    for uneval_obj in objects_to_export:
        if uneval_obj.type == "ARMATURE":
            armature_found = uneval_obj.evaluated_get(depsgraph) if apply_modifiers else uneval_obj
            # Get all bones in a separate list. While we iterate through
            # objects we removed bones with geometry from this dict. After iteration
            # is done, we add the remaining bones to the models from exported
            # scene objects.
            pure_bones_from_armature = expand_armature(armature_found)
            # All bones to set
            exported_object_names.update(pure_bones_from_armature.keys())
            break

        elif not (uneval_obj.type in SKIPPED_OBJECT_TYPES and uneval_obj.name not in parents):
            exported_object_names.add(uneval_obj.name)
            blender_objects_to_export.append(uneval_obj)

        else:
            pass

    for uneval_obj in blender_objects_to_export:
    for uneval_obj in objects_to_export:
        if uneval_obj.type == "ARMATURE" or (uneval_obj.type in SKIPPED_OBJECT_TYPES and uneval_obj.name not in parents):
            continue

        obj = uneval_obj.evaluated_get(depsgraph) if apply_modifiers else uneval_obj

        check_for_bad_lod_suffix(obj)

        # Test for a mesh object that should be a BONE on export.
        # Test for a mesh object that is actually a BONE (shares name with bone_parent)
        # If so, we inject geometry into the BONE while not modifying it's transform/name
        # and remove it from the set of BONES without geometry (pure).
        if obj.name in pure_bones_from_armature:
            model = pure_bones_from_armature.pop(obj.name)
        if obj.parent_bone and obj.parent_bone in pure_bones_from_armature:
            model = pure_bones_from_armature[obj.parent_bone]
            # Since we found a composite bone, removed it from the dict of pure bones
            pure_bones_from_armature.pop(obj.parent_bone)
        else:
            model = Model()
            model.name = obj.name
        model.model_type = ModelType.NULL if skeleton_only else get_model_type(obj, armature_found)
        model.model_type = get_model_type(obj, skeleton_only)
        model.hidden = get_is_model_hidden(obj)

        transform = obj.matrix_local

@@ -111,19 +81,10 @@ def gather_models(apply_modifiers: bool, export_target: str, skeleton_only: bool
        model.transform.rotation = convert_rotation_space(local_rotation)
        model.transform.translation = convert_vector_space(local_translation)

        if obj.type in MESH_OBJECT_TYPES and not skeleton_only:

            # Vertex groups are often used for purposes other than skinning.
            # Here we gather all vgroups and select the ones that reference
            # objects included in the export.
            valid_vgroup_indices : Set[int] = set()
            if model.model_type == ModelType.SKIN:
                valid_vgroups = [group for group in obj.vertex_groups if group.name in exported_object_names]
                valid_vgroup_indices = { group.index for group in valid_vgroups }
                model.bone_map = [ group.name for group in valid_vgroups ]
        if obj.type in MESH_OBJECT_TYPES:

            mesh = obj.to_mesh()
            model.geometry = create_mesh_geometry(mesh, valid_vgroup_indices)
            model.geometry = create_mesh_geometry(mesh, obj.vertex_groups)

            obj.to_mesh_clear()

@@ -136,17 +97,20 @@ def gather_models(apply_modifiers: bool, export_target: str, skeleton_only: bool
                raise RuntimeError(f"Object '{obj.name}' has resulted in a .msh geometry segment that has "
                                   f"more than {MAX_MSH_VERTEX_COUNT} vertices! Split the object's mesh up "
                                   f"and try again!")
            if obj.vertex_groups:
                model.bone_map = [group.name for group in obj.vertex_groups]

        if get_is_collision_primitive(obj):
            model.collisionprimitive = get_collision_primitive(obj)

        model.hidden = model.name in blender_objects_to_hide

        models_list.append(model)

    # We removed all composite bones after looking through the objects,
    # so the bones left are all pure and we add them all here.
    return (models_list + list(pure_bones_from_armature.values()), armature_found)
    models_list += pure_bones_from_armature.values()

    return (models_list, armature_found)

@@ -162,7 +126,7 @@ def create_parents_set() -> Set[str]:

    return parents

def create_mesh_geometry(mesh: bpy.types.Mesh, valid_vgroup_indices: Set[int]) -> List[GeometrySegment]:
def create_mesh_geometry(mesh: bpy.types.Mesh, has_weights: bool) -> List[GeometrySegment]:
    """ Creates a list of GeometrySegment objects from a Blender mesh.
        Does NOT create triangle strips in the GeometrySegment however. """

@@ -179,11 +143,11 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, valid_vgroup_indices: Set[int]) -
    vertex_remap: List[Dict[Tuple[int, int], int]] = [dict() for i in range(material_count)]
    polygons: List[Set[int]] = [set() for i in range(material_count)]

    if mesh.color_attributes.active_color is not None:
    if mesh.vertex_colors.active is not None:
        for segment in segments:
            segment.colors = []

    if valid_vgroup_indices:
    if has_weights:
        for segment in segments:
            segment.weights = []

@@ -215,14 +179,11 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, valid_vgroup_indices: Set[int]) -
            yield mesh.uv_layers.active.data[loop_index].uv.y

        if segment.colors is not None:
            data_type = mesh.color_attributes.active_color.data_type
            if data_type == "FLOAT_COLOR" or data_type == "BYTE_COLOR":
                for v in mesh.color_attributes.active_color.data[vertex_index].color:
            for v in mesh.vertex_colors.active.data[loop_index].color:
                yield v

        if segment.weights is not None:
            for v in mesh.vertices[vertex_index].groups:
                if v.group in valid_vgroup_indices:
                    yield v.group
                    yield v.weight

@@ -247,13 +208,12 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, valid_vgroup_indices: Set[int]) -
            segment.texcoords.append(mesh.uv_layers.active.data[loop_index].uv.copy())

            if segment.colors is not None:
                data_type = mesh.color_attributes.active_color.data_type
                if data_type == "FLOAT_COLOR" or data_type == "BYTE_COLOR":
                    segment.colors.append(list(mesh.color_attributes.active_color.data[vertex_index].color))
                segment.colors.append(list(mesh.vertex_colors.active.data[loop_index].color))

            if segment.weights is not None:
                groups = mesh.vertices[vertex_index].groups
                segment.weights.append([VertexWeight(v.weight, v.group) for v in groups if v.group in valid_vgroup_indices])

                segment.weights.append([VertexWeight(v.weight, v.group) for v in groups])

            return new_index

@@ -272,29 +232,12 @@ def create_mesh_geometry(mesh: bpy.types.Mesh, valid_vgroup_indices: Set[int]) -

    return segments

def get_model_type(obj: bpy.types.Object, armature_found: bpy.types.Object) -> ModelType:
def get_model_type(obj: bpy.types.Object, skel_only: bool) -> ModelType:
    """ Get the ModelType for a Blender object. """

    if obj.type in MESH_OBJECT_TYPES:
        # Objects can have vgroups for non-skinning purposes.
        # If we can find one vgroup that shares a name with a bone in the
        # armature, we know the vgroup is for weighting purposes and thus
        # the object is a skin. Otherwise, interpret it as a static mesh.

        # We must also check that an armature included in the export
        # and that it is the same one this potential skin is weighting to.
        # If we failed to do this, a user could export a selected object
        # that is a skin, but the weight data in the export would reference
        # nonexistent models!
        if (obj.vertex_groups and armature_found and
            obj.parent and obj.parent.name == armature_found.name):

            for vgroup in obj.vertex_groups:
                if vgroup.name in armature_found.data.bones:
    if obj.type in MESH_OBJECT_TYPES and not skel_only:
        if obj.vertex_groups:
            return ModelType.SKIN

        return ModelType.STATIC

    else:
        return ModelType.STATIC

@@ -303,9 +246,6 @@ def get_model_type(obj: bpy.types.Object, armature_found: bpy.types.Object) -> M
def get_is_model_hidden(obj: bpy.types.Object) -> bool:
    """ Gets if a Blender object should be marked as hidden in the .msh file. """

    if obj.hide_get():
        return True

    name = obj.name.lower()

    if name.startswith("c_"):

@@ -370,14 +310,6 @@ def get_collision_primitive_shape(obj: bpy.types.Object) -> CollisionPrimitiveSh
    """ Gets the CollisionPrimitiveShape of an object or raises an error if
        it can't. """

    # arc170 fighter has examples of box colliders without proper naming
    # and cis_hover_aat has a cylinder which is named p_vehiclesphere.
    # To export these properly we must check the collision_prim property
    # that was assigned on import BEFORE looking at the name.
    prim_type = obj.swbf_msh_coll_prim.prim_type
    if prim_type in [item.value for item in CollisionPrimitiveShape]:
        return CollisionPrimitiveShape(prim_type)

    name = obj.name.lower()

    if "sphere" in name or "sphr" in name or "spr" in name:

@@ -387,6 +319,11 @@ def get_collision_primitive_shape(obj: bpy.types.Object) -> CollisionPrimitiveSh
    if "box" in name or "cube" in name or "cuboid" in name:
        return CollisionPrimitiveShape.BOX

    # arc170 fighter has examples of box colliders without proper naming
    prim_type = obj.swbf_msh_coll_prim.prim_type
    if prim_type in [item.value for item in CollisionPrimitiveShape]:
        return CollisionPrimitiveShape(prim_type)

    raise RuntimeError(f"Object '{obj.name}' has no primitive type specified in it's name!")

@@ -498,7 +435,6 @@ def expand_armature(armature: bpy.types.Object) -> Dict[str, Model]:

        model.model_type = ModelType.BONE if bone.name in proper_BONES else ModelType.NULL
        model.name = bone.name
        model.hidden = True
        model.transform.rotation = convert_rotation_space(local_rotation)
        model.transform.translation = convert_vector_space(local_translation)
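A short sketch of the skin test the master side of get_model_type describes (illustrative, not the exported function): an object counts as a skin only when it is parented to the exported armature and at least one of its vertex groups names a bone of that armature; otherwise its vertex groups are assumed to serve non-skinning purposes and the object stays STATIC.

def is_skin(obj, armature) -> bool:
    if not (armature and obj.vertex_groups and obj.parent and obj.parent.name == armature.name):
        return False
    return any(vgroup.name in armature.data.bones for vgroup in obj.vertex_groups)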
@@ -8,25 +8,6 @@ import math
from mathutils import Vector, Matrix


# Convert model with geometry to null.
# Currently not used, but could be necessary in the future.
def make_null(model : Model):
    model.model_type = ModelType.NULL
    bone_map = None
    geometry = None


# I think this is all we need to check for to avoid
# common ZE/ZETools crashes...
def validate_geometry_segment(segment : GeometrySegment) -> bool:
    if not segment.positions or not segment.triangle_strips:
        return False
    else:
        return True


def inject_dummy_data(model : Model):
    """ Adds a triangle and material to the model (scene root). Needed to export zenasst-compatible skeletons. """
    model.hidden = True
@@ -165,22 +165,18 @@ def _read_matd(matd: Reader) -> Material:

        elif next_header == "TX0D":
            with matd.read_child() as tx0d:
                if tx0d.bytes_remaining() > 0:
                    mat.texture0 = tx0d.read_string()

        elif next_header == "TX1D":
            with matd.read_child() as tx1d:
                if tx1d.bytes_remaining() > 0:
                    mat.texture1 = tx1d.read_string()

        elif next_header == "TX2D":
            with matd.read_child() as tx2d:
                if tx2d.bytes_remaining() > 0:
                    mat.texture2 = tx2d.read_string()

        elif next_header == "TX3D":
            with matd.read_child() as tx3d:
                if tx3d.bytes_remaining() > 0:
                    mat.texture3 = tx3d.read_string()

        else:

@@ -207,8 +203,6 @@ def _read_modl(modl: Reader, materials_list: List[Material]) -> Model:

            global model_counter
            global mndx_remap

            if index not in mndx_remap:
                mndx_remap[index] = model_counter

                model_counter += 1
@@ -23,6 +23,92 @@ import os


# Extracts and applies anims in the scene to the currently selected armature
def extract_and_apply_anim(filename : str, scene : Scene):

    arma = bpy.context.view_layer.objects.active

    if not arma or arma.type != 'ARMATURE':
        raise Exception("Select an armature to attach the imported animation to!")

    if scene.animation is None:
        raise Exception("No animation found in msh file!")

    else:
        head, tail = os.path.split(filename)
        anim_name = tail.split(".")[0]

        if anim_name in bpy.data.actions:
            bpy.data.actions.remove(bpy.data.actions[anim_name], do_unlink=True)

        action = bpy.data.actions.new(anim_name)
        action.use_fake_user = True

        if not arma.animation_data:
            arma.animation_data_create()

        # Record the starting transforms of each bone. Pose space is relative
        # to bones starting transforms. Starting = in edit mode
        bone_bind_poses = {}

        bpy.context.view_layer.objects.active = arma
        bpy.ops.object.mode_set(mode='EDIT')

        for edit_bone in arma.data.edit_bones:
            if edit_bone.parent:
                bone_local = edit_bone.parent.matrix.inverted() @ edit_bone.matrix
            else:
                bone_local = arma.matrix_local @ edit_bone.matrix

            bone_bind_poses[edit_bone.name] = bone_local.inverted()

        bpy.ops.object.mode_set(mode='OBJECT')

        for bone in arma.pose.bones:
            if to_crc(bone.name) in scene.animation.bone_frames:

                bind_mat = bone_bind_poses[bone.name]

                translation_frames, rotation_frames = scene.animation.bone_frames[to_crc(bone.name)]

                loc_data_path = "pose.bones[\"{}\"].location".format(bone.name)
                rot_data_path = "pose.bones[\"{}\"].rotation_quaternion".format(bone.name)

                fcurve_rot_w = action.fcurves.new(rot_data_path, index=0, action_group=bone.name)
                fcurve_rot_x = action.fcurves.new(rot_data_path, index=1, action_group=bone.name)
                fcurve_rot_y = action.fcurves.new(rot_data_path, index=2, action_group=bone.name)
                fcurve_rot_z = action.fcurves.new(rot_data_path, index=3, action_group=bone.name)

                for frame in rotation_frames:
                    i = frame.index
                    q = (bind_mat @ convert_rotation_space(frame.rotation).to_matrix().to_4x4()).to_quaternion()

                    fcurve_rot_w.keyframe_points.insert(i,q.w)
                    fcurve_rot_x.keyframe_points.insert(i,q.x)
                    fcurve_rot_y.keyframe_points.insert(i,q.y)
                    fcurve_rot_z.keyframe_points.insert(i,q.z)

                fcurve_loc_x = action.fcurves.new(loc_data_path, index=0, action_group=bone.name)
                fcurve_loc_y = action.fcurves.new(loc_data_path, index=1, action_group=bone.name)
                fcurve_loc_z = action.fcurves.new(loc_data_path, index=2, action_group=bone.name)

                for frame in translation_frames:
                    i = frame.index
                    t = (bind_mat @ Matrix.Translation(convert_vector_space(frame.translation))).translation

                    fcurve_loc_x.keyframe_points.insert(i,t.x)
                    fcurve_loc_y.keyframe_points.insert(i,t.y)
                    fcurve_loc_z.keyframe_points.insert(i,t.z)

        arma.animation_data.action = action


# Create the msh hierachy. Armatures are not created here.
def extract_models(scene: Scene, materials_map : Dict[str, bpy.types.Material]) -> Dict[str, bpy.types.Object]:

@@ -195,5 +281,6 @@ def extract_scene(filepath: str, scene: Scene):
    for model in scene.models:
        if model.name in model_map:
            obj = model_map[model.name]
            obj.hide_set(model.hidden or get_is_model_hidden(obj))
            if get_is_model_hidden(obj) and len(obj.children) == 0:
                obj.hide_set(True)
@@ -9,7 +9,7 @@ from mathutils import Vector
from .msh_model import Model, Animation, ModelType
from .msh_scene import Scene, SceneAABB
from .msh_model_gather import gather_models
from .msh_model_utilities import make_null, validate_geometry_segment, sort_by_parent, has_multiple_root_models, reparent_model_roots, get_model_world_matrix, inject_dummy_data
from .msh_model_utilities import sort_by_parent, has_multiple_root_models, reparent_model_roots, get_model_world_matrix, inject_dummy_data
from .msh_model_triangle_strips import create_models_triangle_strips
from .msh_material import *
from .msh_material_gather import gather_materials

@@ -53,20 +53,6 @@ def create_scene(generate_triangle_strips: bool, apply_modifiers: bool, export_t
            for segment in model.geometry:
                segment.triangle_strips = segment.triangles

    # After generating triangle strips we must prune any segments that don't have
    # them, or else ZE and most versions of ZETools will crash.

    # We could also make models with no valid segments nulls, since they might as well be,
    # but that could have unforseeable consequences further down the modding pipeline
    # and is not necessary to avoid the aforementioned crashes...
    for model in scene.models:
        if model.geometry is not None:
            # Doing this in msh_model_gather would be messy and the presence/absence
            # of triangle strips is required for a validity check.
            model.geometry = [segment for segment in model.geometry if validate_geometry_segment(segment)]
            #if not model.geometry:
            #    make_null(model)

    if has_multiple_root_models(scene.models):
        scene.models = reparent_model_roots(scene.models)
@@ -39,9 +39,12 @@ def pack_color(color) -> int:
    return packed

def unpack_color(color: int) -> List[float]:
    r = (color >> 16 & 0xFF) / 255.0
    g = (color >> 8 & 0xFF) / 255.0
    b = (color >> 0 & 0xFF) / 255.0
    a = (color >> 24 & 0xFF) / 255.0

    mask = int(0x000000ff)

    r = (color & (mask << 16)) / 255.0
    g = (color & (mask << 8)) / 255.0
    b = (color & mask) / 255.0
    a = (color & (mask << 24)) / 255.0

    return [r,g,b,a]
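A self-contained round-trip sketch of the ARGB packing both unpack_color variants above decode (the pack function here is illustrative, matching the channel layout implied by the shift-based version): each channel has to be shifted down to the low byte before dividing by 255, otherwise the masked value is still scaled by 2^8, 2^16 or 2^24.

def pack_color_argb(rgba):
    r, g, b, a = (int(round(c * 255.0)) & 0xFF for c in rgba)
    return (a << 24) | (r << 16) | (g << 8) | b

def unpack_color_argb(packed):
    # shift each channel down to the low byte, then normalize to [0, 1]
    return [((packed >> shift) & 0xFF) / 255.0 for shift in (16, 8, 0, 24)]

assert unpack_color_argb(pack_color_argb([1.0, 0.5, 0.25, 1.0]))[0] == 1.0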
@@ -1,46 +0,0 @@
""" Parses .tga.option and .msh.option files. Only used with the former as of now. """

import os


class MungeOptions:

    def __init__(self, path_to_option_file):
        self.options = {}

        if os.path.exists(path_to_option_file):
            with open(path_to_option_file, 'r') as option_file:
                option_text = option_file.read()

            option_parts = option_text.split()

            current_parameter = ""

            for part in option_parts:
                if part.startswith("-"):
                    current_parameter = part[1:]
                    self.options[current_parameter] = ""
                elif current_parameter:
                    current_value = self.options[current_parameter]
                    # Keep adding to value in case there are vector options
                    self.options[current_parameter] += part if not current_value else (" " + part)

    def is_option_present(self, param):
        return param in self.options

    def get_bool(self, param, default=False):
        return True if param in self.options else default

    def get_float(self, param, default=0.0):
        if param in self.options:
            try:
                result = float(self.options[param])
            except:
                result = default
            finally:
                return result
        else:
            return default

    def get_string(self, param, default=""):
        return self.options.get(param, default)
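A usage sketch for the parser above, with hypothetical option-file contents: a .tga.option file containing "-bumpmap -bumpscale 8" parses into {"bumpmap": "", "bumpscale": "8"}, so get_bool("bumpmap") is True and get_float("bumpscale") returns 8.0. The path below is illustrative; a missing file simply yields an empty option set.

opts = MungeOptions("armor.tga.option")
if opts.get_bool("bumpmap"):
    bump_scale = opts.get_float("bumpscale", default=1.0)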