Skip to content

Commit

Permalink
Release - 1.9
Browse files Browse the repository at this point in the history
  • Loading branch information
simon50keda committed Jun 28, 2017
1 parent 22b4ac4 commit 5e8bc9d
Show file tree
Hide file tree
Showing 44 changed files with 4,608 additions and 619 deletions.
23 changes: 18 additions & 5 deletions addon/io_scs_tools/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
"name": "SCS Tools",
"description": "Setup models, Import-Export SCS data format",
"author": "Simon Lusenc (50keda), Milos Zajic (4museman)",
"version": (1, 8, "2926820"),
"version": (1, 9, "12a2cec"),
"blender": (2, 78, 0),
"location": "File > Import-Export",
"wiki_url": "http://modding.scssoft.com/wiki/Documentation/Tools/SCS_Blender_Tools",
Expand Down Expand Up @@ -68,7 +68,7 @@ class ImportSCS(bpy.types.Operator, ImportHelper):
)

directory = StringProperty()
filename_ext = "*.pim"
filename_ext = "*.pim;*.pim.ef;"
filter_glob = StringProperty(default=filename_ext, options={'HIDDEN'})

def check(self, context):
Expand Down Expand Up @@ -107,12 +107,20 @@ def execute(self, context):
for filepath in paths:

result = False
if filepath.endswith("pim"):
if filepath.endswith(".pim") or filepath.endswith(".pim.ef"):

# check extension for DEF format and properly assign it to name suffix
ef_format_suffix = ""
if filepath.endswith(".ef"):
ef_format_suffix = ".ef"
filepath = filepath[:-len(ef_format_suffix)]

filepath = filepath[:-4]

try:

_get_scs_globals().import_in_progress = True
result = _pix_import.load(context, filepath)
result = _pix_import.load(context, filepath, name_suffix=ef_format_suffix)
_get_scs_globals().import_in_progress = False

except Exception as e:
Expand Down Expand Up @@ -265,8 +273,13 @@ def execute(self, context):
elif export_scope == 'scenes':
init_obj_list = tuple(bpy.data.objects)

# check extension for EF format and properly assign it to name suffix
ef_name_suffix = ""
if _get_scs_globals().export_output_type == "EF":
ef_name_suffix = ".ef"

try:
result = _export.batch_export(self, init_obj_list, menu_filepath=filepath)
result = _export.batch_export(self, init_obj_list, name_suffix=ef_name_suffix, menu_filepath=filepath)
except Exception as e:

result = {"CANCELLED"}
Expand Down
15 changes: 15 additions & 0 deletions addon/io_scs_tools/consts.py
Original file line number Diff line number Diff line change
Expand Up @@ -443,3 +443,18 @@ class SCSLigthing:

diffuse_lamp_name = ".scs_diffuse"
specular_lamp_name = ".scs_specular"


class PaintjobTools:
"""Constants for paintjob tools.
"""

uvs_name_2nd = "scs_paintjob_2nd"
"""2nd uvs layer name used during unification on export"""
uvs_name_3rd = "scs_paintjob_3rd"
"""3rd uvs layer name used during unification on export."""

model_refs_to_sii = ".scs_model_refs_to_sii_files"
"""Name of the property for saving references paths to models inside a group data-block."""
export_tag_obj_name = ".scs_export_group"
"""Name of the object inside the group whose visibility tells us whether the group should be exported or not."""
6 changes: 4 additions & 2 deletions addon/io_scs_tools/exp/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,14 +32,16 @@
from io_scs_tools.utils.printout import lprint


def batch_export(operator_instance, init_obj_list, menu_filepath=None):
def batch_export(operator_instance, init_obj_list, name_suffix="", menu_filepath=None):
"""This function calls other sorting functions and depending on the resulting output
dictionary it exports all available 'SCS Game Objects' into specified locations.
:param operator_instance: operator from within this function is called (used for report)
:type operator_instance: bpy.types.Operator
:param init_obj_list: initial object list which should be exported
:type init_obj_list: tuple of Blender objects
:param name_suffix: file name suffix (exchange format uses .ef)
:type name_suffix: str
:param menu_filepath: filepath used from menu export
:type menu_filepath: str
"""
Expand Down Expand Up @@ -97,7 +99,7 @@ def batch_export(operator_instance, init_obj_list, menu_filepath=None):
if os.path.isdir(filepath) and _path_utils.startswith(filepath, scs_project_path) and scs_project_path != "":

# EXPORT ENTRY POINT
export_success = pix.export(filepath, root_object, game_object_list)
export_success = pix.export(filepath, name_suffix, root_object, game_object_list)

if export_success:
scs_game_objects_exported.append("> \"" + root_object.name + "\" exported to: '" + filepath + "'")
Expand Down
6 changes: 4 additions & 2 deletions addon/io_scs_tools/exp/pia.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,7 @@ def _fill_channel_sections(data_list, channel_type="BoneChannel"):
return sections


def export(scs_root_obj, armature, scs_animation, dirpath, skeleton_filepath):
def export(scs_root_obj, armature, scs_animation, dirpath, name_suffix, skeleton_filepath):
"""Exports PIA animation
:param scs_root_obj: root object of current animation
Expand All @@ -291,6 +291,8 @@ def export(scs_root_obj, armature, scs_animation, dirpath, skeleton_filepath):
:type scs_animation: io_scs_tools.properties.object.ObjectAnimationInventoryItem
:param dirpath: path to export
:type dirpath: str
:param name_suffix: file name suffix
:type name_suffix: str
:param skeleton_filepath: name of skeleton file that this animation works on
:type skeleton_filepath: str
"""
Expand Down Expand Up @@ -336,7 +338,7 @@ def export(scs_root_obj, armature, scs_animation, dirpath, skeleton_filepath):

# FILE EXPORT
ind = " "
filepath = os.path.join(dirpath, scs_animation.name + ".pia")
filepath = os.path.join(dirpath, scs_animation.name + ".pia" + name_suffix)

# print("************************************")
return _pix_container.write_data_to_file(pia_container, filepath, ind)
8 changes: 5 additions & 3 deletions addon/io_scs_tools/exp/pic.py
Original file line number Diff line number Diff line change
Expand Up @@ -249,13 +249,15 @@ def _sort_collision_locators(collision_locator_list):
return box_coll_locators, sphere_coll_locators, capsule_coll_locators, cylinder_coll_locators, convex_coll_locators


def export(collision_locator_list, filepath, filename, used_parts):
def export(collision_locator_list, filepath, name_suffix, filename, used_parts):
"""Exports PIC colliders
:param collision_locator_list:
:type collision_locator_list:
:param filepath:
:type filepath:
:param name_suffix: file name suffix
:type name_suffix: str
:param filename:
:type filename:
:param used_parts: parts transitional structure for storing used parts inside this PIC export
Expand All @@ -281,7 +283,7 @@ def export(collision_locator_list, filepath, filename, used_parts):
part_sections = _fill_part_sections(collision_locator_list, used_parts)
collision_locator_sections = _fill_collision_locator_sections(collision_locator_list)
global_section = _fill_global_section(len_vertices, len_faces, materials,
len(piece_sections), len(part_sections), len(collision_locator_sections))
len(piece_sections), used_parts.count(), len(collision_locator_sections))

# DATA ASSEMBLING
pic_container = [header_section, global_section]
Expand All @@ -299,7 +301,7 @@ def export(collision_locator_list, filepath, filename, used_parts):

# FILE EXPORT
ind = " "
pic_filepath = str(filepath + ".pic")
pic_filepath = str(filepath + ".pic" + name_suffix)
result = _pix_container.write_data_to_file(pic_container, pic_filepath, ind)

# print("************************************")
Expand Down
104 changes: 54 additions & 50 deletions addon/io_scs_tools/exp/pim/exporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,11 +46,13 @@
from io_scs_tools.utils.printout import lprint


def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objects, model_locators,
def execute(dirpath, name_suffix, root_object, armature_object, skeleton_filepath, mesh_objects, model_locators,
used_parts, used_materials, used_bones, used_terrain_points):
"""Executes export of PIM file for given data.
:param dirpath: directory path for PIM file
:type dirpath: str
:param name_suffix: file name suffix
:type name_suffix: str
:param root_object: Blender SCS Root empty object
:type root_object: bpy.types.Object
:param armature_object: Blender Armature object belonging to this SCS game object
Expand Down Expand Up @@ -80,17 +82,12 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec

scs_globals = _get_scs_globals()

if scs_globals.export_output_type == "5":
format_version = 5
format_type = ""
else:
format_version = 1
format_type = "def"
format_version = 5

is_skin_used = (armature_object and root_object.scs_props.scs_root_animated == "anim")

pim_header = Header(format_type, format_version, root_object.name)
pim_global = Globall(len(used_parts.get_as_list()), skeleton_filepath)
pim_header = Header("", format_version, root_object.name)
pim_global = Globall(used_parts.count(), skeleton_filepath)

pim_materials = collections.OrderedDict() # dict of Material class instances representing used materials
""":type: dict[str, Material]"""
Expand Down Expand Up @@ -134,6 +131,8 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
# create mesh object data sections
for mesh_obj in mesh_objects:

lprint("I Preparing mesh object: %r ...", (mesh_obj.name,))

vert_groups = mesh_obj.vertex_groups

mesh_pieces = collections.OrderedDict()
Expand Down Expand Up @@ -173,18 +172,19 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
tangent_transf_mat = _scs_to_blend_matrix().inverted()
""":type: mathutils.Matrix"""

# get initial mesh and vertex groups for it
# get initial mesh & extra copy of the mesh for normals
mesh = _object_utils.get_mesh(mesh_obj)
_mesh_utils.bm_prepare_mesh_for_export(mesh, mesh_transf_mat)
mesh_for_normals = mesh.copy()

# get extra mesh only for normals
mesh_for_normals = _object_utils.get_mesh(mesh_obj)
# prepare meshes
faces_mapping = _mesh_utils.bm_prepare_mesh_for_export(mesh, mesh_transf_mat, triangulate=True)
mesh_for_normals.calc_normals_split()

missing_uv_layers = {} # stores missing uvs specified by materials of this object
missing_vcolor = False # indicates if object is missing vertex color layer
missing_vcolor_a = False # indicates if object is missing vertex color alpha layer
missing_skinned_verts = set() # indicates if object is having only partial skin, which is not allowed in our models
has_unnormalized_skin = False # indicates if object has vertices whose bone weight sum is smaller than one

for poly in mesh.polygons:

Expand All @@ -193,7 +193,7 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
# check material existence and decide what material name and effect has to be used
if mat_index >= len(mesh_obj.material_slots) or mesh_obj.material_slots[mat_index].material is None: # no material or invalid index
material = None
pim_mat_name = "_not_existing_material_"
pim_mat_name = "_default_material_-_default_settings_"
pim_mat_effect = "eut2.dif"
objects_with_default_material[mesh_obj.name] = 1
else:
Expand Down Expand Up @@ -228,9 +228,15 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
mesh_piece = mesh_pieces[pim_mat_name]
""":type: Piece"""

first_loop_pvert_i = None # storing first loop piece vertex index for usage as first vertex of each triangle of the polygon
prev_tris_last_pvert_i = None # storing last piece vertex index for usage as second vertex of each next triangle of the polygon
for i, loop_i in enumerate(poly.loop_indices):
# get polygon loop indices for normals depending on mapped triangulated face
if poly.index in faces_mapping:
normals_poly_loop_indices = list(mesh_for_normals.polygons[faces_mapping[poly.index]].loop_indices)
else:
normals_poly_loop_indices = list(mesh_for_normals.polygons[poly.index].loop_indices)

# vertex data
triangle_pvert_indices = [] # storing vertex indices for this polygon triangle
for loop_i in poly.loop_indices:

loop = mesh.loops[loop_i]
""":type: bpy.types.MeshLoop"""
Expand All @@ -240,10 +246,19 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
# 1. position -> mesh.vertices[loop.vertex_index].co
position = tuple(pos_transf_mat * mesh.vertices[vert_i].co)

# 2. normal -> loop.normal -> calc_normals_split() has to be called before
# NOTE: we are using normals from original mesh
normal = nor_transf_mat * mesh_for_normals.loops[loop_i].normal
normal = tuple(Vector(normal).normalized())
# 2. normal -> mesh_for_normals.loops[loop_i].normal -> calc_normals_split() has to be called before
normal = (0, 0, 0)
for i, normals_poly_loop_i in enumerate(normals_poly_loop_indices):
normal_loop = mesh_for_normals.loops[normals_poly_loop_i]

# match by vertex index as triangle will for sure have three unique vertices
if vert_i == normal_loop.vertex_index:
normal = nor_transf_mat * normal_loop.normal
normal = tuple(Vector(normal).normalized())
del normals_poly_loop_indices[i]
break
else:
lprint("E Normals data gathering went wrong, expect corrupted mesh! Shouldn't happen...")

# 3. uvs -> uv_lay = mesh.uv_layers[0].data; uv_lay[loop_i].uv
uvs = []
Expand Down Expand Up @@ -310,7 +325,10 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
# 6. There we go, vertex data collected! Now create internal vertex index, for triangle and skin stream construction
piece_vert_index = mesh_piece.add_vertex(vert_i, position, normal, uvs, uvs_aliases, vcol, tangent)

# 7. Get skinning data for vertex and save it to skin stream
# 7. Add vertex to triangle creation list
triangle_pvert_indices.append(piece_vert_index)

# 8. Get skinning data for vertex and save it to skin stream
if is_skin_used:
bone_weights = {}
bone_weights_sum = 0
Expand All @@ -326,35 +344,11 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
skin_entry = SkinStream.Entry(mesh_piece.get_index(), piece_vert_index, position, bone_weights, bone_weights_sum)
skin_stream.add_entry(skin_entry)

# report un-skinned vertices (no bones or zero sum weight)
# report un-skinned vertices (no bones or zero sum weight) or badly skinned model
if bone_weights_sum <= 0:
missing_skinned_verts.add(vert_i)

# 8. Triangle construction!
#
# We are using totally naive method for triangulation, taking polygon loops one by one
# and once we have enough vertices, we create triangle.
#
if i < 2: # on start only save first two loops piece vertex indices

if i == 0:
first_loop_pvert_i = piece_vert_index
else:
prev_tris_last_pvert_i = piece_vert_index

else: # each next loop requires triangle creation

# 1. construct vertices of triangle:
tris_pvert_indices = [first_loop_pvert_i, prev_tris_last_pvert_i, piece_vert_index]

# 2. save current piece vertex index as last triangle vertex for possible next triangles
prev_tris_last_pvert_i = piece_vert_index

# 3. Triangle creation, at last!
if face_flip:
mesh_piece.add_triangle(tuple(tris_pvert_indices))
else:
mesh_piece.add_triangle(tuple(tris_pvert_indices[::-1])) # yep it's weird but it simply works vice versa
elif bone_weights_sum < 1:
has_unnormalized_skin = True

# Addition - Terrain Points: save vertex to terrain points storage, if present in correct vertex group
for group in mesh.vertices[vert_i].groups:
Expand Down Expand Up @@ -397,6 +391,12 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
used_terrain_points.add(variant_i, node_index, position, normal)
break

# triangles
if face_flip:
mesh_piece.add_triangle(tuple(triangle_pvert_indices))
else:
mesh_piece.add_triangle(tuple(triangle_pvert_indices[::-1])) # yep it's weird but it simply works vice versa

# free normals calculations
_mesh_utils.cleanup_mesh(mesh)
_mesh_utils.cleanup_mesh(mesh_for_normals)
Expand Down Expand Up @@ -435,6 +435,10 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
if len(missing_skinned_verts) > 0:
lprint("E Object %r from SCS Root %r has %s vertices which are not skinned to any bone, expect errors during conversion!",
(mesh_obj.name, root_object.name, len(missing_skinned_verts)))
if has_unnormalized_skin:
lprint("W Object %r from SCS Root %r has unormalized skinning, exporting normalized weights!\n\t "
"You can normalize weights by selecting object & executing 'Normalize All Vertex Groups'.",
(mesh_obj.name, root_object.name))

# report missing data for whole model
if len(missing_mappings_data) > 0:
Expand Down Expand Up @@ -512,5 +516,5 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec

# write to file
ind = " "
pim_filepath = os.path.join(dirpath, root_object.name + ".pim")
pim_filepath = os.path.join(dirpath, root_object.name + ".pim" + name_suffix)
return _pix_container.write_data_to_file(pim_container, pim_filepath, ind)
6 changes: 3 additions & 3 deletions addon/io_scs_tools/exp/pim/piece.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,12 +77,12 @@ def __calc_vertex_hash(index, uvs, rgba, tangent):

vertex_hash = str(index)
for uv in uvs:
vertex_hash += frmt % uv[0] + frmt % uv[1]
vertex_hash = ''.join((vertex_hash, frmt % uv[0], frmt % uv[1]))

vertex_hash += frmt % rgba[0] + frmt % rgba[1] + frmt % rgba[2] + frmt % rgba[3]
vertex_hash = ''.join((vertex_hash, frmt % rgba[0], frmt % rgba[1], frmt % rgba[2], frmt % rgba[3]))

if tangent:
vertex_hash += frmt % tangent[0] + frmt % tangent[1] + frmt % tangent[2] + frmt % tangent[3]
vertex_hash = ''.join((vertex_hash, frmt % tangent[0], frmt % tangent[1], frmt % tangent[2], frmt % tangent[3]))

return vertex_hash

Expand Down
Loading

0 comments on commit 5e8bc9d

Please sign in to comment.