Release - 1.7.4

simon50keda committed Mar 29, 2017
1 parent e3dc726 commit 06d7ffc
Showing 22 changed files with 286 additions and 164 deletions.
2 changes: 1 addition & 1 deletion addon/io_scs_tools/__init__.py
@@ -22,7 +22,7 @@
"name": "SCS Tools",
"description": "Setup models, Import-Export SCS data format",
"author": "Simon Lusenc (50keda), Milos Zajic (4museman)",
"version": (1, 7, "0bc527c"),
"version": (1, 7, "40fb83b"),
"blender": (2, 78, 0),
"location": "File > Import-Export",
"wiki_url": "http://modding.scssoft.com/wiki/Documentation/Tools/SCS_Blender_Tools",
35 changes: 11 additions & 24 deletions addon/io_scs_tools/exp/pic.py
@@ -123,44 +123,31 @@ def _fill_part_sections(locator_list, used_parts):
:rtype: list
"""

parts = []
locator_parts = {}
for locator_i, locator in enumerate(locator_list):
scs_part = locator.scs_props.scs_part
scs_part = used_parts.ensure_part(locator)

if scs_part not in locator_parts:
locator_parts[scs_part] = [locator_i]
else:
locator_parts[scs_part].append(locator_i)

if scs_part not in parts:
parts.append(scs_part)

# PART SECTIONS
ordered_part_sections = []
for part_name in used_parts.get_as_list():

piece_count = 0
pieces = None
locator_count = 0
locators = None
pieces = None # just write zero pieces, as pieces are not used in PIC
piece_count = 0

# fill up part data from PIC data
if part_name in parts:

# PIECE COUNT
piece_count = 0
# PIECES
pieces = None

# LOCATOR COUNT
if part_name in locator_parts:
locator_count = len(locator_parts[part_name])
# LOCATORS
locators = None
if part_name in locator_parts:
if locator_parts[part_name]:
locators = locator_parts[part_name]
# LOCATOR COUNT
if part_name in locator_parts:
locator_count = len(locator_parts[part_name])
# LOCATORS
locators = None
if part_name in locator_parts:
if locator_parts[part_name]:
locators = locator_parts[part_name]

# MAKE SECTION
part_section = _SectionData("Part")
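Note on the recurring change in this commit: exporters no longer read locator.scs_props.scs_part directly but register parts through the shared used_parts transitional structure (io_scs_tools.exp.transition_structs.parts.PartsTrans). That class itself is not part of this diff; judging only from the call sites above (ensure_part, get_as_list and the removed count/is_present usages), a minimal sketch of the idea could look like the following. The class and attribute names below are assumptions for illustration, not the addon's actual implementation.

class PartsTransSketch:
    """Hypothetical stand-in for the PartsTrans transitional structure."""

    def __init__(self):
        self.__parts = []  # ordered, unique part names collected during export

    def ensure_part(self, obj):
        """Register the SCS part of the given object (if not seen yet) and return its name."""
        part_name = obj.scs_props.scs_part
        if part_name not in self.__parts:
            self.__parts.append(part_name)
        return part_name

    def get_as_list(self):
        """Return all collected part names in registration order."""
        return list(self.__parts)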
35 changes: 29 additions & 6 deletions addon/io_scs_tools/exp/pim/exporter.py
@@ -89,7 +89,7 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
is_skin_used = (armature_object and root_object.scs_props.scs_root_animated == "anim")

pim_header = Header(format_type, format_version, root_object.name)
pim_global = Globall(used_parts.count(), skeleton_filepath)
pim_global = Globall(len(used_parts.get_as_list()), skeleton_filepath)

pim_materials = collections.OrderedDict() # dict of Material class instances representing used materials
""":type: dict[str, Material]"""
@@ -105,16 +105,30 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec

bones = skin = skin_stream = None
if is_skin_used:

invalid_bone_names = set() # set for saving bones with invalid names; used for reporting to the user

# create bones data section
bones = Bones()
for bone in armature_object.data.bones:
bones.add_bone(bone.name)
used_bones.add(bone.name)

# do bone name checks
if _name_utils.tokenize_name(bone.name) != bone.name:
invalid_bone_names.add(bone.name)

# create skin data section
skin_stream = SkinStream(SkinStream.Types.POSITION)
skin = Skin(skin_stream)

# report invalid bone names
if len(invalid_bone_names) > 0:
lprint("W Invalid bone names detected, max. length of a valid bone name is 12 and it must consist of [a-z, 0-9 and _] characters.\n\t "
"Conversion will generalize names, however expect problems on re-import! List of invalid bone names for %r:\n\t "
"%r",
(armature_object.name, list(invalid_bone_names)))

# create mesh object data sections
for mesh_obj in mesh_objects:

@@ -151,6 +165,7 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
missing_uv_layers = {} # stores missing uvs specified by materials of this object
missing_vcolor = False # indicates if object is missing vertex color layer
missing_vcolor_a = False # indicates if object is missing vertex color alpha layer
missing_skinned_verts = set() # indicates if object has only partial skin, which is not allowed in our models

for poly in mesh.polygons:

@@ -275,17 +290,23 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
if is_skin_used:
# get skinning data for vertex and save it to skin stream
bone_weights = {}
bone_weights_sum = 0
for v_group_entry in mesh.vertices[vert_i].groups:
bone_indx = bones.get_bone_index(vert_groups[v_group_entry.group].name)
bone_weight = v_group_entry.weight

# proceed only if bone exists in our armature
if bone_indx != -1:
bone_weights[bone_indx] = bone_weight
bone_weights_sum += bone_weight

skin_entry = SkinStream.Entry(mesh_piece.get_index(), piece_vert_index, position, bone_weights)
skin_entry = SkinStream.Entry(mesh_piece.get_index(), piece_vert_index, position, bone_weights, bone_weights_sum)
skin_stream.add_entry(skin_entry)

# report un-skinned vertices (no bones or zero sum weight)
if bone_weights_sum <= 0:
missing_skinned_verts.add(vert_i)

# save to terrain points storage if present in correct vertex group
for group in mesh.vertices[vert_i].groups:

@@ -333,7 +354,7 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
_mesh_utils.cleanup_mesh(mesh)

# create part if it doesn't exists yet
part_name = mesh_obj.scs_props.scs_part
part_name = used_parts.ensure_part(mesh_obj)
if part_name not in pim_parts:
pim_parts[part_name] = Part(part_name)

@@ -357,6 +378,9 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
if missing_vcolor_a:
lprint("W Object %r is missing vertex color alpha layer with name %r! Default alpha will be exported (0.5)",
(mesh_obj.name, _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix))
if len(missing_skinned_verts) > 0:
lprint("E Object %r from SCS Root %r has %s vertices which are not skinned to any bone, expect errors during conversion!",
(mesh_obj.name, root_object.name, len(missing_skinned_verts)))

# report missing data for whole model
if len(missing_mappings_data) > 0:
@@ -393,9 +417,8 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec
locator.set_scale(sca)

# create part if it doesn't exists yet
part_name = loc_obj.scs_props.scs_part
part_name = used_parts.ensure_part(loc_obj)
if part_name not in pim_parts:
assert used_parts.is_present(part_name)
pim_parts[part_name] = Part(part_name)

# add locator to part
@@ -416,7 +439,7 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec

for part_name in used_parts.get_as_list():

# export all parts even empty ones gathered from PIC and PIP
# export all parts even empty ones used only in PIC and/or PIP
if part_name in pim_parts:
pim_container.append(pim_parts[part_name].get_as_section())
else:
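For reference, the bone-name warning introduced above boils down to a simple rule: a valid name is at most 12 characters long and uses only [a-z, 0-9 and _]. The exporter detects violations via _name_utils.tokenize_name, which is not shown in this diff; a standalone check of just the stated rule (a hypothetical helper, for illustration only) might be:

import re

def is_valid_bone_name(name):
    """True if the name already satisfies the rule from the warning above:
    at most 12 characters, only lowercase letters, digits and underscores."""
    return re.fullmatch(r"[a-z0-9_]{1,12}", name) is not None

print(is_valid_bone_name("hand_l"))         # True
print(is_valid_bone_name("LeftHandBone1"))  # False - uppercase letters and 13 characters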
18 changes: 5 additions & 13 deletions addon/io_scs_tools/exp/pim/skin_stream.py
@@ -42,15 +42,7 @@ class Entry:
__bone_weights = None
__clones = None

@staticmethod
def __get_weight_sum__(bone_weights):
weight_sum = 0
for bone_weight in bone_weights.values():
weight_sum += bone_weight

return weight_sum

def __init__(self, piece_index, vertex_index, vertex_pos, bone_weights):
def __init__(self, piece_index, vertex_index, vertex_pos, bone_weights, bone_weights_sum):
"""Create new entry instance with given indices, position and bone weights.
:param piece_index: index of the piece inside SCS game object
:type piece_index: int
@@ -60,19 +52,19 @@ def __init__(self, piece_index, vertex_index, vertex_pos, bone_weights):
:type vertex_pos: tuple
:param bone_weights: bone weights dictionary where key is bone index and value is bone weight
:type bone_weights: dict
:param bone_weights_sum: sum of the weights of all the bones the vertex is skinned to
:type bone_weights_sum: float
"""

self.__bone_weights = OrderedDict()
self.__clones = OrderedDict()

self.__position = vertex_pos

weight_sum = self.__get_weight_sum__(bone_weights)

# normalize if sum of weights is bigger than one
if weight_sum > 1:
if bone_weights_sum > 1:
for bone_indx in bone_weights.keys():
self.__bone_weights[bone_indx] = bone_weights[bone_indx] / weight_sum
self.__bone_weights[bone_indx] = bone_weights[bone_indx] / bone_weights_sum
else:
for bone_indx in bone_weights.keys():
self.__bone_weights[bone_indx] = bone_weights[bone_indx]
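The new bone_weights_sum parameter replaces the removed __get_weight_sum__ helper: the PIM exporter already accumulates the sum while collecting the weights, so the entry only has to decide whether to rescale. A standalone restatement of that rule (illustrative only, the names are not taken from the addon):

from collections import OrderedDict

def normalize_bone_weights(bone_weights, bone_weights_sum):
    """Rescale weights only when their sum exceeds 1; otherwise keep them unchanged."""
    result = OrderedDict()
    if bone_weights_sum > 1:
        for bone_indx, weight in bone_weights.items():
            result[bone_indx] = weight / bone_weights_sum
    else:
        result.update(bone_weights)
    return result

print(normalize_bone_weights({0: 0.75, 1: 0.5}, 1.25))  # rescaled to 0.6 and 0.4, summing to 1.0
print(normalize_bone_weights({0: 0.5, 1: 0.3}, 0.8))    # left untouched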
6 changes: 4 additions & 2 deletions addon/io_scs_tools/exp/pip/exporter.py
@@ -114,7 +114,7 @@ def __get_trigger_point__(pip_trigger_points, locator_name):
return pip_trigger_points[locator_name]


def execute(dirpath, filename, prefab_locator_list, offset_matrix, used_terrain_points):
def execute(dirpath, filename, prefab_locator_list, offset_matrix, used_parts, used_terrain_points):
"""Exports PIP file from given locator list.
:param prefab_locator_list:
@@ -125,6 +125,8 @@ def execute(dirpath, filename, prefab_locator_list, offset_matrix, used_terrain_
:type filename: str
:param offset_matrix: offset matrix for locators
:type offset_matrix: mathutils.Matrix
:param used_parts: parts transitional structure for storing used parts inside this PIP export
:type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans
:param used_terrain_points: terrain points transitional structure for accessing terrain points stored during PIM export
:type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans
:return: True if successful; otherwise False
@@ -274,7 +276,7 @@ def execute(dirpath, filename, prefab_locator_list, offset_matrix, used_terrain_
""":type: io_scs_tools.properties.object.ObjectSCSTools"""

# create sign and set properties
sign = Sign(locator.name, locator_scs_props.scs_part)
sign = Sign(locator.name, used_parts.ensure_part(locator))

pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world)
sign.set_position(pos)
57 changes: 50 additions & 7 deletions addon/io_scs_tools/exp/pip/node.py
@@ -19,7 +19,8 @@
# Copyright (C) 2015: SCS Software

from collections import OrderedDict

from mathutils import Vector
from mathutils.geometry import distance_point_to_plane
from io_scs_tools.consts import PrefabLocators as _PL_consts
from io_scs_tools.exp.pip.node_stream import Stream
from io_scs_tools.internals.structure import SectionData as _SectionData
@@ -91,6 +92,9 @@ def __prepare_streams__(self):
"""Converts terrain points from variant mapped indices to PIP streams and stores them.
"""

# first prepare terrain points for export
self.__prepare_terrain_points__()

# ensure empty streams
pos_stream = Stream(Stream.Types.POSITION)
nor_stream = Stream(Stream.Types.NORMAL)
@@ -130,6 +134,22 @@ def __prepare_streams__(self):
if variant_stream.get_size() > 0:
self.__tp_streams[Stream.Types.VARIANT_BLOCK] = variant_stream

def __prepare_terrain_points__(self):
"""Reverses the order of terrain points if the last point is closer to the node in its forward direction.
"""

for variant_index in self.__tp_per_variant:

# now if the tail is closer to the node on its forward axis, we reverse the list
plane_co = Vector(self.__position)
plane_no = Vector(self.__direction)

head_distance = distance_point_to_plane(self.__tp_per_variant[variant_index][0][0], plane_co, plane_no)
tail_distance = distance_point_to_plane(self.__tp_per_variant[variant_index][-1][0], plane_co, plane_no)

if not (head_distance <= tail_distance - 0.001): # 0.001 taken from Maya exporter
self.__tp_per_variant[variant_index].reverse()

def set_input_lane(self, index, curve_index):
"""Set the curve index for given input lane.
@@ -197,18 +217,41 @@ def add_terrain_point(self, position, normal, variant_index=-1):
if position is None and normal is None:
return

# ordering with insertion by position where closest terrain point
# shall be first in the list and farthest terrain point the last
# find nearest existing terrain point
i = 0
while i < len(self.__tp_per_variant[variant_index]):
smallest_dist = float("inf")
smallest_dist_i = 0
tp_count = len(self.__tp_per_variant[variant_index])
while i < tp_count:

pos, normal = self.__tp_per_variant[variant_index][i]
if get_distance(self.__position, position) < get_distance(self.__position, pos):
break
curr_distance = get_distance(position, pos)
if curr_distance < smallest_dist:
smallest_dist = curr_distance
smallest_dist_i = i

i += 1

self.__tp_per_variant[variant_index].insert(i, (position, normal))
# depending on the index of the nearest point, insert the new point
if smallest_dist_i == 0: # no terrain points yet or the nearest is the first: just put it at the start
self.__tp_per_variant[variant_index].insert(0, (position, normal))
elif smallest_dist_i == tp_count - 1: # the last one is the nearest: put it at the back
self.__tp_per_variant[variant_index].append((position, normal))
else:

# now comes the tricky one: the nearest point is somewhere in the middle.
# With that in mind, take the previous and next existing points and calculate
# which one the new point is closer to:
# 1. if closer to the previous one, insert the new point before the nearest
# 2. if closer to the next one, insert the new point after the nearest

ahead_tp = self.__tp_per_variant[variant_index][smallest_dist_i - 1]
behind_tp = self.__tp_per_variant[variant_index][smallest_dist_i + 1]

if get_distance(behind_tp[0], position) < get_distance(ahead_tp[0], position):
smallest_dist_i += 1

self.__tp_per_variant[variant_index].insert(smallest_dist_i, (position, normal))

Node.__global_tp_counter += 1

15 changes: 10 additions & 5 deletions addon/io_scs_tools/exp/pit.py
@@ -305,6 +305,8 @@ def _fill_part_list(parts, used_parts_names, all_parts=False):
"""
part_list = []
for part_name in used_parts_names:

part_written = False
for part in parts:

if part.name == part_name:
@@ -320,21 +322,25 @@ def _fill_part_list(parts, used_parts_names, all_parts=False):
part_atr.append(('INT', 'visible', include))

part_list.append((part.name, part_atr), )
part_written = True

if not part_written:
lprint("E Part %r from collected parts not available in variant parts inventory, expect problems during conversion!", (part_name,))

return part_list


def export(root_object, filepath, used_materials, used_parts):
def export(root_object, filepath, used_parts, used_materials):
"""Export PIT.
:param root_object: SCS root object
:type root_object: bpy.types.Object
:param filepath: PIT file path
:type filepath: str
:param used_materials: materials transitional structure for accessing stored materials from PIM
:type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans
:param used_parts: parts transitional structure for accessing stored parts from PIM, PIC and PIP
:type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans
:param used_materials: materials transitional structure for accessing stored materials from PIM
:type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans
:return: True if successful; False otherwise;
:rtype: bool
"""
@@ -617,7 +623,6 @@ def export(root_object, filepath, used_materials, used_parts):

# PARTS AND VARIANTS...
used_parts_names = used_parts.get_as_list()
part_list_cnt = len(used_parts_names)
if len(root_object.scs_object_variant_inventory) == 0:
# If there is no Variant, add the Default one...
part_list = _fill_part_list(root_object.scs_object_part_inventory, used_parts_names, all_parts=True)
@@ -633,7 +638,7 @@ def export(root_object, filepath, used_materials, used_parts):
# part_sections = fill_part_section(part_list)
variant_section = _fill_variant_sections(variant_list)
comment_header_section = _fill_comment_header_section(look_list, variant_list)
global_section = _fill_global_section(len(look_list), len(variant_list), part_list_cnt, len(used_materials_pairs))
global_section = _fill_global_section(len(look_list), len(variant_list), len(used_parts_names), len(used_materials_pairs))

# DATA ASSEMBLING
pit_container = [comment_header_section, header_section, global_section]
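The part_written flag added to _fill_part_list effectively asserts that every part collected during PIM/PIC/PIP export also exists in the SCS Root part inventory of each variant. A compact equivalent of that consistency check (illustrative only, not the addon's code):

def find_missing_parts(used_parts_names, parts):
    """Return used part names without a counterpart in the given part inventory,
    i.e. the names _fill_part_list would report with an 'E' message."""
    inventory_names = {part.name for part in parts}
    return [name for name in used_parts_names if name not in inventory_names]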