From 06d7ffc8c9878f70fef96a94572bcb0a266ba881 Mon Sep 17 00:00:00 2001 From: Simon Lusenc Date: Wed, 29 Mar 2017 15:30:38 +0200 Subject: [PATCH] Release - 1.7.4 --- addon/io_scs_tools/__init__.py | 2 +- addon/io_scs_tools/exp/pic.py | 35 ++--- addon/io_scs_tools/exp/pim/exporter.py | 35 ++++- addon/io_scs_tools/exp/pim/skin_stream.py | 18 +-- addon/io_scs_tools/exp/pip/exporter.py | 6 +- addon/io_scs_tools/exp/pip/node.py | 57 +++++++- addon/io_scs_tools/exp/pit.py | 15 ++- addon/io_scs_tools/exp/pix.py | 16 +-- .../exp/transition_structs/parts.py | 85 +++++++++--- addon/io_scs_tools/imp/pim.py | 13 +- .../internals/containers/config.py | 2 +- .../internals/containers/parsers/mat.py | 2 +- .../internals/containers/parsers/pix.py | 2 +- .../internals/containers/parsers/sii.py | 4 +- .../internals/containers/parsers/tobj.py | 3 +- .../io_scs_tools/internals/containers/pix.py | 4 +- .../io_scs_tools/internals/containers/tobj.py | 2 +- .../internals/containers/writers/pix.py | 2 +- .../internals/preview_models/__init__.py | 13 ++ addon/io_scs_tools/operators/scene.py | 4 +- addon/io_scs_tools/operators/wm.py | 7 +- addon/io_scs_tools/operators/world.py | 123 +++++++++--------- 22 files changed, 286 insertions(+), 164 deletions(-) diff --git a/addon/io_scs_tools/__init__.py b/addon/io_scs_tools/__init__.py index 8ef9ff9..959e515 100644 --- a/addon/io_scs_tools/__init__.py +++ b/addon/io_scs_tools/__init__.py @@ -22,7 +22,7 @@ "name": "SCS Tools", "description": "Setup models, Import-Export SCS data format", "author": "Simon Lusenc (50keda), Milos Zajic (4museman)", - "version": (1, 7, "0bc527c"), + "version": (1, 7, "40fb83b"), "blender": (2, 78, 0), "location": "File > Import-Export", "wiki_url": "http://modding.scssoft.com/wiki/Documentation/Tools/SCS_Blender_Tools", diff --git a/addon/io_scs_tools/exp/pic.py b/addon/io_scs_tools/exp/pic.py index 8dd7825..0f55dc8 100644 --- a/addon/io_scs_tools/exp/pic.py +++ b/addon/io_scs_tools/exp/pic.py @@ -123,44 +123,31 @@ def 
_fill_part_sections(locator_list, used_parts): :rtype: list """ - parts = [] locator_parts = {} for locator_i, locator in enumerate(locator_list): - scs_part = locator.scs_props.scs_part + scs_part = used_parts.ensure_part(locator) if scs_part not in locator_parts: locator_parts[scs_part] = [locator_i] else: locator_parts[scs_part].append(locator_i) - if scs_part not in parts: - parts.append(scs_part) - # PART SECTIONS ordered_part_sections = [] for part_name in used_parts.get_as_list(): - piece_count = 0 - pieces = None locator_count = 0 - locators = None + pieces = None # just make zero pieces as we are not using that in PIC + piece_count = 0 - # fill up part data from PIC data - if part_name in parts: - - # PIECE COUNT - piece_count = 0 - # PIECES - pieces = None - - # LOCATOR COUNT - if part_name in locator_parts: - locator_count = len(locator_parts[part_name]) - # LOCATORS - locators = None - if part_name in locator_parts: - if locator_parts[part_name]: - locators = locator_parts[part_name] + # LOCATOR COUNT + if part_name in locator_parts: + locator_count = len(locator_parts[part_name]) + # LOCATORS + locators = None + if part_name in locator_parts: + if locator_parts[part_name]: + locators = locator_parts[part_name] # MAKE SECTION part_section = _SectionData("Part") diff --git a/addon/io_scs_tools/exp/pim/exporter.py b/addon/io_scs_tools/exp/pim/exporter.py index 9435842..6b6df49 100644 --- a/addon/io_scs_tools/exp/pim/exporter.py +++ b/addon/io_scs_tools/exp/pim/exporter.py @@ -89,7 +89,7 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec is_skin_used = (armature_object and root_object.scs_props.scs_root_animated == "anim") pim_header = Header(format_type, format_version, root_object.name) - pim_global = Globall(used_parts.count(), skeleton_filepath) + pim_global = Globall(len(used_parts.get_as_list()), skeleton_filepath) pim_materials = collections.OrderedDict() # dict of Material class instances representing used materials 
""":type: dict[str, Material]""" @@ -105,16 +105,30 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec bones = skin = skin_stream = None if is_skin_used: + + invalid_bone_names = set() # set for saving bones with invalid names, they are used for reporting to user + # create bones data section bones = Bones() for bone in armature_object.data.bones: bones.add_bone(bone.name) used_bones.add(bone.name) + # do bones name checks + if _name_utils.tokenize_name(bone.name) != bone.name: + invalid_bone_names.add(bone.name) + # create skin data section skin_stream = SkinStream(SkinStream.Types.POSITION) skin = Skin(skin_stream) + # report invalid bone names + if len(invalid_bone_names) > 0: + lprint("W Invalid bone names detected, max. length of valid bone name is 12 and must consists from [a-z, 0-9 and _ ] characters.\n\t " + "Conversion will generalize names, however expect problems by re-import! List of invalid bone names for %r:\n\t " + "%r", + (armature_object.name, list(invalid_bone_names))) + # create mesh object data sections for mesh_obj in mesh_objects: @@ -151,6 +165,7 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec missing_uv_layers = {} # stores missing uvs specified by materials of this object missing_vcolor = False # indicates if object is missing vertex color layer missing_vcolor_a = False # indicates if object is missing vertex color alpha layer + missing_skinned_verts = set() # indicates if object is having only partial skin, which is not allowed in our models for poly in mesh.polygons: @@ -275,6 +290,7 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec if is_skin_used: # get skinning data for vertex and save it to skin stream bone_weights = {} + bone_weights_sum = 0 for v_group_entry in mesh.vertices[vert_i].groups: bone_indx = bones.get_bone_index(vert_groups[v_group_entry.group].name) bone_weight = v_group_entry.weight @@ -282,10 +298,15 @@ def 
execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec # proceed only if bone exists in our armature if bone_indx != -1: bone_weights[bone_indx] = bone_weight + bone_weights_sum += bone_weight - skin_entry = SkinStream.Entry(mesh_piece.get_index(), piece_vert_index, position, bone_weights) + skin_entry = SkinStream.Entry(mesh_piece.get_index(), piece_vert_index, position, bone_weights, bone_weights_sum) skin_stream.add_entry(skin_entry) + # report un-skinned vertices (no bones or zero sum weight) + if bone_weights_sum <= 0: + missing_skinned_verts.add(vert_i) + # save to terrain points storage if present in correct vertex group for group in mesh.vertices[vert_i].groups: @@ -333,7 +354,7 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec _mesh_utils.cleanup_mesh(mesh) # create part if it doesn't exists yet - part_name = mesh_obj.scs_props.scs_part + part_name = used_parts.ensure_part(mesh_obj) if part_name not in pim_parts: pim_parts[part_name] = Part(part_name) @@ -357,6 +378,9 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec if missing_vcolor_a: lprint("W Object %r is missing vertex color alpha layer with name %r! 
Default alpha will be exported (0.5)", (mesh_obj.name, _MESH_consts.default_vcol + _MESH_consts.vcol_a_suffix)) + if len(missing_skinned_verts) > 0: + lprint("E Object %r from SCS Root %r has %s vertices which are not skinned to any bone, expect errors during conversion!", + (mesh_obj.name, root_object.name, len(missing_skinned_verts))) # report missing data for whole model if len(missing_mappings_data) > 0: @@ -393,9 +417,8 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec locator.set_scale(sca) # create part if it doesn't exists yet - part_name = loc_obj.scs_props.scs_part + part_name = used_parts.ensure_part(loc_obj) if part_name not in pim_parts: - assert used_parts.is_present(part_name) pim_parts[part_name] = Part(part_name) # add locator to part @@ -416,7 +439,7 @@ def execute(dirpath, root_object, armature_object, skeleton_filepath, mesh_objec for part_name in used_parts.get_as_list(): - # export all parts even empty ones gathered from PIC and PIP + # export all parts even empty ones used only in PIC and/or PIP if part_name in pim_parts: pim_container.append(pim_parts[part_name].get_as_section()) else: diff --git a/addon/io_scs_tools/exp/pim/skin_stream.py b/addon/io_scs_tools/exp/pim/skin_stream.py index 4078937..320f013 100644 --- a/addon/io_scs_tools/exp/pim/skin_stream.py +++ b/addon/io_scs_tools/exp/pim/skin_stream.py @@ -42,15 +42,7 @@ class Entry: __bone_weights = None __clones = None - @staticmethod - def __get_weight_sum__(bone_weights): - weight_sum = 0 - for bone_weight in bone_weights.values(): - weight_sum += bone_weight - - return weight_sum - - def __init__(self, piece_index, vertex_index, vertex_pos, bone_weights): + def __init__(self, piece_index, vertex_index, vertex_pos, bone_weights, bone_weights_sum): """Create new entry instance with given indices, position and bone weights. 
:param piece_index: index of the piece inside SCS game object :type piece_index: int @@ -60,6 +52,8 @@ def __init__(self, piece_index, vertex_index, vertex_pos, bone_weights): :type vertex_pos: tuple :param bone_weights: bone weights dictonary where key is bone index and value is bone weight :type bone_weights: dict + :param bone_weights_sum: summary of weights of all the bones vertex is skinned to + :type bone_weights_sum: float """ self.__bone_weights = OrderedDict() @@ -67,12 +61,10 @@ def __init__(self, piece_index, vertex_index, vertex_pos, bone_weights): self.__position = vertex_pos - weight_sum = self.__get_weight_sum__(bone_weights) - # normalize if sum of weights is bigger than one - if weight_sum > 1: + if bone_weights_sum > 1: for bone_indx in bone_weights.keys(): - self.__bone_weights[bone_indx] = bone_weights[bone_indx] / weight_sum + self.__bone_weights[bone_indx] = bone_weights[bone_indx] / bone_weights_sum else: for bone_indx in bone_weights.keys(): self.__bone_weights[bone_indx] = bone_weights[bone_indx] diff --git a/addon/io_scs_tools/exp/pip/exporter.py b/addon/io_scs_tools/exp/pip/exporter.py index 66f6801..128c67a 100644 --- a/addon/io_scs_tools/exp/pip/exporter.py +++ b/addon/io_scs_tools/exp/pip/exporter.py @@ -114,7 +114,7 @@ def __get_trigger_point__(pip_trigger_points, locator_name): return pip_trigger_points[locator_name] -def execute(dirpath, filename, prefab_locator_list, offset_matrix, used_terrain_points): +def execute(dirpath, filename, prefab_locator_list, offset_matrix, used_parts, used_terrain_points): """Exports PIP file from given locator list. 
:param prefab_locator_list: @@ -125,6 +125,8 @@ def execute(dirpath, filename, prefab_locator_list, offset_matrix, used_terrain_ :type filename: str :param offset_matrix: offset matrix for locators :type offset_matrix: mathutils.Matrix + :param used_parts: parts transitional structure for storing used parts inside this PIP export + :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans :param used_terrain_points: terrain points transitional structure for accessing terrain points stored during PIM export :type used_terrain_points: io_scs_tools.exp.transition_structs.terrain_points.TerrainPntsTrans :return: True if successfull; otherwise False @@ -274,7 +276,7 @@ def execute(dirpath, filename, prefab_locator_list, offset_matrix, used_terrain_ """:type: io_scs_tools.properties.object.ObjectSCSTools""" # create sign and set properties - sign = Sign(locator.name, locator_scs_props.scs_part) + sign = Sign(locator.name, used_parts.ensure_part(locator)) pos, rot, scale = _get_scs_transformation_components(offset_matrix.inverted() * locator.matrix_world) sign.set_position(pos) diff --git a/addon/io_scs_tools/exp/pip/node.py b/addon/io_scs_tools/exp/pip/node.py index cdd7947..d6ada31 100644 --- a/addon/io_scs_tools/exp/pip/node.py +++ b/addon/io_scs_tools/exp/pip/node.py @@ -19,7 +19,8 @@ # Copyright (C) 2015: SCS Software from collections import OrderedDict - +from mathutils import Vector +from mathutils.geometry import distance_point_to_plane from io_scs_tools.consts import PrefabLocators as _PL_consts from io_scs_tools.exp.pip.node_stream import Stream from io_scs_tools.internals.structure import SectionData as _SectionData @@ -91,6 +92,9 @@ def __prepare_streams__(self): """Converts terrain points from variant mapped indices to PIP streams and stores them. 
""" + # first prepare terrain points for export + self.__prepare_terrain_points__() + # ensure empty streams pos_stream = Stream(Stream.Types.POSITION) nor_stream = Stream(Stream.Types.NORMAL) @@ -130,6 +134,22 @@ def __prepare_streams__(self): if variant_stream.get_size() > 0: self.__tp_streams[Stream.Types.VARIANT_BLOCK] = variant_stream + def __prepare_terrain_points__(self): + """Reverses the order of terrain points if last point is closer to the node in it's forward direction. + """ + + for variant_index in self.__tp_per_variant: + + # now if tail is closer to node on it's forward axis, we reverse list + plane_co = Vector(self.__position) + plane_no = Vector(self.__direction) + + head_distance = distance_point_to_plane(self.__tp_per_variant[variant_index][0][0], plane_co, plane_no) + tail_distance = distance_point_to_plane(self.__tp_per_variant[variant_index][-1][0], plane_co, plane_no) + + if not (head_distance <= tail_distance - 0.001): # 0.001 taken from Maya exporter + self.__tp_per_variant[variant_index].reverse() + def set_input_lane(self, index, curve_index): """Set the curve index for given input lane. 
@@ -197,18 +217,41 @@ def add_terrain_point(self, position, normal, variant_index=-1): if position is None and normal is None: return - # ordering with insertion by position where closest terrain point - # shall be first in the list and farthest terrain point the last + # find nearest existing terrain point i = 0 - while i < len(self.__tp_per_variant[variant_index]): + smallest_dist = float("inf") + smallest_dist_i = 0 + tp_count = len(self.__tp_per_variant[variant_index]) + while i < tp_count: pos, normal = self.__tp_per_variant[variant_index][i] - if get_distance(self.__position, position) < get_distance(self.__position, pos): - break + curr_distance = get_distance(position, pos) + if curr_distance < smallest_dist: + smallest_dist = curr_distance + smallest_dist_i = i i += 1 - self.__tp_per_variant[variant_index].insert(i, (position, normal)) + # depending on index of the nearest point insert new point + if smallest_dist_i == 0: # no terrain points yet or the nearest is first just put it at start + self.__tp_per_variant[variant_index].insert(0, (position, normal)) + elif smallest_dist_i == tp_count - 1: # last is the nearest put it at the back + self.__tp_per_variant[variant_index].append((position, normal)) + else: + + # now this is a tricky one: once nearest point is in the middle. + # With that in mind take previous and next existing points and calculate + # to which new point is closer: + # 1. if closer to previous we have to insert new point before the nearest + # 2. 
if closer to next we have to insert new point after the nearest + + ahead_tp = self.__tp_per_variant[variant_index][smallest_dist_i - 1] + behind_tp = self.__tp_per_variant[variant_index][smallest_dist_i + 1] + + if get_distance(behind_tp[0], position) < get_distance(ahead_tp[0], position): + smallest_dist_i += 1 + + self.__tp_per_variant[variant_index].insert(smallest_dist_i, (position, normal)) Node.__global_tp_counter += 1 diff --git a/addon/io_scs_tools/exp/pit.py b/addon/io_scs_tools/exp/pit.py index e923b46..a77e3d2 100644 --- a/addon/io_scs_tools/exp/pit.py +++ b/addon/io_scs_tools/exp/pit.py @@ -305,6 +305,8 @@ def _fill_part_list(parts, used_parts_names, all_parts=False): """ part_list = [] for part_name in used_parts_names: + + part_written = False for part in parts: if part.name == part_name: @@ -320,21 +322,25 @@ def _fill_part_list(parts, used_parts_names, all_parts=False): part_atr.append(('INT', 'visible', include)) part_list.append((part.name, part_atr), ) + part_written = True + + if not part_written: + lprint("E Part %r from collected parts not avaliable in variant parts inventory, expect problems by conversion!", (part_name,)) return part_list -def export(root_object, filepath, used_materials, used_parts): +def export(root_object, filepath, used_parts, used_materials): """Export PIT. 
:param root_object: SCS root object :type root_object: bpy.types.Object :param filepath: PIT file path :type filepath: str - :param used_materials: materials transitional structure for accessing stored materials from PIM - :type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans :param used_parts: parts transitional structure for accessing stored parts from PIM, PIC and PIP :type used_parts: io_scs_tools.exp.transition_structs.parts.PartsTrans + :param used_materials: materials transitional structure for accessing stored materials from PIM + :type used_materials: io_scs_tools.exp.transition_structs.materials.MaterialsTrans :return: True if successful; False otherwise; :rtype: bool """ @@ -617,7 +623,6 @@ def export(root_object, filepath, used_materials, used_parts): # PARTS AND VARIANTS... used_parts_names = used_parts.get_as_list() - part_list_cnt = len(used_parts_names) if len(root_object.scs_object_variant_inventory) == 0: # If there is no Variant, add the Default one... 
part_list = _fill_part_list(root_object.scs_object_part_inventory, used_parts_names, all_parts=True) @@ -633,7 +638,7 @@ def export(root_object, filepath, used_materials, used_parts): # part_sections = fill_part_section(part_list) variant_section = _fill_variant_sections(variant_list) comment_header_section = _fill_comment_header_section(look_list, variant_list) - global_section = _fill_global_section(len(look_list), len(variant_list), part_list_cnt, len(used_materials_pairs)) + global_section = _fill_global_section(len(look_list), len(variant_list), len(used_parts_names), len(used_materials_pairs)) # DATA ASSEMBLING pit_container = [comment_header_section, header_section, global_section] diff --git a/addon/io_scs_tools/exp/pix.py b/addon/io_scs_tools/exp/pix.py index 74e2d25..9b2e899 100644 --- a/addon/io_scs_tools/exp/pix.py +++ b/addon/io_scs_tools/exp/pix.py @@ -42,7 +42,7 @@ def _get_objects_by_type(blender_objects, parts): :param blender_objects: list of the objects that should be sorted by type :type blender_objects: list of bpy.types.Object - :param parts: transitional parts class instance to collect parts to + :param parts: transitional parts class instance for adding users (parts without users won't be exported) :type parts: io_scs_tools.exp.transition_structs.parts.PartsTrans :return: more lists of objects in order: meshes, prefab_locators, model_locators, collision_locators and armutare object as last :rtype: list of [list] @@ -62,17 +62,17 @@ def _get_objects_by_type(blender_objects, parts): prefab_locator_list.append(obj) if _object_utils.has_part_property(obj): - parts.add(obj.scs_props.scs_part) + parts.add_user(obj) elif obj.scs_props.locator_type == 'Model': model_locator_list.append(obj) - parts.add(obj.scs_props.scs_part) + parts.add_user(obj) elif obj.scs_props.locator_type == 'Collision': collision_locator_list.append(obj) - parts.add(obj.scs_props.scs_part) + parts.add_user(obj) # ARMATURES elif obj.type == 'ARMATURE': @@ -88,7 +88,7 @@ def 
_get_objects_by_type(blender_objects, parts): if obj.data.scs_props.locator_preview_model_path == "": # Export object only if it's not a Preview Model... mesh_object_list.append(obj) - parts.add(obj.scs_props.scs_part) + parts.add_user(obj) else: print('!!! - Unhandled object type: %r' % str(obj.type)) @@ -119,7 +119,7 @@ def export(dirpath, root_object, game_object_list): # TRANSITIONAL STRUCTURES terrain_points = TerrainPntsTrans() - parts = PartsTrans() + parts = PartsTrans(root_object.scs_object_part_inventory) materials = MaterialsTrans() bones = BonesTrans() @@ -174,13 +174,13 @@ def export(dirpath, root_object, game_object_list): # EXPORT PIP if scs_globals.export_pip_file and prefab_locators and export_success: in_args = (dirpath, root_object.name, prefab_locators, root_object.matrix_world) - trans_structs_args = (terrain_points,) + trans_structs_args = (parts, terrain_points) export_success = _pip_exporter.execute(*(in_args + trans_structs_args)) # EXPORT PIT if scs_globals.export_pit_file and export_success: in_args = (root_object, dirpath + os.sep + root_object.name) - trans_structs_args = (materials, parts) + trans_structs_args = (parts, materials) export_success = _pit.export(*(in_args + trans_structs_args)) # PIS, PIA diff --git a/addon/io_scs_tools/exp/transition_structs/parts.py b/addon/io_scs_tools/exp/transition_structs/parts.py index b2aeab2..21e3a9e 100644 --- a/addon/io_scs_tools/exp/transition_structs/parts.py +++ b/addon/io_scs_tools/exp/transition_structs/parts.py @@ -20,47 +20,96 @@ from collections import OrderedDict +from io_scs_tools.utils.printout import lprint class PartsTrans: """Transitional parts class for storing used parts during export. - This storage shall be use to collect&store parts in PIM, PIC and PIP exporter and then use it in PIT exporter. + + This storage shall be used to store parts from SCS Root part inventory. Morover users + should be added to each part during collecting of all object to export. 
+ Once exporters asks for list of parts, only the parts with at least one user will be returned. """ - def __init__(self): + def __init__(self, part_inventory): """Creates class instance of parts transitional structure. + :param part_inventory: part inventory for SCS Root object + :type part_inventory: list[io_scs_tools.properties.object.ObjectPartInventoryItem] """ self.__storage = OrderedDict() """:type: collections.OrderedDict[str, int]""" + self.__default_part = "" - def add(self, part_name): - """Adds part to storage. - - :param part_name: part name - :type part_name: str - """ - self.__storage[part_name] = 1 - - def count(self): - """Number of currently stored parts in transitional structure. + for i, part in enumerate(part_inventory): + self.__storage[part.name] = 0 + if i == 0: + self.__default_part = part.name - :return: number of currently stored parts - :rtype: int - """ - return len(self.__storage) + # if given part inventory won't have any parts inside, then we have to fallback to default part + # and properly report the issue + if self.__default_part == "": + self.__default_part = "defaultpart" + lprint("E Parts can't be properly constructed, 'defaultpart' will be used for export!\n\t " + "However issue should be addressed to developer...") def is_present(self, part_name): """Tells if given part is present in storage. + :param part_name: name of the part to search for + :type part_name: str :return: True if present; False otherwise :rtype: bool """ return part_name in self.__storage + def add_user(self, obj): + """Adds user to validated object part. 
+ In case that object part doesn't exists in storage, default part get's a user + + :param obj: mesh or locator blender object from which part should be taken + :type obj: bpy.types.Object + """ + + part_name = self.ensure_part(obj, suppress_warning=True) + + self.__storage[part_name] += 1 + + def ensure_part(self, obj, suppress_warning=False): + """Ensures and returns part for the given mesh or locator object. + If part of the object is not preset in internal parts dictionary, + then default part is returned and invalid part name is properly reported to log. + + :param obj: mesh or locator blender object from which part should be taken + :type obj: bpy.types.Object + :param suppress_warning: flag for suppressing the warning from being raised + :type suppress_warning: bool + :return: part name of the object or default part if object part name is invalid + :rtype: str + """ + part_name = obj.scs_props.scs_part + + if not self.is_present(part_name): + if not suppress_warning: + lprint("W Invalid part name %r detected on object %r, using first available for export: %r!" + "Select object and try to assign part again.", + (part_name, obj.name, self.__default_part)) + part_name = self.__default_part + + return part_name + def get_as_list(self): - """Get stored part names list. + """Get stored part names list with at least one user. + + Parts without user shouldn't be exported thus are not returned by this method. 
:return: stored part names list :rtype: list[str] """ - return list(self.__storage.keys()) + + part_list = [] + + for part_name in self.__storage: + if self.__storage[part_name] > 0: + part_list.append(part_name) + + return part_list diff --git a/addon/io_scs_tools/imp/pim.py b/addon/io_scs_tools/imp/pim.py index f730f6f..9fd136e 100644 --- a/addon/io_scs_tools/imp/pim.py +++ b/addon/io_scs_tools/imp/pim.py @@ -1190,11 +1190,20 @@ def load_pim_file(context, filepath, terrain_points_trans=None, preview_model=Fa if preview_model: + # abort loading if no meshes inside imported model + if len(objects) == 0 and len(skinned_objects) == 0: + return None + bases = [] # get the bases of newly created objects for override for base in bpy.context.scene.object_bases: if base.object in objects: bases.append(base) + if base.object in skinned_objects: + bases.append(base) + + # get active object for joining meshes into it + active_object = objects[0] if len(objects) > 0 else skinned_objects[0] override = { 'window': bpy.context.window, @@ -1203,12 +1212,12 @@ def load_pim_file(context, filepath, terrain_points_trans=None, preview_model=Fa 'scene': bpy.context.scene, 'region': None, 'area': None, - 'active_object': objects[0], + 'active_object': active_object, 'selected_editable_bases': bases } bpy.ops.object.join(override) - return objects[0] + return active_object # CREATE MODEL LOCATORS locators = [] diff --git a/addon/io_scs_tools/internals/containers/config.py b/addon/io_scs_tools/internals/containers/config.py index 161577c..c97d501 100644 --- a/addon/io_scs_tools/internals/containers/config.py +++ b/addon/io_scs_tools/internals/containers/config.py @@ -110,7 +110,7 @@ def update_shader_presets_path(shader_presets_filepath): supported_effects_path = os.path.join(_path_utils.get_addon_installation_paths()[0], "supported_effects.bin") if os.path.isfile(supported_effects_path): try: - supported_effects_dict = pickle.load(open(supported_effects_path, "rb")) + 
supported_effects_dict = pickle.load(open(supported_effects_path, mode="rb")) except PermissionError: lprint("W Can't load supported effects file (persmission denied), please ensure read/write permissions for:\n\t %r\n\t " "Without supported effects file invalid combinations of shader and flavors can be created!", diff --git a/addon/io_scs_tools/internals/containers/parsers/mat.py b/addon/io_scs_tools/internals/containers/parsers/mat.py index d3f7d0b..f62fb4f 100644 --- a/addon/io_scs_tools/internals/containers/parsers/mat.py +++ b/addon/io_scs_tools/internals/containers/parsers/mat.py @@ -30,7 +30,7 @@ def read_data(filepath, print_info=False): attr_dict = {} - with open(filepath) as f: + with open(filepath, encoding="utf8") as f: f_data = f.read() f.close() diff --git a/addon/io_scs_tools/internals/containers/parsers/pix.py b/addon/io_scs_tools/internals/containers/parsers/pix.py index a20d6f9..8b898e8 100644 --- a/addon/io_scs_tools/internals/containers/parsers/pix.py +++ b/addon/io_scs_tools/internals/containers/parsers/pix.py @@ -363,7 +363,7 @@ def read_data(filepath, ind, print_info=False): pix_container = [] # if filepath: - file = open(filepath, 'r') + file = open(filepath, mode="r", encoding="utf8") while 1: data_type, line = next_line(file) if data_type in ('EOF', 'ERR'): diff --git a/addon/io_scs_tools/internals/containers/parsers/sii.py b/addon/io_scs_tools/internals/containers/parsers/sii.py index 20e622d..226b8e1 100644 --- a/addon/io_scs_tools/internals/containers/parsers/sii.py +++ b/addon/io_scs_tools/internals/containers/parsers/sii.py @@ -101,7 +101,7 @@ def parse_token(self): if not os.path.isfile(file_name): continue - file = open(file_name, 'r') + file = open(file_name, mode="r", encoding="utf8") included_lines = file.readlines() file.close() break @@ -331,7 +331,7 @@ def parse_file(filepath, print_info=False): print("** SII Parser ...") sii_container = [] - file = open(filepath, 'r') + file = open(filepath, mode="r", encoding="utf8") lines = 
file.readlines() file.close() diff --git a/addon/io_scs_tools/internals/containers/parsers/tobj.py b/addon/io_scs_tools/internals/containers/parsers/tobj.py index dbcae10..da2f8f7 100644 --- a/addon/io_scs_tools/internals/containers/parsers/tobj.py +++ b/addon/io_scs_tools/internals/containers/parsers/tobj.py @@ -18,6 +18,7 @@ # Copyright (C) 2015: SCS Software + def parse_file(filepath, print_info=False): """Reads data from TOBJ file and returns it's records as list. @@ -34,7 +35,7 @@ def parse_file(filepath, print_info=False): print(' filepath: %r' % str(filepath)) data = [] - with open(filepath) as f: + with open(filepath, encoding="utf8") as f: for i, line in enumerate(f): line_split = line.strip().split() if len(line_split) != 0: diff --git a/addon/io_scs_tools/internals/containers/pix.py b/addon/io_scs_tools/internals/containers/pix.py index 851b0dd..bb203b1 100644 --- a/addon/io_scs_tools/internals/containers/pix.py +++ b/addon/io_scs_tools/internals/containers/pix.py @@ -32,7 +32,7 @@ def fast_check_for_pia_skeleton(pia_filepath, skeleton): """Check for the skeleton record in PIA file without parsing the whole file. It takes filepath and skeleton name (string) and returns True if the skeleton record in the file is the same as skeleton name provided, otherwise False.""" - file = open(pia_filepath, 'r') + file = open(pia_filepath, mode="r", encoding="utf8") while 1: data_type, line = _pix_parser.next_line(file) if data_type in ('EOF', 'ERR'): @@ -58,7 +58,7 @@ def utter_check_for_pia_skeleton(pia_filepath, armature): """Skeleton analysis in PIA file with reasonably quick searching the whole file. 
It takes filepath and an Armature object and returns True if the skeleton in PIA file can be used for the skeleton in provided Armature object, otherwise it returns False.""" - file = open(pia_filepath, 'r') + file = open(pia_filepath, mode="r", encoding="utf8") skeleton = None bone_matches = [] while 1: diff --git a/addon/io_scs_tools/internals/containers/tobj.py b/addon/io_scs_tools/internals/containers/tobj.py index 57ff8ad..b6bf363 100644 --- a/addon/io_scs_tools/internals/containers/tobj.py +++ b/addon/io_scs_tools/internals/containers/tobj.py @@ -177,7 +177,7 @@ def write_data_to_file(self, filepath=None): try: - file = open(self.filepath, "w", encoding="utf8", newline="\n") + file = open(self.filepath, mode="w", encoding="utf8", newline="\n") except IOError: diff --git a/addon/io_scs_tools/internals/containers/writers/pix.py b/addon/io_scs_tools/internals/containers/writers/pix.py index ac0e92d..53b2c27 100644 --- a/addon/io_scs_tools/internals/containers/writers/pix.py +++ b/addon/io_scs_tools/internals/containers/writers/pix.py @@ -182,7 +182,7 @@ def write_data(container, filepath, ind=' ', print_on_success=True, print_inf orig_ind = ind # WRITE TO FILE - file = open(filepath, "w", encoding="utf8", newline="\n") + file = open(filepath, mode="w", encoding="utf8", newline="\n") fw = file.write if print_on_success: lprint('I WRITTING PIX FILE to: %r', (filepath,)) diff --git a/addon/io_scs_tools/internals/preview_models/__init__.py b/addon/io_scs_tools/internals/preview_models/__init__.py index e26bb1e..4d5839c 100644 --- a/addon/io_scs_tools/internals/preview_models/__init__.py +++ b/addon/io_scs_tools/internals/preview_models/__init__.py @@ -140,6 +140,19 @@ def load(locator): from io_scs_tools.imp import pim as _pim_import obj = _pim_import.load_pim_file(bpy.context, abs_filepath, preview_model=True) + + # in case used preview model doesn't have any mesh, abort loading, report error and reset path + # Path has to be reset to prevent loading preview model 
over and over again + # from possible callbacks trying to fix not present preview model + if not obj: + message = "Selected PIM model doesn't have any mesh inside, so it can not be used as a preview model." + bpy.ops.wm.show_warning_message('INVOKE_DEFAULT', is_modal=True, title="Preview Model Load Error!", message=message, + width=500, + height=100) + lprint("E " + message) + locator.scs_props.locator_preview_model_path = "" + return False + obj.name = prem_name obj.data.name = prem_name obj.data.scs_props.locator_preview_model_path = locator.scs_props.locator_preview_model_path diff --git a/addon/io_scs_tools/operators/scene.py b/addon/io_scs_tools/operators/scene.py index 79d6784..7ec1637 100644 --- a/addon/io_scs_tools/operators/scene.py +++ b/addon/io_scs_tools/operators/scene.py @@ -1036,7 +1036,7 @@ def execute(self, context): linked_path = os.path.join(main_path, link_hash) _path_utils.ensure_symlink(_get_scs_globals().scs_project_path, linked_path) - with open(extra_mount_path, mode="w") as f: + with open(extra_mount_path, mode="w", encoding="utf8") as f: f.write(link_hash) return ConversionHelper.RunConversion.execute(self, context) @@ -1064,7 +1064,7 @@ def execute(self, context): self.report({'ERROR'}, "Conversion tools path is incorrect! 
Please fix it first.") return {'CANCELLED'} - with open(extra_mount_path, mode="w") as f: + with open(extra_mount_path, mode="w", encoding="utf8") as f: for path_entry in _get_scs_globals().conv_hlpr_custom_paths: diff --git a/addon/io_scs_tools/operators/wm.py b/addon/io_scs_tools/operators/wm.py index f34ccde..89d3c74 100644 --- a/addon/io_scs_tools/operators/wm.py +++ b/addon/io_scs_tools/operators/wm.py @@ -49,14 +49,17 @@ def popup_draw(self, context): self.layout.label(line) def draw(self, context): - row = self.layout.row().split(0.05) + row = self.layout.row().split(0.00001 * self.width) row.label(" ") col = row.column() col.label(self.title, icon=self.icon) + col.separator() lines = self.message.split("\n") for line in lines: - col.label(line) + col.label(line.strip()) + col.separator() + col.separator() def execute_popup(self, context): context.window_manager.popup_menu(self.popup_draw, title=self.title, icon=self.icon) diff --git a/addon/io_scs_tools/operators/world.py b/addon/io_scs_tools/operators/world.py index 697206c..caf7902 100644 --- a/addon/io_scs_tools/operators/world.py +++ b/addon/io_scs_tools/operators/world.py @@ -255,25 +255,26 @@ class SCSPathsInitialization(bpy.types.Operator): DUMP_LEVEL = 3 """Constant for log level index according in SCS Globals, on which operator should printout extended report.""" - # Static running variables - __timer = None - """Timer instance variable. We use timer to initilize paths gradually one by one.""" __last_time = None """Used for time tracking on each individual path initialization.""" __path_in_progress = False """Used as flag for indicating path being processed. So another execute method call shouldn't be triggered.""" + + # Static running variables + __static_timer = None + """Timer instance variable. We use timer to initilize paths gradually one by one.""" __static_paths_count = 0 """Static variable holding number of all paths that had to be processed. Used for reporting progress eg. 
'X of Y paths done'.""" __static_paths_done = 0 """Static variable holding number of already processed paths. Used for reporting progress eg. 'X of Y paths done'.""" - __static_running_instances = 0 - """Used for indication of already running operator.""" + __static_abort_instances = False + """Static variable holding existing operator instance alive until it's set to false. Used when multiple instances are invoked.""" # Static data storage __static_message = "" """Static variable holding printout extended message. This message used only if dump level is high enough.""" __static_paths_list = [] - """Static variable holding CollectionProperty with entries of Filepath class, defining paths that needs initialization. + """Static variable holding list with dictionary entries, each of them representing a Filepath class entry that needs initialization. Processed paths are removed on the fly. """ __static_callbacks = [] @@ -306,7 +307,7 @@ def is_running(): :return: True if scs paths initialization is still in progress; False if none instances are running :rtype: bool """ - return SCSPathsInitialization.__static_running_instances > 0 + return len(SCSPathsInitialization.__static_paths_list) > 0 and SCSPathsInitialization.__static_timer @staticmethod def append_callback(callback): @@ -325,25 +326,6 @@ def append_callback(callback): return False - def __init__(self): - SCSPathsInitialization.__static_running_instances += 1 - - def __del__(self): - if SCSPathsInitialization.__static_running_instances > 0: - - SCSPathsInitialization.__static_running_instances -= 1 - - # if user disables add-on, destructor is called again, so cleanup timer and static variables - if SCSPathsInitialization.__static_running_instances <= 0: - - SCSPathsInitialization.__static_message = "" - SCSPathsInitialization.__static_paths_list = [] - SCSPathsInitialization.__static_callbacks = [] - - if bpy.context and bpy.context.window_manager and self.__timer: - wm = bpy.context.window_manager - 
wm.event_timer_remove(self.__timer) - def execute(self, context): # do not proceed if list is already empty @@ -353,9 +335,9 @@ def execute(self, context): self.__path_in_progress = True # update message with current path and apply it - SCSPathsInitialization.__static_message += "Initializing " + SCSPathsInitialization.__static_paths_list[0].name + "..." - setattr(_get_scs_globals(), SCSPathsInitialization.__static_paths_list[0].attr, SCSPathsInitialization.__static_paths_list[0].path) - SCSPathsInitialization.__static_paths_list.remove(0) # remove just processed item + SCSPathsInitialization.__static_message += "Initializing " + SCSPathsInitialization.__static_paths_list[0]["name"] + "..." + setattr(_get_scs_globals(), SCSPathsInitialization.__static_paths_list[0]["attr"], SCSPathsInitialization.__static_paths_list[0]["path"]) + SCSPathsInitialization.__static_paths_list = SCSPathsInitialization.__static_paths_list[1:] # remove just processed item SCSPathsInitialization.__static_message += " Done in %.2f s!\n" % (time() - self.__last_time) SCSPathsInitialization.__static_paths_done += 1 @@ -386,8 +368,11 @@ def cancel(self, context): SCSPathsInitialization.__static_message = "" SCSPathsInitialization.__static_paths_list.clear() - wm = context.window_manager - wm.event_timer_remove(self.__timer) + # try to reset timer if window manager is available + if len(bpy.data.window_managers) > 0: + wm = bpy.data.window_managers[0] + wm.event_timer_remove(SCSPathsInitialization.__static_timer) + SCSPathsInitialization.__static_timer = None # report finished progress to 3d view report operator if int(_get_scs_globals().dump_level) < self.DUMP_LEVEL: @@ -405,8 +390,16 @@ def cancel(self, context): callback() SCSPathsInitialization.__static_callbacks.remove(callback) + lprint("D Paths initialization cancel invoked!") + def modal(self, context, event): + # if abort was requested finish immediately + if SCSPathsInitialization.__static_abort_instances: + self.cancel(context) + 
lprint("I Paths initialization aborted, deleting operator!") + return {'FINISHED'} + if event.type == "TIMER": # process timer event if len(SCSPathsInitialization.__static_paths_list) <= 0: # once no more paths to process abort it @@ -426,52 +419,54 @@ def modal(self, context, event): def invoke(self, context, event): + self.__last_time = time() # reset last time now as everything starts again SCSPathsInitialization.__static_paths_done = 0 # reset done paths counter as everything starts here - # if another instance is running update static paths list with paths passed to this instance - if SCSPathsInitialization.__static_running_instances > 1: + # engage abortion of any running instances + SCSPathsInitialization.__abort_any_running_instances = True - # now fill up new paths to static inventory - for filepath_prop in self.paths_list: + # now fill up new paths to static inventory + for filepath_prop in self.paths_list: - # search for identical path in not yet processed paths list - old_item = None - for item in SCSPathsInitialization.__static_paths_list: - if item.attr == filepath_prop.attr: - old_item = item - break + # sort out only unique paths and merge them with current static path list + old_item = None + for item in SCSPathsInitialization.__static_paths_list: + if item["attr"] == filepath_prop.attr: + old_item = item + break - # if old item is found just reuse it instead of adding new item to list - item = old_item if old_item else SCSPathsInitialization.__static_paths_list.add() - item.name = filepath_prop.name - item.attr = filepath_prop.attr - item.path = filepath_prop.path - - # update paths counter to the current paths list length - SCSPathsInitialization.__static_paths_count = len(SCSPathsInitialization.__static_paths_list) - - # cancel this instance if another instance is still running - # (it can happen that previous operator was done until now as each operator runs asynchronously, - # in that case we have to continue otherwise latest paths won't be ) 
- if SCSPathsInitialization.__static_running_instances > 0: + # if old item is found just reuse it instead of adding new item to list + if old_item: + old_item["name"] = filepath_prop["name"] + old_item["path"] = filepath_prop["path"] + else: + SCSPathsInitialization.__static_paths_list.append( + { + "name": filepath_prop["name"], + "attr": filepath_prop["attr"], + "path": filepath_prop["path"] + } + ) - SCSPathsInitialization.__static_message = "Restarting initialization...\n\n" - bpy.ops.wm.show_3dview_report('INVOKE_DEFAULT', message=SCSPathsInitialization.__static_message, - hide_controls=True, is_progress_message=True) + # update paths counter to the current paths list length + SCSPathsInitialization.__static_paths_count = len(SCSPathsInitialization.__static_paths_list) - lprint("D Restarting initialization...!") - return {'CANCELLED'} + # now as paths list is updated and we are about to run our instance + # release switch that should be aborting all the rest of operator instances + SCSPathsInitialization.__abort_any_running_instances = False SCSPathsInitialization.__static_message = "Starting initialization...\n" bpy.ops.wm.show_3dview_report('INVOKE_DEFAULT', message=SCSPathsInitialization.__static_message, hide_controls=True, is_progress_message=True) - SCSPathsInitialization.__static_paths_list = self.paths_list - SCSPathsInitialization.__static_paths_count = len(self.paths_list) + wm = bpy.data.window_managers[0] + window = wm.windows[0] + + # in case any operator was previously invoked we have to remove timer before adding new + if SCSPathsInitialization.__static_timer: + wm.event_timer_remove(SCSPathsInitialization.__static_timer) - wm = context.window_manager - self.__timer = wm.event_timer_add(0.2, context.window) - self.__last_time = time() + SCSPathsInitialization.__static_timer = wm.event_timer_add(0.2, window) wm.modal_handler_add(self) lprint("I Paths initialization started...")