diff --git a/README.md b/README.md index a147f32..4fd7e7b 100644 --- a/README.md +++ b/README.md @@ -26,14 +26,14 @@ Installation and Usage: Addon is located in "addon/io_scs_tools" folder. Use standard Blender addon installation procedure for installing it. For more information visit wiki at -"https://github.com/SCSSoftware/BlenderTools/wiki". +"http://modding.scssoft.com/wiki/Documentation/Tools/SCS_Blender_Tools". Notes: ------ - In case of trouble installing SCS Blender Tools make sure you're using compatible Blender version. SCS Blender Tools for Blender versions - prior 2.75 are not supported. + prior 2.73 are not supported. Help, questions, troubleshooting: @@ -41,7 +41,7 @@ Help, questions, troubleshooting: If you encounter any problems or have questions regarding SCS Blender Tools, please visit forum at "http://forum.scssoft.com/viewforum.php?f=159" and don't hesitate to ask if your problem wasn't addressed already. Also -don't miss the wiki ("https://github.com/SCSSoftware/BlenderTools/wiki") +don't miss wiki ("http://modding.scssoft.com/wiki/Documentation/Tools/SCS_Blender_Tools") for many useful tips and docs. 
diff --git a/addon/io_scs_tools/__init__.py b/addon/io_scs_tools/__init__.py index 1ae3f5d..fbf40a1 100644 --- a/addon/io_scs_tools/__init__.py +++ b/addon/io_scs_tools/__init__.py @@ -22,7 +22,7 @@ "name": "SCS Tools", "description": "Setup models, Import-Export SCS data format", "author": "Simon Lusenc (50keda), Milos Zajic (4museman)", - "version": (1, 11, "2032583"), + "version": (1, 12, "be00ed8"), "blender": (2, 78, 0), "location": "File > Import-Export", "wiki_url": "http://modding.scssoft.com/wiki/Documentation/Tools/SCS_Blender_Tools", diff --git a/addon/io_scs_tools/consts.py b/addon/io_scs_tools/consts.py index fe6b07a..a296af4 100644 --- a/addon/io_scs_tools/consts.py +++ b/addon/io_scs_tools/consts.py @@ -88,6 +88,24 @@ class View3DReport: (400, 45), # used when report text is shown (415, 45) # used when report text is hidden (aka condensed mode) ) + SCROLLUP_BTN_AREA = (545, 585, 26, 54) + SCROLLUP_BTN_TEXT = ( + "↑", # used when report text is shown + "" # used when report text is hidden (aka condensed mode) + ) + SCROLLUP_BTN_TEXT_POS = ( + (560, 45), # used when report text is shown + (560, 45) # used when report text is hidden (aka condensed mode) + ) + SCROLLDOWN_BTN_AREA = (585, 625, 26, 54) + SCROLLDOWN_BTN_TEXT = ( + "↓", # used when report text is shown + "" # used when report text is hidden (aka condensed mode) + ) + SCROLLDOWN_BTN_TEXT_POS = ( + (600, 45), # used when report text is shown + (600, 45) # used when report text is hidden (aka condensed mode) + ) class Icons: @@ -354,9 +372,12 @@ class PSP: UNLOAD_HARD_POS = 20 UNLOAD_RIGID_POS = 21 WEIGHT_CAT_POS = 22 + COMPANY_UNLOAD_POS = 23 + TRAILER_SPAWN = 24 + LONG_TRAILER_POS = 25 class TST: - """Constants representing type of traffic semaphores. + """Constants representing type of traffic semaphores. """ PROFILE = 0 MODEL_ONLY = 1 @@ -474,6 +495,13 @@ class PaintjobTools: """Constants for paintjob tools. 
""" + class VehicleTypes: + """Vehicle types, defining where vehicle will be placed in defs and model paths. + """ + NONE = "none" + TRUCK = "truck" + TRAILER = "trailer_owned" + uvs_name_2nd = "scs_paintjob_2nd" """2nd uvs layer name used during unification on export""" uvs_name_3rd = "scs_paintjob_3rd" @@ -483,3 +511,79 @@ class PaintjobTools: """Name of the property for saving references paths to models inside a group data-block.""" export_tag_obj_name = ".scs_export_group" """Name of the object inside the group which visibility tells us either group should be exported or no.""" + model_variant_prop = ".scs_variant" + """Name of the property for saving variant of the model inside group encapsulating imported paintable model.""" + + id_mask_colors = ( + (51, 0, 0), + (255, 136, 0), + (217, 202, 0), + (134, 179, 140), + (0, 190, 204), + (0, 31, 115), + (117, 70, 140), + (191, 96, 147), + (242, 61, 61), + (127, 68, 0), + (102, 95, 0), + (64, 255, 140), + (0, 204, 255), + (0, 0, 51), + (41, 0, 51), + (204, 0, 82), + (204, 102, 102), + (178, 137, 89), + (173, 179, 89), + (0, 77, 41), + (0, 41, 51), + (108, 108, 217), + (230, 128, 255), + (89, 0, 36), + (230, 172, 172), + (230, 203, 172), + (100, 102, 77), + (48, 191, 124), + (0, 170, 255), + (191, 191, 255), + (83, 0, 89), + (166, 124, 141), + (140, 49, 35), + (128, 113, 96), + (57, 77, 19), + (57, 77, 68), + (64, 106, 128), + (38, 38, 51), + (217, 0, 202), + (127, 0, 34), + (255, 115, 64), + (229, 172, 57), + (234, 255, 191), + (0, 51, 34), + (0, 68, 128), + (34, 0, 255), + (64, 32, 62), + (115, 57, 65), + (76, 34, 19), + (102, 77, 26), + (133, 204, 51), + (0, 255, 238), + (0, 27, 51), + (48, 0, 179), + (255, 191, 251), + (51, 26, 29), + (191, 156, 143), + (51, 38, 13), + (68, 255, 0), + (0, 115, 107), + (153, 180, 204), + (119, 54, 217), + (153, 0, 122), + (204, 112, 51), + (51, 47, 38), + (32, 128, 45), + (143, 191, 188), + (83, 116, 166), + (119, 105, 140), + (255, 64, 166) + ) + """Array of unique colors for 
building ID mask texture.""" diff --git a/addon/io_scs_tools/exp/pim/exporter.py b/addon/io_scs_tools/exp/pim/exporter.py index 2ae53af..29fa53b 100644 --- a/addon/io_scs_tools/exp/pim/exporter.py +++ b/addon/io_scs_tools/exp/pim/exporter.py @@ -151,6 +151,9 @@ def execute(dirpath, name_suffix, root_object, armature_object, skeleton_filepat for scale_axis in parent.scale: scale_sign *= scale_axis + for scale_axis in parent.delta_scale: + scale_sign *= scale_axis + parent = parent.parent face_flip = scale_sign < 0 diff --git a/addon/io_scs_tools/exp/pim_ef/exporter.py b/addon/io_scs_tools/exp/pim_ef/exporter.py index b31b4a3..f42b90a 100644 --- a/addon/io_scs_tools/exp/pim_ef/exporter.py +++ b/addon/io_scs_tools/exp/pim_ef/exporter.py @@ -125,6 +125,9 @@ def execute(dirpath, name_suffix, root_object, armature_object, skeleton_filepat for scale_axis in parent.scale: scale_sign *= scale_axis + for scale_axis in parent.delta_scale: + scale_sign *= scale_axis + parent = parent.parent winding_order = 1 @@ -268,8 +271,7 @@ def execute(dirpath, name_suffix, root_object, armature_object, skeleton_filepat rgbas.append(vcol) rgbas_names[_MESH_consts.default_vcol] = True - # export rest of the vertex colors too, but do not apply extra multiplies of SCS exporter - # as rest of the layers are just artist layers + # export rest of the vertex colors too (also multiply with 2 and with vcol multiplicator) for vcol_layer in mesh.vertex_colors: # we already computed thoose so ignore them @@ -277,7 +279,7 @@ def execute(dirpath, name_suffix, root_object, armature_object, skeleton_filepat continue color = vcol_layer.data[loop_i].color - vcol = (color[0], color[1], color[2], 1.0) + vcol = (color[0] * 2 * vcol_multi, color[1] * 2 * vcol_multi, color[2] * 2 * vcol_multi) rgbas.append(vcol) rgbas_names[vcol_layer.name] = True diff --git a/addon/io_scs_tools/exp/pim_ef/piece_face.py b/addon/io_scs_tools/exp/pim_ef/piece_face.py index 107d73a..4b455bc 100644 --- 
a/addon/io_scs_tools/exp/pim_ef/piece_face.py +++ b/addon/io_scs_tools/exp/pim_ef/piece_face.py @@ -102,11 +102,17 @@ def add_rgbas(self, rgbas, rgbas_names): for i, rgba in enumerate(rgbas): - rgba_type = Stream.Types.RGBA + str(i) - if rgba_type not in self.__streams: - self.__streams[rgba_type] = Stream(Stream.Types.RGBA, i, rgbas_names[i]) + if len(rgba) == 3: + stream_type = Stream.Types.RGB + vcol_type = Stream.Types.RGB + str(i) + else: + stream_type = Stream.Types.RGBA + vcol_type = Stream.Types.RGBA + str(i) - stream = self.__streams[rgba_type] + if vcol_type not in self.__streams: + self.__streams[vcol_type] = Stream(stream_type, i, rgbas_names[i]) + + stream = self.__streams[vcol_type] """:type: Stream""" stream.add_entry(rgba) diff --git a/addon/io_scs_tools/exp/pim_ef/piece_stream.py b/addon/io_scs_tools/exp/pim_ef/piece_stream.py index e60a6aa..2c28169 100644 --- a/addon/io_scs_tools/exp/pim_ef/piece_stream.py +++ b/addon/io_scs_tools/exp/pim_ef/piece_stream.py @@ -39,7 +39,10 @@ def __init__(self, stream_type, index, name=""): super().__init__(stream_type, index) # exchange format support multiple vertex color layers - if stream_type == Stream.Types.RGBA: + if stream_type == Stream.Types.RGB: + self.__tag_index = index + self.__format = "FLOAT3" + elif stream_type == Stream.Types.RGBA: self.__tag_index = index self.__format = "FLOAT4" diff --git a/addon/io_scs_tools/exp/pip/exporter.py b/addon/io_scs_tools/exp/pip/exporter.py index d1a94a9..20ff6dd 100644 --- a/addon/io_scs_tools/exp/pip/exporter.py +++ b/addon/io_scs_tools/exp/pip/exporter.py @@ -248,7 +248,7 @@ def execute(dirpath, filename, name_suffix, prefab_locator_list, offset_matrix, curve.set_flags(loc1.scs_props, False) curve.set_semaphore_id(int(loc0_scs_props.locator_prefab_np_traffic_semaphore)) - curve.set_traffic_rule(loc1_scs_props.locator_prefab_np_traffic_rule) + curve.set_traffic_rule(loc0_scs_props.locator_prefab_np_traffic_rule) # set next/prev curves for next_key in 
curve_entry.next_curves: diff --git a/addon/io_scs_tools/exp/pit.py b/addon/io_scs_tools/exp/pit.py index c7b40e4..2209b0e 100644 --- a/addon/io_scs_tools/exp/pit.py +++ b/addon/io_scs_tools/exp/pit.py @@ -480,6 +480,8 @@ def export(root_object, filepath, name_suffix, used_parts, used_materials): # print(' value: %s' % str(value)) if format_prop == 'FLOAT': attribute_data.props.append((rec[0], ["&&", (value,)])) + elif format_prop == 'INT': + attribute_data.props.append((rec[0], ["ii", (value,)])) else: attribute_data.props.append((rec[0], ["i", tuple(value)])) attribute_sections.append(attribute_data) @@ -542,7 +544,7 @@ def export(root_object, filepath, name_suffix, used_parts, used_materials): if attr_prop == "Format": format_value = attribute_dict[attr_prop] - if attr_prop == "Value" and ("FLOAT" in format_value or "STRING" in format_value): + if attr_prop == "Value" and ("FLOAT" in format_value or "STRING" in format_value or "INT" in format_value): tag_prop = attribute_dict["Tag"].replace("[", "").replace("]", "") if "aux" in tag_prop: diff --git a/addon/io_scs_tools/imp/pim_ef.py b/addon/io_scs_tools/imp/pim_ef.py index 21f476f..b9209da 100644 --- a/addon/io_scs_tools/imp/pim_ef.py +++ b/addon/io_scs_tools/imp/pim_ef.py @@ -257,12 +257,11 @@ def _create_piece( context.window_manager.progress_update(0.5) # VERTEX COLOR + mesh_rgb_final = {} if mesh_rgba: - mesh_rgb_final = mesh_rgba - elif mesh_rgb: - mesh_rgb_final = mesh_rgb - else: - mesh_rgb_final = [] + mesh_rgb_final.update(mesh_rgba) + if mesh_rgb: + mesh_rgb_final.update(mesh_rgb) for vc_layer_name in mesh_rgb_final: max_value = mesh_rgb_final[vc_layer_name][0][0][0] / 2 diff --git a/addon/io_scs_tools/internals/containers/parsers/sii.py b/addon/io_scs_tools/internals/containers/parsers/sii.py index 226b8e1..918101f 100644 --- a/addon/io_scs_tools/internals/containers/parsers/sii.py +++ b/addon/io_scs_tools/internals/containers/parsers/sii.py @@ -322,11 +322,36 @@ def _parse_unit(tokenizer): return 
None -def parse_file(filepath, print_info=False): +def _parse_bare_file(filepath, print_info=False): + if print_info: + print("** SII Parser ...") + unit = _UnitData("", "", is_headless=True) + + file = open(filepath, mode="r", encoding="utf8") + lines = file.readlines() + file.close() + + tokenizer = _Tokenizer(lines, filepath, []) + + while 1: + if tokenizer.consume_token_if_match('eof', '') is not None: + if print_info: + print("** Bare SII Parser END") + return [unit] + + if not _parse_unit_property(tokenizer, unit): + print("Unit property parsing failed") + return None + + +def parse_file(filepath, is_sui=False, print_info=False): """ Reads SCS SII definition file from disk, parse it and return its full content in a form of hierarchical structure. """ + if is_sui: + return _parse_bare_file(filepath, print_info) + if print_info: print("** SII Parser ...") sii_container = [] diff --git a/addon/io_scs_tools/internals/containers/sii.py b/addon/io_scs_tools/internals/containers/sii.py index 12cdf36..7ea5148 100644 --- a/addon/io_scs_tools/internals/containers/sii.py +++ b/addon/io_scs_tools/internals/containers/sii.py @@ -25,13 +25,21 @@ from io_scs_tools.internals.containers.writers import sii as _sii_writer -def get_data_from_file(filepath): - """Returns entire data in data container from specified SII definition file.""" +def get_data_from_file(filepath, is_sui=False): + """Returns entire data in data container from specified SII definition file. 
+ + :param filepath: absolute file path where SII should be read from + :type filepath: str + :param is_sui: True if file should be read as SUI, in that case only one unit will be returned + :type is_sui: bool + :return: list of SII Units if parsing succeeded; otherwise None + :rtype: list[io_scs_tools.internals.structure.UnitData] | None + """ container = None if filepath: if os.path.isfile(filepath): - container = _sii_reader.parse_file(filepath) + container = _sii_reader.parse_file(filepath, is_sui=is_sui) if container: if len(container) < 1: lprint('D SII file "%s" is empty!', (_path_utils.readable_norm(filepath),)) @@ -136,7 +144,7 @@ def get_unit_property(container, prop, unit_instance=0): :param unit_instance: index of unit instance in container list that we are validating :type unit_instance: int :return: None if property is not found insde unit instance; otherwise value of the property - :rtype: None|object + :rtype: None|any """ value = None @@ -157,7 +165,7 @@ def get_direct_unit_property(unit, prop): :param prop: name of the property we are looking for :type prop: str :return: None if property is not found insde unit instance; otherwise value of the property - :rtype: None|object + :rtype: None|any """ value = None diff --git a/addon/io_scs_tools/internals/open_gl/core.py b/addon/io_scs_tools/internals/open_gl/core.py index c799faa..0e9304c 100644 --- a/addon/io_scs_tools/internals/open_gl/core.py +++ b/addon/io_scs_tools/internals/open_gl/core.py @@ -417,6 +417,7 @@ def _draw_3dview_report(region): :type region: bpy.types.Region """ pos = region.height - 62 + show_scroll_controls = _Show3DViewReportOperator.is_scrolled() if _Show3DViewReportOperator.has_lines(): @@ -466,6 +467,7 @@ def _draw_3dview_report(region): if pos - 60 < 0: blf.position(0, 20, pos, 0) blf.draw(0, "...") + show_scroll_controls = True break blf.position(0, 20, pos, 0) @@ -516,3 +518,39 @@ def _draw_3dview_report(region): # draw hide button text blf.position(0, hide_btn_text_pos[0], 
region.height - hide_btn_text_pos[1], 0) blf.draw(0, hide_btn_text) + + # draw scroll controls + if show_scroll_controls: + + # draw scroll up button + glColor3f(.4, .4, .4) + glBegin(GL_POLYGON) + glVertex3f(_OP_consts.View3DReport.SCROLLUP_BTN_AREA[0], region.height - _OP_consts.View3DReport.SCROLLUP_BTN_AREA[2], 0) + glVertex3f(_OP_consts.View3DReport.SCROLLUP_BTN_AREA[1], region.height - _OP_consts.View3DReport.SCROLLUP_BTN_AREA[2], 0) + glVertex3f(_OP_consts.View3DReport.SCROLLUP_BTN_AREA[1], region.height - _OP_consts.View3DReport.SCROLLUP_BTN_AREA[3], 0) + glVertex3f(_OP_consts.View3DReport.SCROLLUP_BTN_AREA[0], region.height - _OP_consts.View3DReport.SCROLLUP_BTN_AREA[3], 0) + glEnd() + + # draw scroll down button + glBegin(GL_POLYGON) + glVertex3f(_OP_consts.View3DReport.SCROLLDOWN_BTN_AREA[0], region.height - _OP_consts.View3DReport.SCROLLDOWN_BTN_AREA[2], 0) + glVertex3f(_OP_consts.View3DReport.SCROLLDOWN_BTN_AREA[1], region.height - _OP_consts.View3DReport.SCROLLDOWN_BTN_AREA[2], 0) + glVertex3f(_OP_consts.View3DReport.SCROLLDOWN_BTN_AREA[1], region.height - _OP_consts.View3DReport.SCROLLDOWN_BTN_AREA[3], 0) + glVertex3f(_OP_consts.View3DReport.SCROLLDOWN_BTN_AREA[0], region.height - _OP_consts.View3DReport.SCROLLDOWN_BTN_AREA[3], 0) + glEnd() + + # gather texts and positions + scrollup_btn_text_pos = _OP_consts.View3DReport.SCROLLUP_BTN_TEXT_POS[int(not _Show3DViewReportOperator.is_shown())] + scrollup_btn_text = _OP_consts.View3DReport.SCROLLUP_BTN_TEXT[int(not _Show3DViewReportOperator.is_shown())] + + scrolldown_btn_text_pos = _OP_consts.View3DReport.SCROLLDOWN_BTN_TEXT_POS[int(not _Show3DViewReportOperator.is_shown())] + scrolldown_btn_text = _OP_consts.View3DReport.SCROLLDOWN_BTN_TEXT[int(not _Show3DViewReportOperator.is_shown())] + + # draw scroll up button text + glColor3f(1, 1, 1) + blf.position(0, scrollup_btn_text_pos[0], region.height - scrollup_btn_text_pos[1], 0) + blf.draw(0, scrollup_btn_text) + + # draw scroll down button text + 
blf.position(0, scrolldown_btn_text_pos[0], region.height - scrolldown_btn_text_pos[1], 0) + blf.draw(0, scrolldown_btn_text) diff --git a/addon/io_scs_tools/internals/persistent/file_save.py b/addon/io_scs_tools/internals/persistent/file_save.py index 6c11543..7c0e507 100644 --- a/addon/io_scs_tools/internals/persistent/file_save.py +++ b/addon/io_scs_tools/internals/persistent/file_save.py @@ -22,6 +22,7 @@ from bpy.app.handlers import persistent from io_scs_tools.internals.containers import config as _config_container from io_scs_tools.utils import get_scs_globals as _get_scs_globals +from io_scs_tools.utils import ensure_scs_globals_save as _ensure_scs_globals_save @persistent @@ -55,6 +56,9 @@ def pre_save(scene): if scs_anim.action in bpy.data.actions: bpy.data.actions[scs_anim.action].use_fake_user = True + # make sure to save world holding SCS globals + _ensure_scs_globals_save() + @persistent def post_save(scene): diff --git a/addon/io_scs_tools/internals/preview_models/__init__.py b/addon/io_scs_tools/internals/preview_models/__init__.py index 4d5839c..365a825 100644 --- a/addon/io_scs_tools/internals/preview_models/__init__.py +++ b/addon/io_scs_tools/internals/preview_models/__init__.py @@ -99,12 +99,14 @@ def unlink(preview_model): bpy.context.scene.scs_cached_num_objects = len(bpy.context.scene.objects) -def load(locator): +def load(locator, deep_reload=False): """Makes a preview model for a locator and link it to it NOTE: locator preview model path must be set :param locator: locator object to which preview model should be set :type locator: bpy.types.Object + :param deep_reload: should model be reloaded completely? 
Use in case model mesh should be freshly loaded from disc + :type deep_reload: bool :return: True if preview model was set; False otherwise :rtype: bool """ @@ -131,7 +133,7 @@ def load(locator): if load_model: - unload(locator) + unload(locator, do_mesh_unlink=deep_reload) prem_name = str("prem_" + locator.name) obj = _get_model_mesh(locator, prem_name) @@ -165,11 +167,13 @@ def load(locator): return False -def unload(locator): +def unload(locator, do_mesh_unlink=False): """Clears a preview model from a locator :param locator: locator object from which preview model should be deleted :type locator: bpy.types.Object + :param do_mesh_unlink: should mesh be unloaded too? Use it only when model should be reloaded + :type do_mesh_unlink: bool """ for child in locator.children: @@ -178,9 +182,15 @@ def unload(locator): # first uncache it _cache.delete_entry(child.name) + # delete object & mesh + mesh = child.data + bpy.context.scene.objects.unlink(child) bpy.data.objects.remove(child, do_unlink=True) + if do_mesh_unlink: + bpy.data.meshes.remove(mesh, do_unlink=True) + # update scene children count to prevent delete to be triggered bpy.context.scene.scs_cached_num_objects = len(bpy.context.scene.objects) diff --git a/addon/io_scs_tools/internals/shaders/eut2/__init__.py b/addon/io_scs_tools/internals/shaders/eut2/__init__.py index b58ebba..488a411 100644 --- a/addon/io_scs_tools/internals/shaders/eut2/__init__.py +++ b/addon/io_scs_tools/internals/shaders/eut2/__init__.py @@ -80,6 +80,10 @@ def get_shader(effect): from io_scs_tools.internals.shaders.eut2.light_tex import LightTex as Shader + elif effect.startswith("retroreflective"): + + from io_scs_tools.internals.shaders.eut2.retroreflective import Retroreflective as Shader + elif effect.startswith("unlit.tex"): from io_scs_tools.internals.shaders.eut2.unlit_tex import UnlitTex as Shader diff --git a/addon/io_scs_tools/internals/shaders/eut2/dif_spec_oclu/__init__.py 
b/addon/io_scs_tools/internals/shaders/eut2/dif_spec_oclu/__init__.py index 9f0ed1a..6fc4a1e 100644 --- a/addon/io_scs_tools/internals/shaders/eut2/dif_spec_oclu/__init__.py +++ b/addon/io_scs_tools/internals/shaders/eut2/dif_spec_oclu/__init__.py @@ -58,9 +58,6 @@ def init(node_tree): spec_mult_n = node_tree.nodes[DifSpec.SPEC_MULT_NODE] vcol_mult_n = node_tree.nodes[DifSpec.VCOLOR_MULT_NODE] - # delete existing - node_tree.nodes.remove(node_tree.nodes[DifSpec.OPACITY_NODE]) - # move existing for node in node_tree.nodes: if node.location.x > start_pos_x + pos_x_shift: @@ -136,67 +133,3 @@ def set_oclu_uv(node_tree, uv_layer): uv_layer = _MESH_consts.none_uv node_tree.nodes[DifSpecOclu.SEC_GEOM_NODE].uv_layer = uv_layer - - @staticmethod - def set_alpha_test_flavor(node_tree, switch_on): - """Set alpha test flavor to this shader. - - :param node_tree: node tree of current shader - :type node_tree: bpy.types.NodeTree - :param switch_on: flag indication if alpha test should be switched on or off - :type switch_on: bool - """ - - if switch_on and not blend_over.is_set(node_tree): - out_node = node_tree.nodes[DifSpec.OUT_MAT_NODE] - in_node = node_tree.nodes[DifSpec.VCOL_GROUP_NODE] - - location = (out_node.location.x - 185 * 2, out_node.location.y - 500) - - alpha_test.init(node_tree, location, in_node.outputs['Vertex Color Alpha'], out_node.inputs['Alpha']) - else: - alpha_test.delete(node_tree) - - @staticmethod - def set_blend_over_flavor(node_tree, switch_on): - """Set blend over flavor to this shader. - - :param node_tree: node tree of current shader - :type node_tree: bpy.types.NodeTree - :param switch_on: flag indication if blend over should be switched on or off - :type switch_on: bool - """ - - # remove alpha test flavor if it was set already. 
Because these two can not coexist - if alpha_test.is_set(node_tree): - DifSpec.set_alpha_test_flavor(node_tree, False) - - out_node = node_tree.nodes[DifSpec.OUT_MAT_NODE] - in_node = node_tree.nodes[DifSpec.VCOL_GROUP_NODE] - - if switch_on: - blend_over.init(node_tree, in_node.outputs['Vertex Color Alpha'], out_node.inputs['Alpha']) - else: - blend_over.delete(node_tree) - - @staticmethod - def set_blend_add_flavor(node_tree, switch_on): - """Set blend add flavor to this shader. - - :param node_tree: node tree of current shader - :type node_tree: bpy.types.NodeTree - :param switch_on: flag indication if blend add should be switched on or off - :type switch_on: bool - """ - - # remove alpha test flavor if it was set already. Because these two can not coexist - if alpha_test.is_set(node_tree): - DifSpec.set_alpha_test_flavor(node_tree, False) - - out_node = node_tree.nodes[DifSpec.OUT_MAT_NODE] - in_node = node_tree.nodes[DifSpec.VCOL_GROUP_NODE] - - if switch_on: - blend_add.init(node_tree, in_node.outputs['Vertex Color Alpha'], out_node.inputs['Alpha']) - else: - blend_add.delete(node_tree) diff --git a/addon/io_scs_tools/internals/shaders/eut2/retroreflective/__init__.py b/addon/io_scs_tools/internals/shaders/eut2/retroreflective/__init__.py new file mode 100644 index 0000000..343f843 --- /dev/null +++ b/addon/io_scs_tools/internals/shaders/eut2/retroreflective/__init__.py @@ -0,0 +1,110 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +# Copyright (C) 2019: SCS Software + + +from io_scs_tools.internals.shaders.eut2.dif import Dif +from io_scs_tools.internals.shaders.flavors import blend_over + + +class Retroreflective(Dif): + SPEC_MULT_NODE = "SpecMultiplier" + + @staticmethod + def get_name(): + """Get name of this shader file with full modules path.""" + return __name__ + + @staticmethod + def init(node_tree): + Retroreflective.init(node_tree) + + @staticmethod + def init(node_tree): + """Initialize node tree with links for this shader. + + :param node_tree: node tree on which this shader should be created + :type node_tree: bpy.types.NodeTree + """ + + pos_x_shift = 185 + + # init parent + Dif.init(node_tree) + + opacity_n = node_tree.nodes[Dif.OPACITY_NODE] + vcol_mult_n = node_tree.nodes[Dif.VCOLOR_MULT_NODE] + out_mat_n = node_tree.nodes[Dif.OUT_MAT_NODE] + compose_lighting_n = node_tree.nodes[Dif.COMPOSE_LIGHTING_NODE] + + # delete existing + node_tree.nodes.remove(node_tree.nodes[Dif.SPEC_COL_NODE]) + node_tree.nodes.remove(node_tree.nodes[Dif.DIFF_COL_NODE]) + node_tree.nodes.remove(node_tree.nodes[Dif.DIFF_MULT_NODE]) + + # node creation + spec_mult_n = node_tree.nodes.new("ShaderNodeMath") + spec_mult_n.name = Retroreflective.SPEC_MULT_NODE + spec_mult_n.label = Retroreflective.SPEC_MULT_NODE + spec_mult_n.location = (opacity_n.location[0] + pos_x_shift, opacity_n.location[1]) + spec_mult_n.operation = "MULTIPLY" + spec_mult_n.inputs[1].default_value = 0.2 # used for specular color designed for the best visual on traffic signs + + # links creation + node_tree.links.new(spec_mult_n.inputs[0], opacity_n.outputs['Value']) + + node_tree.links.new(compose_lighting_n.inputs['Diffuse Color'], vcol_mult_n.outputs['Color']) + 
node_tree.links.new(out_mat_n.inputs['Color'], vcol_mult_n.outputs['Color']) + + node_tree.links.new(out_mat_n.inputs['Spec'], spec_mult_n.outputs['Value']) + + @staticmethod + def set_material(node_tree, material): + """Set output material for this shader. + + :param node_tree: node tree of current shader + :type node_tree: bpy.types.NodeTree + :param material: blender material used in this tree node as output + :type material: bpy.types.Material + """ + + # set hardcoded shininess + material.specular_hardness = 60 + + Dif.set_material(node_tree, material) + + @staticmethod + def set_retroreflective_decal_flavor(node_tree, switch_on): + """Set depth retroreflective decal flavor to this shader. + NOTE: this is essentially same flavor as blend_over, thus just use blend over internally + + :param node_tree: node tree of current shader + :type node_tree: bpy.types.NodeTree + :param switch_on: flag indication if retroreflective decal should be switched on or off + :type switch_on: bool + """ + + # remove alpha test flavor if it was set already. Because these two can not coexist + out_mat_node = node_tree.nodes[Dif.OUT_MAT_NODE] + opacity_n = node_tree.nodes[Dif.OPACITY_NODE] + + if switch_on: + blend_over.init(node_tree, opacity_n.outputs['Value'], out_mat_node.inputs['Alpha']) + else: + blend_over.delete(node_tree) diff --git a/addon/io_scs_tools/internals/shaders/shader.py b/addon/io_scs_tools/internals/shaders/shader.py index 84f9b1d..0ef35bc 100644 --- a/addon/io_scs_tools/internals/shaders/shader.py +++ b/addon/io_scs_tools/internals/shaders/shader.py @@ -47,7 +47,7 @@ def setup_nodes(material, effect, attr_dict, tex_dict, recreate): flavors["alpha_test"] = material.use_transparency = True material.transparency_method = "Z_TRANSPARENCY" - if (effect.endswith(".over") or ".over." 
in effect) and ".over.dif" not in effect and ".retroreflective" not in effect: flavors["blend_over"] = material.use_transparency = True material.transparency_method = "Z_TRANSPARENCY" @@ -99,6 +99,10 @@ def setup_nodes(material, effect, attr_dict, tex_dict, recreate): if effect.endswith(".paint") or ".paint." in effect: flavors["paint"] = True + if effect.endswith(".decal.over") and ".retroreflective" in effect: + flavors["retroreflective_decal"] = material.use_transparency = True + material.transparency_method = "Z_TRANSPARENCY" + __setup_nodes__(material, effect, attr_dict, tex_dict, {}, flavors, recreate) diff --git a/addon/io_scs_tools/internals/structure.py b/addon/io_scs_tools/internals/structure.py index 1cc6a29..9418143 100755 --- a/addon/io_scs_tools/internals/structure.py +++ b/addon/io_scs_tools/internals/structure.py @@ -165,16 +165,18 @@ def get_prop_as_color(self, prop_name): return prop_value - def get_prop(self, prop_name): + def get_prop(self, prop_name, default=None): """Gets properety from unit. 
:param prop_name: name of the property we are searching for :type prop_name: str + :param default: default value that should be returned if property not found + :type default: any :return: None if property not found, otherwise object representing it's data :rtype: None|object """ if prop_name not in self.props: - return None + return default return self.props[prop_name] diff --git a/addon/io_scs_tools/operators/material.py b/addon/io_scs_tools/operators/material.py index 9c28475..01cd245 100644 --- a/addon/io_scs_tools/operators/material.py +++ b/addon/io_scs_tools/operators/material.py @@ -211,6 +211,8 @@ class WriteThrough(bpy.types.Operator): "( Ctrl + Click to WT on other SCS Root Objects on same look, " "Ctrl + Shift + Click to WT all looks of all SCS Root Objects )" ) + bl_options = {'REGISTER', 'UNDO'} + property_str = StringProperty( description="String representing which property should be written through.", default="", @@ -396,6 +398,7 @@ class LoadAliasedMaterial(bpy.types.Operator): bl_label = "Load Aliased Mat" bl_idname = "material.load_aliased_material" bl_description = "Load values from aliased material." 
+ bl_options = {'REGISTER', 'UNDO'} @classmethod def poll(cls, context): @@ -575,6 +578,7 @@ class SelectShaderTextureFilePath(bpy.types.Operator): bl_label = "Select Shader Texture File" bl_idname = "material.scs_select_shader_texture_filepath" bl_description = "Open a Texture file browser" + bl_options = {'REGISTER', 'UNDO'} shader_texture = bpy.props.StringProperty(options={'HIDDEN'}) filepath = StringProperty( diff --git a/addon/io_scs_tools/operators/mesh.py b/addon/io_scs_tools/operators/mesh.py index ea6e326..793043f 100644 --- a/addon/io_scs_tools/operators/mesh.py +++ b/addon/io_scs_tools/operators/mesh.py @@ -447,17 +447,21 @@ def modal(self, context, event): active_obj = self.__get_active_object__() active_object_changed = active_obj != context.active_object + + # abort immediately if active object was changed + if active_object_changed: + self.cancel(context) + return {'CANCELLED'} + is_object_mode_changed = self.__active_object_mode != context.active_object.mode # allow changing into the edit mode as user might go there just to reselect # masked faces on which he wants to paint - if is_object_mode_changed and context.active_object.mode == "EDIT" and not active_object_changed: + if is_object_mode_changed and context.active_object.mode == "EDIT": return {'PASS_THROUGH'} - # abort if: - # 1. active object mode has changed - # 2. 
if user changed active object - if is_object_mode_changed or active_object_changed: + # abort if active object mode has changed + if is_object_mode_changed: self.cancel(context) return {'CANCELLED'} @@ -510,7 +514,10 @@ def cancel(self, context): # finish operator execution - go back to object mode if active_obj.mode == "VERTEX_PAINT": - bpy.ops.object.mode_set(mode="OBJECT") + override = context.copy() + override['mode'] = "OBJECT" + override['active_object'] = active_obj + bpy.ops.object.mode_set(override) # one last time rebake start_time = time() diff --git a/addon/io_scs_tools/operators/object.py b/addon/io_scs_tools/operators/object.py index 3003d1d..b3bd6b2 100755 --- a/addon/io_scs_tools/operators/object.py +++ b/addon/io_scs_tools/operators/object.py @@ -1190,6 +1190,7 @@ class FixHookups(bpy.types.Operator): bl_label = "Fix SCS Hookup Names on Model Locators" bl_idname = "object.scs_fix_model_locator_hookups" bl_description = "Tries to convert existing pure hookup ids to valid hookup name (valid Hookup Library is required)." + bl_options = {'REGISTER', 'UNDO'} def execute(self, context): lprint("D " + self.bl_label + "...") @@ -1274,6 +1275,7 @@ class AssignTerrainPoints(bpy.types.Operator): bl_idname = "object.assign_terrain_points" bl_description = str("Assigns terrain point to currently selected prefab Control Node " "(confirm requested if some vertices from this mesh are already assigned).") + bl_options = {'REGISTER', 'UNDO'} vg_name = StringProperty() """Name of the vertex group for terrain points. 
It consists of vertex group prefix and node index.""" @@ -1333,6 +1335,7 @@ class ClearTerrainPointsOperator(bpy.types.Operator): bl_label = "Clear All Terrain Points" bl_idname = "object.clear_all_terrain_points" bl_description = "Clears all terrain points for currently selected prefab Control Node" + bl_options = {'REGISTER', 'UNDO'} @classmethod def poll(cls, context): @@ -1788,6 +1791,7 @@ class ConnectPrefabLocators(bpy.types.Operator): bl_label = "Connect Prefab Locators" bl_idname = "object.connect_prefab_locators" bl_description = "To connect prefab locators two of them must be selected and they have to be same type" + bl_options = {'REGISTER', 'UNDO'} def execute(self, context): @@ -1822,6 +1826,7 @@ class DisconnectPrefabLocators(bpy.types.Operator): bl_label = "Disconnect Prefab Locators" bl_idname = "object.disconnect_prefab_locators" bl_description = "To disconnect navigation points two connected prefab locators must be selected" + bl_options = {'REGISTER', 'UNDO'} def execute(self, context): @@ -2009,6 +2014,7 @@ class PreviewModelPath(bpy.types.Operator): bl_label = "Select Preview Model (*.pim)" bl_idname = "object.select_preview_model_path" bl_description = "Open a file browser" + bl_options = {'REGISTER', 'UNDO'} filepath = StringProperty( name="Preview Model File Path", @@ -2062,6 +2068,7 @@ class CreateSCSRootObject(bpy.types.Operator): bl_idname = "object.create_scs_root_object" bl_description = "Create a new 'SCS Root Object' with initial setup. If any objects are selected," \ "they automatically become a part of the new 'SCS Game Object'." + bl_options = {'REGISTER', 'UNDO'} def execute(self, context): lprint('D Create New SCS Root Object...') @@ -2074,6 +2081,7 @@ class CreateSCSRootObjectDialog(bpy.types.Operator): bl_idname = "object.create_scs_root_object_dialog" bl_description = "Create a new 'SCS Root Object' with initial setup.\nIf any objects are selected," \ "they automatically become a part of the new 'SCS Game Object'." 
+ bl_options = {'REGISTER', 'UNDO'} def execute(self, context): lprint('D Create New SCS Root Object with Name dialog...') @@ -2254,6 +2262,7 @@ class AddObject(bpy.types.Operator): bl_idname = "object.scs_add_object" bl_description = "Create SCS object of choosen type at 3D coursor position \n" \ "(when locator is created it will also be parented to SCS Root, if currently active)." + bl_options = {'REGISTER', 'UNDO'} # create function for retrieving items so custom icons can be used def new_object_type_items(self, context): diff --git a/addon/io_scs_tools/operators/scene.py b/addon/io_scs_tools/operators/scene.py index f705769..85e493e 100644 --- a/addon/io_scs_tools/operators/scene.py +++ b/addon/io_scs_tools/operators/scene.py @@ -20,6 +20,7 @@ import bpy import bmesh +import numpy import os import subprocess from collections import OrderedDict @@ -1414,23 +1415,24 @@ class PaintjobTools: """ class ImportFromDataSII(bpy.types.Operator): - bl_label = "Import SCS Truck From data.sii" + bl_label = "Import SCS Vehicle From data.sii" bl_idname = "scene.scs_import_from_data_sii" - bl_description = ("Import all models having paintable parts of a truck (including upgrades)" - "from choosen '/def/vehicle/truck//data.sii' file.") + bl_description = ("Import all models having paintable parts of a vehicle (including upgrades)" + "from choosen '/def/vehicle///data.sii' file.") bl_options = set() directory = StringProperty( - name="Import Truck", + name="Import Vehicle", subtype='DIR_PATH', ) filepath = StringProperty( - name="Truck 'data.sii' filepath", - description="File path to truck 'data.sii", + name="Vehicle 'data.sii' filepath", + description="File path to vehicle 'data.sii", subtype='FILE_PATH', ) filter_glob = StringProperty(default="*.sii", options={'HIDDEN'}) + vehicle_type = _PT_consts.VehicleTypes.NONE start_time = None # saving start time when initialize is called # saving old settings from scs globals @@ -1563,13 +1565,15 @@ def import_and_clean_model(context, 
project_path, model_path): return curr_scs_root @staticmethod - def add_model_to_group(scs_root, group_name_prefix, linked_to_defs=set()): + def add_model_to_group(scs_root, model_type, model_name, linked_to_defs=set()): """Adds model to group so it can be distinguished amongs all other models. :param scs_root: blender object representing SCS Root :type scs_root: bpy.types.Object - :param group_name_prefix: prefix name for - :type group_name_prefix: str + :param model_type: type of the model (chassis, cabin, upgrade) + :type model_type: str + :param model_name: name of the model + :type model_name: str :param linked_to_defs: set of the sii file paths where this model was defined :type linked_to_defs: set[str] """ @@ -1583,15 +1587,17 @@ def add_model_to_group(scs_root, group_name_prefix, linked_to_defs=set()): variant = _sii_container.get_unit_property(sii_container, "variant") if variant is not None: - used_variants_by_linked_defs.add(variant) + used_variants_by_linked_defs.add(variant.lower()) else: # if no variant specified "default" is used by game, so add it to our set used_variants_by_linked_defs.add("default") # create groups per variant for i, variant in enumerate(scs_root.scs_object_variant_inventory): + variant_name = variant.name.lower() + # do not create groups for unused variants - if variant.name not in used_variants_by_linked_defs: + if variant_name not in used_variants_by_linked_defs: continue bpy.ops.object.select_all(action="DESELECT") @@ -1600,7 +1606,8 @@ def add_model_to_group(scs_root, group_name_prefix, linked_to_defs=set()): override["active_object"] = scs_root # operator searches for scs root from active object, so make sure context will be correct bpy.ops.object.switch_variant_selection(override, select_type=_OP_consts.SelectionType.select, variant_index=i) - group = bpy.data.groups.new(group_name_prefix + " | " + variant.name) + group_name = model_type + " | " + model_name + " | " + variant_name + group = bpy.data.groups.new(group_name) 
mesh_objects_count = 0 for obj in scs_root.children: @@ -1619,6 +1626,7 @@ def add_model_to_group(scs_root, group_name_prefix, linked_to_defs=set()): bpy.data.groups.remove(group, do_unlink=True) continue + group[_PT_consts.model_variant_prop] = variant_name group[_PT_consts.model_refs_to_sii] = list(linked_to_defs) obj = bpy.data.objects.new(_PT_consts.export_tag_obj_name + "_" + str(len(bpy.data.groups)), None) @@ -1665,6 +1673,8 @@ def initalize(self): scs_globals.import_pic_file = False scs_globals.import_use_welding = False + self.vehicle_type = _PT_consts.VehicleTypes.NONE + def finalize(self): """Restore scs globals settings to the state they were before. """ @@ -1688,8 +1698,12 @@ def execute(self, context): data_sii_container = _sii_container.get_data_from_file(data_sii_path) # initial checkups - if not _sii_container.has_valid_unit_instance(data_sii_container, unit_type="accessory_truck_data", req_props=("fallback",)): - message = "Chosen file is not a valid truck 'data.sii' file!" + if _sii_container.has_valid_unit_instance(data_sii_container, unit_type="accessory_truck_data", req_props=("fallback",)): + self.vehicle_type = _PT_consts.VehicleTypes.TRUCK + elif _sii_container.has_valid_unit_instance(data_sii_container, unit_type="accessory_trailer_data", req_props=("info",)): + self.vehicle_type = _PT_consts.VehicleTypes.TRAILER + else: + message = "Chosen file is not a valid vehicle 'data.sii' file!" 
lprint("E " + message) self.report({'ERROR'}, message) self.finalize() @@ -1703,10 +1717,10 @@ def execute(self, context): # first find path of whole game project game_project_path = dir_path - for _ in range(0, 4): # we can simply go 4 dirs up, as def has to be properly placed /def/vehicle/truck/ + for _ in range(0, 4): # we can simply go 4 dirs up, as def has to be properly placed /def/vehicle// game_project_path = _path_utils.readable_norm(os.path.join(game_project_path, os.pardir)) - truck_sub_dir = os.path.relpath(dir_path, game_project_path) + vehicle_sub_dir = os.path.relpath(dir_path, game_project_path) game_project_path = os.path.join(game_project_path, os.pardir) # if data.sii was inside dlc or mod we have to go up once more in filesystem level @@ -1725,55 +1739,63 @@ def execute(self, context): ################################## # - # 2. import truck models + # 2. import vehicle models # ################################## - # collect all models paths for truck chassis and cabins - truck_model_paths = {} # holds list of SII files that each model was referenced from {KEY: model path, VALUE: list of SII paths} + # collect all models paths for vehicle chassis, cabins and possible trailer body + vehicle_model_paths = {} # holds list of SII files that each model was referenced from {KEY: model path, VALUE: list of SII paths} for project_path in project_paths: - truck_def_dirpath = os.path.join(project_path, truck_sub_dir) + vehicle_def_dirpath = os.path.join(project_path, vehicle_sub_dir) - target_dirpath = os.path.join(truck_def_dirpath, "chassis") + target_dirpath = os.path.join(vehicle_def_dirpath, "chassis") curr_models = self.gather_model_paths(target_dirpath, "accessory_chassis_data", ("detail_model", "model")) - self.update_model_paths_dict(truck_model_paths, curr_models) + self.update_model_paths_dict(vehicle_model_paths, curr_models) + + if self.vehicle_type == _PT_consts.VehicleTypes.TRUCK: + + target_dirpath = os.path.join(vehicle_def_dirpath, 
"cabin") + curr_models = self.gather_model_paths(target_dirpath, "accessory_cabin_data", ("detail_model", "model")) + self.update_model_paths_dict(vehicle_model_paths, curr_models) + + elif self.vehicle_type == _PT_consts.VehicleTypes.TRAILER: - target_dirpath = os.path.join(truck_def_dirpath, "cabin") - curr_models = self.gather_model_paths(target_dirpath, "accessory_cabin_data", ("detail_model", "model")) - self.update_model_paths_dict(truck_model_paths, curr_models) + target_dirpath = os.path.join(vehicle_def_dirpath, "body") + curr_models = self.gather_model_paths(target_dirpath, "accessory_trailer_body_data", ("detail_model", "model")) + self.update_model_paths_dict(vehicle_model_paths, curr_models) - lprint("S Truck Paths:\n%r" % truck_model_paths) + lprint("S Vehicle Paths:\n%r" % vehicle_model_paths) # import and properly group imported models possible_upgrade_locators = {} # dictionary holding all locators that can be used as candidates for upgrades positioning already_imported = set() # set holding imported path of already imported model, to avoid double importing - multiple_project_truck_models = set() # set of model paths found in multiple projects (for reporting purposes) + multiple_project_vehicle_models = set() # set of model paths found in multiple projects (for reporting purposes) for project_path in project_paths: - for truck_model_path in truck_model_paths: + for vehicle_model_path in vehicle_model_paths: - model_path = os.path.join(project_path, truck_model_path.lstrip("/")) + model_path = os.path.join(project_path, vehicle_model_path.lstrip("/")) # initial checks if not os.path.isfile(model_path + ".pim"): continue - if truck_model_path in already_imported: - multiple_project_truck_models.add(truck_model_path) + if vehicle_model_path in already_imported: + multiple_project_vehicle_models.add(vehicle_model_path) continue - already_imported.add(truck_model_path) + already_imported.add(vehicle_model_path) # import model - curr_truck_scs_root = 
self.import_and_clean_model(context, project_path, model_path) + curr_vehicle_scs_root = self.import_and_clean_model(context, project_path, model_path) # truck did not have any paintable parts, go to next - if curr_truck_scs_root is None: + if curr_vehicle_scs_root is None: continue # collect all locators as candidates for being used for upgrades positioning - for obj in curr_truck_scs_root.children: + for obj in curr_vehicle_scs_root.children: if obj.type != "EMPTY" or obj.scs_props.empty_object_type != "Locator": continue @@ -1781,11 +1803,14 @@ def execute(self, context): possible_upgrade_locators[obj.name] = obj # put imported model into it's own groups per variant - self.add_model_to_group(curr_truck_scs_root, "truck | " + os.path.basename(truck_model_path), truck_model_paths[truck_model_path]) + self.add_model_to_group(curr_vehicle_scs_root, + self.vehicle_type, + os.path.basename(vehicle_model_path), + vehicle_model_paths[vehicle_model_path]) - # if none truck models were properly imported it makes no sense to go forward on upgrades + # if none vehicle models were properly imported it makes no sense to go forward on upgrades if len(already_imported) <= 0: - message = "No truck models properly imported!" + message = "No vehicle models properly imported!" 
lprint("E " + message) self.report({"ERROR"}, message) self.finalize() @@ -1797,33 +1822,33 @@ def execute(self, context): # ################################## - # collect all upgrade models, by listing all upgrades directories in all projects for this truck + # collect all upgrade models, by listing all upgrades directories in all projects for this vehicle upgrade_model_paths = {} # model paths dictionary {key: upgrade type (eg "f_intake_cab"); value: set of model paths for this upgrade} for project_path in project_paths: # collect any possible upgrade over all projects - truck_accessory_def_dirpath = os.path.join(project_path, truck_sub_dir) - truck_accessory_def_dirpath = os.path.join(truck_accessory_def_dirpath, "accessory") + vehicle_accessory_def_dirpath = os.path.join(project_path, vehicle_sub_dir) + vehicle_accessory_def_dirpath = os.path.join(vehicle_accessory_def_dirpath, "accessory") # if current project path doesn't have accessories defined just skip it - if not os.path.isdir(truck_accessory_def_dirpath): + if not os.path.isdir(vehicle_accessory_def_dirpath): continue - for upgrade_name in os.listdir(truck_accessory_def_dirpath): + for upgrade_type in os.listdir(vehicle_accessory_def_dirpath): # ignore files - if not os.path.isdir(os.path.join(truck_accessory_def_dirpath, upgrade_name)): + if not os.path.isdir(os.path.join(vehicle_accessory_def_dirpath, upgrade_type)): continue - if upgrade_name not in upgrade_model_paths: - upgrade_model_paths[upgrade_name] = {} + if upgrade_type not in upgrade_model_paths: + upgrade_model_paths[upgrade_type] = {} - target_dirpath = os.path.join(truck_accessory_def_dirpath, upgrade_name) + target_dirpath = os.path.join(vehicle_accessory_def_dirpath, upgrade_type) curr_models = self.gather_model_paths(target_dirpath, "accessory_addon_data", ("exterior_model",)) - self.update_model_paths_dict(upgrade_model_paths[upgrade_name], curr_models) + self.update_model_paths_dict(upgrade_model_paths[upgrade_type], curr_models) - 
if len(upgrade_model_paths[upgrade_name]) <= 0: # if no models for upgrade, remove set also - del upgrade_model_paths[upgrade_name] + if len(upgrade_model_paths[upgrade_type]) <= 0: # if no models for upgrade, remove set also + del upgrade_model_paths[upgrade_type] # import models, group and position them properly already_imported = set() # set holding imported path of already imported model, to avoid double importing @@ -1839,11 +1864,14 @@ def execute(self, context): if not os.path.isfile(model_path + ".pim"): continue - if upgrade_model_path in already_imported: - multiple_project_upgrade_models.add(upgrade_model_path) + # construct key for checking already imported models by same type of upgrade + model_path_key = upgrade_type + " | " + upgrade_model_path + + if model_path_key in already_imported: + multiple_project_upgrade_models.add(model_path_key) continue - already_imported.add(upgrade_model_path) + already_imported.add(model_path_key) # import model curr_upgrade_scs_root = self.import_and_clean_model(context, project_path, model_path) @@ -1852,7 +1880,9 @@ def execute(self, context): continue # put imported model into it's own groups - self.add_model_to_group(curr_upgrade_scs_root, upgrade_type + " | " + os.path.basename(upgrade_model_path), + self.add_model_to_group(curr_upgrade_scs_root, + upgrade_type, + os.path.basename(upgrade_model_path), upgrade_model_paths[upgrade_type][upgrade_model_path]) # find upgrade locator by prefix & position upgrade by locator aka make parent on it @@ -1865,7 +1895,7 @@ def execute(self, context): # Now we are trying to find "perfect" match, which is found, # when matched prefixed upgrade locator is also assigned to at least one group. # This way we eliminate locators that are in variants - # not used by any chassis or cabin of our truck. + # not used by any chassis, cabin or trailer body of our vehicle. # However cases involving "suitable_for" fields are not covered here! 
if upgrade_locator is None: @@ -1885,13 +1915,14 @@ def execute(self, context): curr_upgrade_scs_root.parent = upgrade_locator # on the end report multiple project model problems - if len(multiple_project_truck_models) > 0: - lprint("W Truck models found in multiple projects, one from 'mod_' or 'dlc_' project was used! Multiple project models:") - for truck_model_path in multiple_project_truck_models: - lprint("W %r", (truck_model_path,)) + if len(multiple_project_vehicle_models) > 0: + lprint("W Same vehicle models referenced by multiple SIIsprojects, one from 'mod_' or 'dlc_' project was used! Multiple project " + "models:") + for vehicle_model_path in multiple_project_vehicle_models: + lprint("W %r", (vehicle_model_path,)) if len(multiple_project_upgrade_models) > 0: - lprint("W Upgrade models found in multiple projects, one from 'mod_' or 'dlc_' project was used! Multiple project models:") + lprint("W Same upgrade models referenced by multiple upgrades SIIs or multiple projects ('mod_' or 'dlc_'):") for upgrade_model_path in multiple_project_upgrade_models: lprint("W %r", (upgrade_model_path,)) @@ -1941,6 +1972,21 @@ class ExportUVLayoutAndMesh(bpy.types.Operator): default=True ) + export_id_mask = BoolProperty( + name="Export ID Mask", + description="Should be id mask marking texture portions be exported?", + default=True + ) + + id_mask_alpha = FloatProperty( + name="ID Mask Color Alpha", + description="Alpha value of ID color when exporting ID Mask\n" + "(For debugging purposes of texture portion overlaying, value 0.5 is advised otherwise 1.0 should be used.)", + default=0.5, + min=0.1, + max=1.0 + ) + export_mesh = BoolProperty( name="Export Mesh as OBJ", description="Should OBJ mesh also be exported?", @@ -2021,6 +2067,15 @@ def cleanup(*args): for mesh in meshes: bpy.data.meshes.remove(mesh, do_unlink=True) + @staticmethod + def cleanup_meshes(): + """Cleanups any meshes with zero users that might be left-overs from join operator. 
+ """ + + for m in bpy.data.meshes: + if m.users == 0: + bpy.data.meshes.remove(m) + def check(self, context): if self.layout_sii_selection_mode: @@ -2042,6 +2097,9 @@ def draw(self, context): col.label("What to export?", icon='QUESTION') col.prop(self, "export_2nd_uvs") col.prop(self, "export_3rd_uvs") + col.prop(self, "export_id_mask") + if self.export_id_mask: + col.prop(self, "id_mask_alpha", slider=True) col.prop(self, "export_mesh") def do_report(self, type, message, do_report=False): @@ -2114,7 +2172,8 @@ def execute(self, context): context.scene.objects.active = context.selected_objects[0] override = context.copy() override["selected_objects"] = context.selected_objects - bpy.ops.object.join(override) # NOTE: this operator leaves old meshes behind, but for now we won't solve this issue + bpy.ops.object.join(override) + self.cleanup_meshes() curr_merged_object = context.selected_objects[0] curr_truckpaint_mat = None @@ -2196,9 +2255,17 @@ def execute(self, context): texture_portions[unit_id] = texture_portion + # do model sii validation + for unit_id in texture_portions: + texture_portion = texture_portions[unit_id] + + if not texture_portion.get_prop("model_sii") and not texture_portion.get_prop("is_master", False): + self.do_report({'ERROR'}, "Invalid texture portion with name %r as 'model_sii' is not defined!" % unit_id, do_report=True) + return {'CANCELLED'} + lprint("S Found texture portions: %r", (texture_portions.keys(),)) - # 2. bind each merged object to it's texture portion and filter to three categories: + # 2. 
bind each merged object to it's texture portion and filter to four categories: # objects which are independently exported by transformation defined in their texture potion (be it original or parent portion) independent_export_objects = {} @@ -2207,6 +2274,8 @@ def execute(self, context): # objects which are (direct or indirect) children of master export objects; # they have to be duplicated & included in master objects before export (to see uvs on all master layouts) master_child_export_objects = {} + # objects without referenced SII, which won't be exported and should be removed manually via cleanup method + unconfigured_objects = [] for obj in merged_objects_to_export: group = merged_objects_to_export[obj] @@ -2217,6 +2286,10 @@ def execute(self, context): model_sii = texture_portions[unit_id].get_prop("model_sii") + # ignore texture portions without reference to model sii + if not model_sii: + continue + for reference_to_sii in group[_PT_consts.model_refs_to_sii]: # yep we found possible sii of the model, but not quite yet @@ -2225,11 +2298,13 @@ def execute(self, context): sii_cont = _sii_container.get_data_from_file(reference_to_sii) variant = _sii_container.get_unit_property(sii_cont, "variant") - if not variant: - variant = "default" # if variant is not specified in sii, our games use default + if variant: + variant = variant.lower() + else: + variant = "default" # if variant is not specified in sii, our games uses default # now check variant: if it's the same then we have it! 
- if variant == group.name.split(" | ")[2]: # yep, 3rd split of group name suggest variant + if variant == group[_PT_consts.model_variant_prop]: texture_portion = texture_portions[unit_id] break @@ -2243,7 +2318,8 @@ def execute(self, context): self.do_report({'WARNING'}, "Model %r wasn't referenced by any SII defined in paintjob configuration metadata, please reconfigure!\n\t " - "SII files from which model was referenced:\n\t %s" % (group.name, referenced_siis), do_report=True) + "SII files from which model was referenced:\n\t %s" % (group.name, referenced_siis)) + unconfigured_objects.append(obj) continue # filter out objects using master texture portions @@ -2266,13 +2342,17 @@ def execute(self, context): else: independent_export_objects[obj] = texture_portion # even if it has parent it's exported independent; no duplicates needed + # cleanup unconfigured objects + if len(unconfigured_objects) > 0: + self.cleanup(unconfigured_objects) + # nonsense to go further if nothing to export if len(independent_export_objects) + len(master_export_objects) <= 0: self.do_report({"ERROR"}, "Nothing to export, independent objects: %s, master objects: %s, objects with master parent: %s!" % (len(independent_export_objects), len(master_export_objects), len(master_child_export_objects)), do_report=True) - self.cleanup(merged_objects_to_export) + self.cleanup(merged_objects_to_export.keys()) return {'CANCELLED'} # 3. 
do uv transformations and distribute master children objects: @@ -2298,6 +2378,7 @@ def execute(self, context): override = context.copy() override["selected_objects"] = context.selected_objects bpy.ops.object.join(override) + self.cleanup_meshes() bpy.data.objects.remove(obj, do_unlink=True) @@ -2325,6 +2406,7 @@ def execute(self, context): override = context.copy() override["selected_objects"] = context.selected_objects bpy.ops.object.join(override) + self.cleanup_meshes() ################################## # @@ -2383,6 +2465,76 @@ def execute(self, context): # remove final merged object now as we done our work here bpy.data.objects.remove(final_merged_object, do_unlink=True) + ################################## + # + # 5. export texture portions id mask + # + ################################## + + if self.export_id_mask: + + start_time = time() + + # intialize pixel values for id mask texture + img_pixels = [0.0] * common_texture_size[0] * common_texture_size[1] * 4 + + id_mask_color_idx = 0 + for unit_id in texture_portions: + + # ignore portions with parent attribute, they don't use it's own texture space + if texture_portions[unit_id].get_prop("parent") is not None: + continue + + position = [float(i) for i in texture_portions[unit_id].get_prop("position")] + size = [float(i) for i in texture_portions[unit_id].get_prop("size")] + + portion_width = round(size[0] * common_texture_size[0]) + portion_height = round(size[1] * common_texture_size[1]) + portion_pos_x = round(position[0] * common_texture_size[0]) + portion_pos_y = round(position[1] * common_texture_size[1]) + + # calculate this portion color from RGB values + portion_col = list(_PT_consts.id_mask_colors[id_mask_color_idx % len(_PT_consts.id_mask_colors)]) + id_mask_color_idx += 1 + portion_col[0] /= 255.0 + portion_col[1] /= 255.0 + portion_col[2] /= 255.0 + portion_col.append(self.id_mask_alpha) + + # define array buffers for application of id masking color + img_pixels_buffer = numpy.array([0.0] * 4 
* portion_width) + portion_col_buffer = numpy.array(portion_col * portion_width) + + # write proper pixels row by row + for row_i in range(0, portion_height): + + start_px = (row_i + portion_pos_y) * common_texture_size[0] * 4 + portion_pos_x * 4 + end_px = start_px + portion_width * 4 + + img_pixels_buffer[:] = img_pixels[start_px:end_px] + img_pixels_buffer += portion_col_buffer + + img_pixels[start_px:end_px] = img_pixels_buffer + + # create image data block + img = bpy.data.images.new("tmp_img", common_texture_size[0], common_texture_size[1], alpha=True) + img.colorspace_settings.name = "sRGB" # make sure we use sRGB color-profile + img.use_alpha = True + img.pixels[:] = img_pixels + + # save + scene = bpy.context.scene + scene.render.image_settings.file_format = "PNG" + scene.render.image_settings.color_mode = "RGBA" + img.save_render(self.filepath + ".id_mask.png", bpy.context.scene) + + # remove image data-block, as we don't need it anymore + img.buffers_free() + bpy.data.images.remove(img, do_unlink=True) + + lprint("I Exported ID mask texture in %.2f sec!" % (time() - start_time)) + + self.do_report({'INFO'}, "Somehow we made it to the end, if no warning & errros appeared, then you are perfect!", do_report=True) return {'FINISHED'} def invoke(self, context, event): @@ -2396,6 +2548,126 @@ class GeneratePaintjob(bpy.types.Operator): bl_description = "Generates complete setup for given paintjob: definitions, TGAs & TOBJs." 
bl_options = {'INTERNAL'} + class Overrides: + """Class encapsulating paintjob overrides creation and export.""" + PROPS_KEY = "props" + ACC_LIST_KEY = "accessories" + + __overrides = OrderedDict() + + @staticmethod + def __generate_props_hash(props): + hash_str = "" + + for key in props: + hash_str += str(props[key]) + + return hash_str + + def __init__(self): + self.__overrides = OrderedDict() + + def __append_to_acc_list(self, props_hash, acc_type_id): + """Add accessory to proper override list depending on given properties hash + + NOTE: There is no check if override with given properties hash exists! So check it before. + + :param props_hash: hash of properties values under which this override will be saved + :type props_hash: str + :param acc_type_id: accessory type and id as concatenated string divided by dot (.) + :type acc_type_id: str + """ + if acc_type_id in self.__overrides[props_hash][self.ACC_LIST_KEY]: + return + + self.__overrides[props_hash][self.ACC_LIST_KEY].add(acc_type_id) + + def __create_override(self, props_hash, acc_type_id, props): + """Creates and saves override structure. + + One override structure is represented with dictonary of two members: + 1. props -> all paintjob properties saved in this override + 2. accessory list -> list of accessoreis compatible with this override + + NOTE: There is no check if override with given propreties hash already exists! So check it before. + + :param props_hash: hash of properties values under which this override will be saved + :type props_hash: str + :param acc_type_id: accessory type and id as concatenated string divided by dot (.) + :type acc_type_id: str + :param props: properties used in this override + :type props: OrderedDict[str, any] + """ + self.__overrides[props_hash] = { + self.PROPS_KEY: props, + self.ACC_LIST_KEY: {acc_type_id, } + } + + def add_accessory(self, acc_type_id, props=OrderedDict()): + """Add acceesory to overides. 
+ + :param acc_type_id: accessory type and id as concatenated string divided by dot (. -> exhaust_m.mg01) + :type acc_type_id: str + :param props: simple paint job properites of this accessory + :type props: OrderedDict[str, any] + """ + props_hash = self.__generate_props_hash(props) + + if props_hash in self.__overrides: + self.__append_to_acc_list(props_hash, acc_type_id) + else: + self.__create_override(props_hash, acc_type_id, props) + + def export_to_sii(self, op_inst, config_path): + """Exports overrides in given config path. + + :param op_inst: generate paintjob operator instance, to bi able to get default values of paintjob properties + :type op_inst: bpy.types.Operator + :param config_path: absolute filepath where overrides should be exported: + /def/vehicle///paint_job/accessory/.sii + :type config_path: str + :return: True if export was successful, False otherwise + :rtype: bool + """ + export_units = [] + + # 1. collect overrides as units + for props_hash in self.__overrides: + override = self.__overrides[props_hash] + + # if no accessories for current override ignore it + if len(override[self.ACC_LIST_KEY]) == 0: + continue + + unit = _UnitData("simple_paint_job_data", ".ovr%i" % len(export_units)) + + # export extra properties only if different than default value + pj_props = override[self.PROPS_KEY] + for key in pj_props: + assert op_inst.append_prop_if_not_default(unit, "pjs_" + key, pj_props[key]) + + # as it can happen now that we don't have any properties in our unit, then it's useless to export it + if len(unit.props) == 0: + continue + + # now fill accessory list + unit.props["acc_list"] = [] + for acc_type_id in sorted(override[self.ACC_LIST_KEY]): + unit.props["acc_list"].append(acc_type_id) + + # finally add it to export units + export_units.append(unit) + + # 2. 
export overrides + return _sii_container.write_data_to_file(config_path, tuple(export_units), create_dirs=True) + + vehicle_type = _PT_consts.VehicleTypes.NONE + + img_node = None + premul_node = None + translate_node = None + viewer_node = None + config_meta_filepath = StringProperty( description="File path to paintjob configuration SII file." ) @@ -2481,156 +2753,232 @@ def do_report(the_type, message, do_report=False): lprint(prefix + message, report_errors=do_report, report_warnings=do_report) - def export_texture(self, orig_img, paintjob_path, texture_portion): + def initialize_nodes(self, context, img): + """Initializes nodes and scene properties to be able to use compositor for rendering of texture portions TGAs. + + :param context: blender context + :type context: bpy.types.Context + :param img: original big common texture image + :type img: bpy.types.Image + """ + + # ensure compositing and scene node tree + + context.scene.render.use_compositing = True + context.scene.use_nodes = True + + _X_SHIFT = 250 + + tree = context.scene.node_tree + nodes = tree.nodes + links = tree.links + + # remove any existing nodes (they are created by default when switching to compositing) + + while len(nodes) > 0: + nodes.remove(nodes[0]) + + # create nodes + + self.img_node = nodes.new(type="CompositorNodeImage") + self.img_node.location = (-_X_SHIFT, 0) + self.img_node.image = img + + self.premul_node = nodes.new(type="CompositorNodePremulKey") + self.premul_node.location = (0, 0) + self.premul_node.mapping = 'STRAIGHT_TO_PREMUL' + + self.translate_node = nodes.new(type="CompositorNodeTranslate") + self.translate_node.location = (_X_SHIFT, 0) + + self.viewer_node = nodes.new(type="CompositorNodeComposite") + self.viewer_node.location = (_X_SHIFT * 2, 0) + self.viewer_node.use_alpha = True + + # create links + + links.new(self.premul_node.inputs["Image"], self.img_node.outputs["Image"]) + links.new(self.translate_node.inputs["Image"], self.premul_node.outputs["Image"]) + 
links.new(self.viewer_node.inputs["Image"], self.translate_node.outputs["Image"]) + + def export_texture(self, orig_img, tgas_dir_path, texture_portion): """Export given texture portion into given paintjob path. :param orig_img: Blender image datablock representing common texture :type orig_img: bpy.types.Image - :param paintjob_path: absolute path to export TGA and TOBJ to - :type paintjob_path: str + :param tgas_dir_path: absolute directory path to export TGA and TOBJ to + :type tgas_dir_path: str :param texture_portion: texture portion defining portion position and size :type texture_portion: io_scs_tools.internals.structure.UnitData - :return: True if export was successful, otherwise False - :rtype: bool + :return: TOBJ path of exported texture, in case sth went wrong return None + :rtype: str | None """ position = [float(i) for i in texture_portion.get_prop("position")] size = [float(i) for i in texture_portion.get_prop("size")] + is_master = bool(texture_portion.get_prop("is_master")) orig_img_width = orig_img.size[0] orig_img_height = orig_img.size[1] - orig_img_start_x = int(orig_img_width * position[0]) - orig_img_start_y = int(orig_img_height * position[1]) + orig_img_start_x = round(orig_img_width * position[0]) + orig_img_start_y = round(orig_img_height * position[1]) - img_width = int(orig_img_width * size[0]) - img_height = int(orig_img_height * size[1]) + img_width = round(orig_img_width * size[0]) + img_height = round(orig_img_height * size[1]) - # create copied data - # We "invoke get" for original image pixels only on the rows where actual portion is positioned. - # Additionally we do that in chunks, so we take only part of the height, - # gaining smaller RAM usage as getting pixels from original image really eats it up. - # In case of having portion of 4kx4k, getting pixels from original image can take up to 7GB rams, - # which isn't really what user might have available. 
- img_pixels = [0.0] * img_width * img_height * 4 - orig_img_pixels = [] - rows_in_chunk = 1024 # this size of chunk seems to work the best for ration of used ram/export speed + # setup compositing nodes for current texture + # NOTE: render clips original image by it's resolution and is centered on the output image, + # thus we have to translate our original image so that portion texture is in the middle of the render - is_img_single_color = self.optimize_single_color_textures # if no optimization then we can already mark image as not a single color - comparing_pixel = [0.0] * 4 # for computation of single color image - for row in range(0, img_height): + orig_img_mid_x = orig_img_start_x + (img_width / 2) + orig_img_mid_y = orig_img_start_y + (img_height / 2) - # on the beginning of the chunk refill pixels from original image - if row % rows_in_chunk == 0: + self.translate_node.inputs["X"].default_value = (orig_img_width / 2) - orig_img_mid_x + self.translate_node.inputs["Y"].default_value = (orig_img_height / 2) - orig_img_mid_y - start_pixel = (orig_img_start_y + row) * orig_img_width * 4 - end_pixel = start_pixel + orig_img_width * rows_in_chunk * 4 - end_pixel = min(end_pixel, start_pixel + orig_img_width * img_height * 4) + # we encode texture name with portion position and size, thus any possible duplicates will end up in same texture + tga_name = "pjm_at_%ix%i_size_%ix%i.tga" % (orig_img_start_x, + orig_img_start_y, + img_width, + img_height) + tga_path = os.path.join(tgas_dir_path, tga_name) - orig_img_pixels = orig_img.pixels[start_pixel:end_pixel] + # export texture portion image by rendering compositor - # use first pixel of current texture portion for comparison - if row == 0: - comparing_pixel = orig_img_pixels[0:4] + scene = bpy.context.scene + scene.render.image_settings.file_format = "TARGA" + scene.render.image_settings.color_mode = "RGBA" if self.export_alpha else "RGB" + scene.render.resolution_percentage = 100 + scene.render.resolution_x = 
img_width + scene.render.resolution_y = img_height + scene.render.filepath = tga_path + bpy.ops.render.render(write_still=True) - orig_start_pixel = (row % rows_in_chunk) * orig_img_width * 4 + orig_img_start_x * 4 - orig_end_pixel = orig_start_pixel + img_width * 4 + # if no optimization or is master then we can skip optimization processing, + # otherwise texture is re-opened and analyzed and + # in case only one color is inside, we export 4x4 texture and update tga path - start_pixel = row * img_width * 4 - end_pixel = start_pixel + img_width * 4 + if self.optimize_single_color_textures and not is_master: - img_pixels[start_pixel:end_pixel] = orig_img_pixels[orig_start_pixel:orig_end_pixel] + lprint("I Analyzing texture for single color...") - # compare for single color image only until all searched pixels have the same color - if is_img_single_color: + is_img_single_color = True + img = bpy.data.images.load(tga_path, check_existing=True) - for i in range(orig_start_pixel, orig_end_pixel, 4): + _CHUNK_SIZE = 2048 * 1024 # testing showed that this chunk size works best + rows_in_chunk = round(_CHUNK_SIZE / img_width) + buffer = [] + comparing_pixel = [0.0] * 4 + for row in range(0, img_height): + + # on the beginning of the chunk refill pixels from image + if row % rows_in_chunk == 0: + + start_pixel = row * img_width * 4 + end_pixel = start_pixel + img_width * rows_in_chunk * 4 + end_pixel = min(end_pixel, img_width * img_height * 4) + + buffer = img.pixels[start_pixel:end_pixel] + + # use first pixel for comparison + if row == 0: + comparing_pixel = buffer[0:4] - # mark image as none single color as soon as first different pixel is found and break for loop - if orig_img_pixels[i:i + 4] != comparing_pixel: + start_px = (row % rows_in_chunk) * img_width * 4 + for i in range(0, img_width * 4, 4): + if buffer[(start_px + i):(start_px + i + 4)] != comparing_pixel: is_img_single_color = False break - # create new texture and copy over data or copy only 4x4 if only one 
color is detected in the texture - if is_img_single_color: + if not is_img_single_color: + break - img_width = img_height = 4 - img_pixels[:] = img_pixels[0:64] + # already exported image not longer needed in Blender, so remove it! - lprint("I Texture portion %r has only one color in common texture, optimizing it by exporting 4x4px TGA!", - (texture_portion.id,)) + img.buffers_free() + bpy.data.images.remove(img, do_unlink=True) - img = bpy.data.images.new(texture_portion.id, img_width, img_height, alpha=True) - img.colorspace_settings.name = "sRGB" # make sure we use sRGB color-profile - img.use_alpha = True - img.pixels[:] = img_pixels + # finally export single texture - # save - scene = bpy.context.scene - scene.render.image_settings.file_format = "TARGA" - scene.render.image_settings.color_mode = "RGBA" if self.export_alpha else "RGB" - img.save_render(paintjob_path, bpy.context.scene) + if is_img_single_color: + + # don't forget to remove previously exported big TGA + os.remove(tga_path) + + _SINGLE_COLOR_IMAGE_NAME = "single_color_image" + + if _SINGLE_COLOR_IMAGE_NAME not in bpy.data.images: + bpy.data.images.new(_SINGLE_COLOR_IMAGE_NAME, 4, 4, alpha=True) + + img = bpy.data.images[_SINGLE_COLOR_IMAGE_NAME] + img.colorspace_settings.name = "sRGB" # make sure we use sRGB color-profile + img.use_alpha = True + img.pixels[:] = comparing_pixel * 16 + + # we use shared prefix for 4x4 textures in case any other portion will be using same one + tga_name = "shared_%.2x%.2x%.2x%.2x.tga" % (int(comparing_pixel[0] * 255.0), + int(comparing_pixel[1] * 255.0), + int(comparing_pixel[2] * 255.0), + int(comparing_pixel[3] * 255.0)) + tga_path = os.path.join(tgas_dir_path, tga_name) + + img.save_render(tga_path, bpy.context.scene) - # remove image data-block, as we don't need it anymore - orig_img.buffers_free() - bpy.data.images.remove(img, do_unlink=True) + lprint("I Texture portion %r has only one color in common texture, optimizing it by exporting 4x4px TGA!", 
(texture_portion.id,)) # write TOBJ beside tga file + + tobj_path = tga_path[:-4] + ".tobj" tobj_cont = _TobjContainer() tobj_cont.map_type = "2d" - tobj_cont.map_names.append(os.path.basename(paintjob_path)) + tobj_cont.map_names.append(tga_name) tobj_cont.addr.append("clamp_to_edge") tobj_cont.addr.append("clamp_to_edge") - tobj_cont.filepath = paintjob_path[:-4] + ".tobj" + tobj_cont.filepath = tobj_path + + # if there is any error by writing just return none + if not tobj_cont.write_data_to_file(): + return None - return tobj_cont.write_data_to_file() + return tobj_path - def export_sii(self, config_path, pj_token, pj_full_unit_name, pj_props, is_master=False, master_model_sii_unit_name=None): - """Export SII configuration file into given absolute path. + def export_master_sii(self, config_path, pj_token, pj_full_unit_name, pj_props, suitable_for=None): + """Export master SII configuration file into given absolute path. :param config_path: absolute file path for SII where it should be epxorted :type config_path: str :param pj_token: string of original paintjob token got from original TGA file name :type pj_token: str - :param pj_full_unit_name: for master paintjob: ..paint_job, otherwise .simplepj + :param pj_full_unit_name: ..paint_job :type pj_full_unit_name: str :param pj_props: dictionary of sii unit attributes to be writen in sii (key: name of attribute, value: any sii unit compatible object) :type pj_props: dict[str | object] - :param is_master: True for master paint job texture, otherwise False - :type is_master: bool - :param master_model_sii_unit_name: full unit name of referenced model inside master; if None suitable_for property won't be written - :type master_model_sii_unit_name: str | None + :param suitable_for: possible suitables of this paintjob; if None suitable_for property won't be written + :type suitable_for: list(str) | None :return: True if export was successful, False otherwise :rtype: bool """ - if is_master: - data_type = 
"accessory_paint_job_data" - else: - data_type = "simple_paint_job_data" - - unit = _UnitData(data_type, pj_full_unit_name) - - # write paint job settings only into master - if is_master: + unit = _UnitData("accessory_paint_job_data", pj_full_unit_name) - pj_settings_sui_name = pj_token + "_settings.sui" + pj_settings_sui_name = pj_token + "_settings.sui" - # export paint job settings SUI file - assert self.export_settings_sui(os.path.join(os.path.dirname(config_path), pj_settings_sui_name)) + # export paint job settings SUI file + assert self.export_settings_sui(os.path.join(os.path.dirname(config_path), pj_settings_sui_name)) - # write include into paint job sii - unit.props["@include"] = pj_settings_sui_name - - # create suitable to model sii unit name - if master_model_sii_unit_name: - unit.props["suitable_for"] = [master_model_sii_unit_name, ] + # write include into paint job sii + unit.props["@include"] = pj_settings_sui_name # export extra properties only if different than default value for key in pj_props: assert self.append_prop_if_not_default(unit, "pjs_" + key, pj_props[key]) + # export suitable for property + if suitable_for and len(suitable_for) > 0: + unit.props["suitable_for"] = suitable_for + # as it can happen now that we don't have any properties in our unit, then it's useless to export it if len(unit.props) == 0: lprint("I Unit has not properties thus useless to export empty SII, ignoring it: %r", (config_path,)) @@ -2654,6 +3002,11 @@ def export_settings_sui(self, settings_sui_path): unit = _UnitData("", "", is_headless=True) + # if old settings file has steam_inventory_id attribute, recover it! 
+ old_settings_container = _sii_container.get_data_from_file(settings_sui_path, is_sui=True) + if old_settings_container and "steam_inventory_id" in old_settings_container[0].props: + unit.props["steam_inventory_id"] = int(old_settings_container[0].props["steam_inventory_id"]) + # force export of mandatory properties unit.props["name"] = self.pjs_name unit.props["price"] = self.pjs_price @@ -2741,11 +3094,22 @@ def execute(self, context): self.do_report({'WARNING'}, "Given paintjob layout META file does not exist: %r!" % self.config_meta_filepath) return {'CANCELLED'} - # get truck brand model token - brand_model_token = os.path.basename(os.path.abspath(os.path.join(self.config_meta_filepath, os.pardir))) + # get vehicle brand model token + curr_dir = os.path.abspath(os.path.join(self.config_meta_filepath, os.pardir)) + brand_model_token = os.path.basename(curr_dir) + + # get vehicle type + curr_dir = os.path.abspath(os.path.join(curr_dir, os.pardir)) + if os.path.basename(curr_dir) == _PT_consts.VehicleTypes.TRUCK: + self.vehicle_type = _PT_consts.VehicleTypes.TRUCK + elif os.path.basename(curr_dir) == _PT_consts.VehicleTypes.TRAILER: + self.vehicle_type = _PT_consts.VehicleTypes.TRAILER + else: + self.do_report({'ERROR'}, "Given paintjob layout META file is in wrong directory!") + return {'CANCELLED'} - if not self.common_texture_path.endswith(".tga"): - self.do_report({'ERROR'}, "Given common texture is not TGA file: %r!" % self.common_texture_path) + if not self.common_texture_path.endswith(".tif"): + self.do_report({'ERROR'}, "Given common texture is not TIF file: %r!" 
% self.common_texture_path) return {'CANCELLED'} if not os.path.isfile(self.common_texture_path): @@ -2770,13 +3134,13 @@ def execute(self, context): orig_project_path = _path_utils.readable_norm(os.path.dirname(self.common_texture_path)) - # we can simply go 5 dirs up, as paintjob has to be properly placed /vehicle/truck/upgrade/paintjob/ + # we can simply go 5 dirs up, as paintjob has to be properly placed /vehicle//upgrade/paintjob/ for _ in range(0, 5): orig_project_path = _path_utils.readable_norm(os.path.join(orig_project_path, os.pardir)) if not os.path.isdir(orig_project_path): self.do_report({'ERROR'}, "Paintjob TGA seems to be saved outside proper structure, should be inside\n" - "'/vehicle/truck/upgrade/paintjob//', instead is in:\n" + "'/vehicle//upgrade/paintjob//', instead is in:\n" "%r" % self.common_texture_path) return {'CANCELLED'} @@ -2804,13 +3168,13 @@ def execute(self, context): model_token = brand_model_dir[underscore_idx + 1:] is_common_tex_path_invalid = ( - brand_model_token != brand_token + "." + model_token or - not common_tex_dirpath.endswith("/vehicle/truck/upgrade/paintjob/" + brand_model_dir) + brand_model_token != brand_token + "." + model_token or + not common_tex_dirpath.endswith("/vehicle/" + self.vehicle_type + "/upgrade/paintjob/" + brand_model_dir) ) if is_common_tex_path_invalid: self.do_report({'ERROR'}, "Paintjob TGA file isn't saved on correct place, should be inside\n" - "'/vehicle/truck/upgrade/paintjob/%s' instead is saved in:\n" + "'/vehicle//upgrade/paintjob/%s' instead is saved in:\n" "%r." % (brand_model_token.replace(".", "_"), common_tex_dirpath)) return {'CANCELLED'} @@ -2818,7 +3182,7 @@ def execute(self, context): ################################## # - # 2. parse paintjob layout config file + # 2. 
parse and validate paintjob layout config file # ################################## @@ -2834,7 +3198,7 @@ def execute(self, context): common_texture_size = [int(i) for i in _sii_container.get_unit_property(pj_config_sii_container, "common_texture_size")] # get and validate texture portion unit existence - texture_portions = {} + texture_portions = OrderedDict() texture_portion_names = _sii_container.get_unit_property(pj_config_sii_container, "texture_portions") if texture_portion_names: @@ -2857,13 +3221,38 @@ def execute(self, context): # collect master portions to be able to properly export all override paintjob masks and other paint job attributes master_portions = [] + no_model_sii_master_count = 0 + model_sii_master_count = 0 + master_unit_suffixes = set() for unit_id in texture_portions: texture_portion = texture_portions[unit_id] is_master = bool(texture_portion.get_prop("is_master")) + master_unit_suffix = texture_portion.get_prop("master_unit_suffix", "") + model_sii = texture_portion.get_prop("model_sii") - if is_master is True: - master_portions.append(texture_portion) + if not is_master: + continue + + # check unique suffixes + if master_unit_suffix in master_unit_suffixes: + self.do_report({'ERROR'}, "Multiple master textures using same unit suffix: %r. " + "Make sure all unit suffixes are unique." % master_unit_suffix) + return {'CANCELLED'} + + # check for no model sii definition + if model_sii: + model_sii_master_count += 1 + else: + no_model_sii_master_count += 1 + + master_unit_suffixes.add(master_unit_suffix) + master_portions.append(texture_portion) + + if no_model_sii_master_count > 0 and (no_model_sii_master_count + model_sii_master_count) > 1: + self.do_report({'ERROR'}, "One or more master texture portions detected without model SII path. 
" + "Either define model SII path for all of them or use only one master portion without it!") + return {'CANCELLED'} lprint("D Found texture portions: %r", (texture_portions.keys(),)) @@ -2876,11 +3265,24 @@ def execute(self, context): common_tex_img = bpy.data.images.load(self.common_texture_path, check_existing=False) common_tex_img.use_alpha = self.export_alpha + self.initialize_nodes(context, common_tex_img) + if tuple(common_tex_img.size) != tuple(common_texture_size) and not self.export_configs_only: self.do_report({'ERROR'}, "Wrong size of common texture TGA: [%s, %s], paintjob layout META is prescribing different size: %r!" % (common_tex_img.size[0], common_tex_img.size[1], common_texture_size)) return {'CANCELLED'} + # get textures export dir + tgas_dir_path = os.path.join(common_tex_dirpath, pj_token) + + # first remove old TGAs, TOBJs if directory already exists + if os.path.isdir(tgas_dir_path): + for file in os.listdir(tgas_dir_path): + current_file_path = os.path.join(tgas_dir_path, file) + if os.path.isfile(current_file_path) and (current_file_path.endswith(".tga") or current_file_path.endswith(".tobj")): + os.remove(current_file_path) + + # do export by portion id texture_portions_tobj_paths = {} # storing TGA paths for each texture portion, used later for referencing textures in SIIs exported_portion_textures = set() # storing already exported texture portion to avoid double exporting same TGA for unit_id in texture_portions: @@ -2891,28 +3293,19 @@ def execute(self, context): texture_portion = texture_portions[unit_id] - parent = texture_portions[unit_id].get_prop("parent") - while parent: - texture_portion = _sii_container.get_unit_by_id(pj_config_sii_container, parent, texture_portion.type) - parent = texture_portion.get_prop("parent") - - # get TGA path for this texture portion - tga_path = os.path.join(common_tex_dirpath, pj_token) - tga_path = os.path.join(tga_path, texture_portion.id.lstrip(".")) + ".tga" # TGA file name is always texture 
portion unit id - - # save TOBJ path to dictionary for later usage in config generation - texture_portions_tobj_paths[unit_id] = tga_path[:-4] + ".tobj" - - # filter out already exported texture portions - if texture_portion.id in exported_portion_textures: + # as parented texture portions do not own texture just ignore them + if texture_portions[unit_id].get_prop("parent"): continue + # mark this portion as exported exported_portion_textures.add(texture_portion.id) - # export TGA - assert self.export_texture(common_tex_img, tga_path, texture_portion) + # export TGA & save TOBJ path to dictionary for later usage in config generation + exported_tobj_path = self.export_texture(common_tex_img, tgas_dir_path, texture_portion) + assert exported_tobj_path is not None # nothing should go wrong thus we have to assert here + texture_portions_tobj_paths[unit_id] = exported_tobj_path - lprint("I Exported: %r", (tga_path,)) + lprint("I Exported: %r", (exported_tobj_path,)) ################################## # @@ -2926,65 +3319,45 @@ def execute(self, context): game_project_path = _path_utils.readable_norm(os.path.join(game_project_path, os.pardir)) project_paths = sorted(_path_utils.get_projects_paths(game_project_path), reverse=True) # sort them so dlcs & mods have priority - truck_def_subdir = os.path.join("def/vehicle/truck", brand_model_token) - - # clean old override simple paintjob configs - for truck_part in ("cabin", "chassis", "accessory"): + vehicle_def_subdir = os.path.join("def/vehicle/" + self.vehicle_type, brand_model_token) - # config path: "/def/vehicle/truck///paint_job/" - config_path = os.path.join(orig_project_path, truck_def_subdir) - config_path = os.path.join(config_path, truck_part) + # prepare overrides and their directory path: "/def/vehicle///paint_job/accessory/" + overrides = {} - if truck_part == "accessory" and os.path.isdir(config_path): + overrides_config_dir = os.path.join(orig_project_path, vehicle_def_subdir) + overrides_config_dir = 
os.path.join(overrides_config_dir, "paint_job") + overrides_config_dir = os.path.join(overrides_config_dir, "accessory") - for directory in os.listdir(config_path): + # delete old overrides files for this paintjob if they exists + if os.path.isdir(overrides_config_dir): + for file in os.listdir(overrides_config_dir): + pj_config_path = os.path.join(overrides_config_dir, file) + # match beginning and end of the file name + if os.path.isfile(pj_config_path) and file.startswith(pj_token) and file.endswith(".sii"): + os.remove(pj_config_path) - # accessory_dir: "/def/vehicle/truck//accessory//paint_job/" - accessory_dir = os.path.join(config_path, directory) - accessory_dir = os.path.join(accessory_dir, "paint_job") - - if not os.path.isdir(accessory_dir): - continue - - for file in os.listdir(accessory_dir): - pj_config_path = os.path.join(accessory_dir, file) - # match beginning and end of the file name - if os.path.isfile(pj_config_path) and file.startswith(pj_token) and file.endswith(".sii"): - os.remove(pj_config_path) - - else: - - # truck_part_dir: "/def/vehicle/truck///paint_job/" - truck_part_dir = os.path.join(config_path, "paint_job") - - if os.path.isdir(truck_part_dir): - - for file in os.listdir(truck_part_dir): - pj_config_path = os.path.join(truck_part_dir, file) - # match beginning and end of the file name - if os.path.isfile(pj_config_path) and file.startswith(pj_token) and file.endswith(".sii"): - os.remove(pj_config_path) - - # iterate texture portions and write all needed configs for it + # iterate texture portions, write master configs and collect overrides for unit_id in texture_portions: texture_portion = texture_portions[unit_id] - model_sii = texture_portion.get_prop("model_sii") + model_sii = texture_portion.get_prop("model_sii", "") is_master = bool(texture_portion.get_prop("is_master")) - master_unit_suffix = texture_portion.get_prop("master_unit_suffix") + + # master can exist without model sii reference! 
+ requires_valid_model_sii = (not is_master) or (model_sii != "") parent = curr_parent = texture_portion.get_prop("parent") while curr_parent: parent = curr_parent curr_parent = texture_portions[parent].get_prop("parent") - # don't write config for texture portions when top most parent is master + # don't collect override for texture portions when top most parent is master if parent and bool(texture_portions[parent].get_prop("is_master")): continue # check for SIIs from "model_sii" in all projects - model_sii_subpath = os.path.join(truck_def_subdir, model_sii) + model_sii_subpath = os.path.join(vehicle_def_subdir, model_sii) model_sii_path = os.path.join(orig_project_path, model_sii_subpath) @@ -2997,20 +3370,23 @@ def execute(self, context): sii_exists = True break - if not sii_exists: - lprint("E Can't find referenced 'model_sii' file for texture portion %r, aborting SII write!", (texture_portion.id,)) + if not sii_exists and requires_valid_model_sii: + lprint("E Can't find referenced 'model_sii' file for texture portion %r, aborting overrides SII write!", (texture_portion.id,)) return {'CANCELLED'} - # assamble paintjob properties that will be written in each SII (currently: paint_job_mask, ) + # assemble paintjob properties that will be written in overrides SII (currently: paint_job_mask, flake_uvscale, flake_vratio) pj_props = OrderedDict() + # collect paintjob mask texture if not self.export_configs_only: - rel_tobj_path = os.path.relpath(texture_portions_tobj_paths[unit_id], orig_project_path) + tobj_paths_unit_id = texture_portions[parent].id if parent else unit_id + rel_tobj_path = os.path.relpath(texture_portions_tobj_paths[tobj_paths_unit_id], orig_project_path) pj_props["paint_job_mask"] = _path_utils.readable_norm("/" + rel_tobj_path) - # export either master paint job config or override + # export either master paint job config or collect override if is_master: + master_unit_suffix = texture_portion.get_prop("master_unit_suffix", "") 
suffixed_pj_unit_name = pj_token + master_unit_suffix if _name_utils.tokenize_name(suffixed_pj_unit_name) != suffixed_pj_unit_name: lprint("E Can't tokenize generated paintjob unit name: %r for texture portion %r, aborting SII write!", @@ -3019,46 +3395,40 @@ def execute(self, context): # get model sii unit name to use it in suitable for field model_sii_cont = _sii_container.get_data_from_file(model_sii_path) - if not model_sii_cont: + if not model_sii_cont and requires_valid_model_sii: lprint("E SII is there but getting unit name from 'model_sii' failed for texture portion %r, aborting SII write!", (texture_portion.id,)) return {'CANCELLED'} # unit name of referenced model sii used for suitable_for field in master paint jobs - master_model_sii_unit_name = model_sii_cont[0].id + pj_suitable_for = [] + if model_sii_cont: + pj_suitable_for.append(model_sii_cont[0].id) - # config path: "/def/vehicle/truck//paint_job/.sii" - config_path = os.path.join(orig_project_path, truck_def_subdir) + # if there are any other suitables in master texture portion also add it + suitable_for = texture_portion.get_prop("suitable_for", []) + pj_suitable_for.extend(suitable_for) + + # config path: "/def/vehicle///paint_job/.sii" + config_path = os.path.join(orig_project_path, vehicle_def_subdir) config_path = os.path.join(config_path, "paint_job") config_path = os.path.join(config_path, suffixed_pj_unit_name + ".sii") # full paint job unit name: ..paint_job pj_full_unit_name = suffixed_pj_unit_name + "." 
+ brand_model_token + ".paint_job" - assert self.export_sii(config_path, pj_token, pj_full_unit_name, pj_props, True, master_model_sii_unit_name) + assert self.export_master_sii(config_path, pj_token, pj_full_unit_name, pj_props, pj_suitable_for) lprint("I Created master SII config for %r: %r", (texture_portion.id, config_path)) else: model_type = str(model_sii).split("/")[0] - if model_type in ("accessory", "cabin", "chassis"): + if model_type in ("accessory", "cabin", "chassis", "body"): model_sii_cont = _sii_container.get_data_from_file(model_sii_path) - truck_acc_unit_name = model_sii_cont[0].id.split(".")[0] # first token - acc_type_unit_name = model_sii_cont[0].id.split(".")[-1] # last token - - # config path: "/def/vehicle/truck//accessory//paint_job/..sii" - config_path = os.path.join(orig_project_path, truck_def_subdir) - config_path = os.path.join(config_path, model_type) - - if model_type == "accessory": - config_path = os.path.join(config_path, acc_type_unit_name) - - config_path = os.path.join(config_path, "paint_job") - - # for overrides full paint job name is always .simplepj - pj_full_unit_name = ".simplepj" + acc_id_token = model_sii_cont[0].id.split(".")[0] # first token + acc_type_token = model_sii_cont[0].id.split(".")[-1] # last token if parent: portion_size = [float(i) for i in texture_portions[parent].get_prop("size")] @@ -3070,9 +3440,7 @@ def execute(self, context): for master_portion in master_portions: master_size = [float(i) for i in master_portion.get_prop("size")] - master_unit_suffix = master_portion.get_prop("master_unit_suffix") - - curr_config_path = os.path.join(config_path, pj_token + master_unit_suffix + "." 
+ truck_acc_unit_name + ".sii") + master_unit_suffix = master_portion.get_prop("master_unit_suffix", "") if self.pjs_flipflake: @@ -3083,16 +3451,30 @@ def execute(self, context): # then just divide original texture height with calculated one pj_props["flake_vratio"] = portion_size[1] / (master_size[1] * portion_size[0] / master_size[0]) - assert self.export_sii(curr_config_path, pj_token, pj_full_unit_name, pj_props, False) - lprint("I Created override SII config for %r: %r", (texture_portion.id, config_path)) + # don't create override if there is no properties to override! + if len(pj_props) == 0: + continue + + # ensure current master portion has it's own overrides + config_path = os.path.join(overrides_config_dir, pj_token + master_unit_suffix + ".sii") + if config_path not in overrides: + overrides[config_path] = PaintjobTools.GeneratePaintjob.Overrides() + + # now add current accessory to overrides + overrides[config_path].add_accessory(acc_type_token + "." + acc_id_token, pj_props) else: - lprint("E Can not create paintjob config for texture portion: %r, as 'model_sii' property is not one of: " + lprint("E Can not collect override for texture portion: %r, as 'model_sii' property is not one of: " "accessory, cabin or chassis neither is texture portion marked with 'is_master'!", (texture_portion.id,)) return {'CANCELLED'} + # export overrides SII files + for config_path in overrides: + assert overrides[config_path].export_to_sii(self, config_path) + lprint("I Create override SII: %r", (config_path,)) + # finally we can remove original TGA if not self.preserve_common_texture and os.path.isfile(self.common_texture_path): os.remove(self.common_texture_path) diff --git a/addon/io_scs_tools/operators/wm.py b/addon/io_scs_tools/operators/wm.py index 0a08015..986ac5b 100644 --- a/addon/io_scs_tools/operators/wm.py +++ b/addon/io_scs_tools/operators/wm.py @@ -97,6 +97,8 @@ class Show3DViewReport(bpy.types.Operator): """Used for saving progress message inside class to 
be able to retrieve it on open gl draw.""" __static_abort = False """Used to propage abort message to all instances, so when abort is requested all instances will kill itself.""" + __static_scroll_pos = 0 + """Used to designate current scroll position in case not all warnings can be shown in 3D view.""" esc_abort = 0 """Used for staging ESC key press in operator: @@ -164,6 +166,14 @@ def get_lines(): lines = [] lines.extend(Show3DViewReport.__static_progress_message_l) lines.extend(Show3DViewReport.__static_message_l) + + # do scrolling + + lines_to_scroll = Show3DViewReport.__static_scroll_pos + while lines_to_scroll > 0: + lines.pop(0) + lines_to_scroll = lines_to_scroll - 1 + return lines @staticmethod @@ -200,6 +210,15 @@ def is_in_btn_area(x, y, btn_area): return (btn_area[0] < x < btn_area[1] and btn_area[2] < y < btn_area[3]) + @staticmethod + def is_scrolled(): + """Tells if text is scrolled down. + + :return: True if we are not on zero scroll position; False otherwise + :rtype: bool + """ + return Show3DViewReport.__static_scroll_pos != 0 + def __init__(self): Show3DViewReport.__static_running_instances += 1 @@ -280,7 +299,19 @@ def modal(self, context, event): if Show3DViewReport.is_in_btn_area(curr_x, curr_y, _OP_consts.View3DReport.HIDE_BTN_AREA): # show/hide Show3DViewReport.__static_is_shown = not Show3DViewReport.__static_is_shown + _view3d_utils.tag_redraw_all_view3d() + return {'RUNNING_MODAL'} + + # scrool up/down + if Show3DViewReport.is_in_btn_area(curr_x, curr_y, _OP_consts.View3DReport.SCROLLUP_BTN_AREA): + + Show3DViewReport.__static_scroll_pos = max(Show3DViewReport.__static_scroll_pos - 5, 0) + _view3d_utils.tag_redraw_all_view3d() + return {'RUNNING_MODAL'} + + elif Show3DViewReport.is_in_btn_area(curr_x, curr_y, _OP_consts.View3DReport.SCROLLDOWN_BTN_AREA): + Show3DViewReport.__static_scroll_pos = min(Show3DViewReport.__static_scroll_pos + 5, len(Show3DViewReport.__static_message_l)) _view3d_utils.tag_redraw_all_view3d() return 
{'RUNNING_MODAL'} diff --git a/addon/io_scs_tools/properties/material.py b/addon/io_scs_tools/properties/material.py index 38e213b..f883721 100644 --- a/addon/io_scs_tools/properties/material.py +++ b/addon/io_scs_tools/properties/material.py @@ -673,6 +673,14 @@ def update_shader_texture_reflection_settings(self, context): update=update_shader_attribute_tint_opacity ) + shader_attribute_queue_bias = IntProperty( + name="Queue Bias", + description="SCS shader 'Queue Bias' value", + default=2, + options={'HIDDEN'}, + update=__update_look__ + ) + # TEXTURE: BASE shader_texture_base = StringProperty( name="Texture Base", diff --git a/addon/io_scs_tools/properties/object.py b/addon/io_scs_tools/properties/object.py index 0353e27..7e9d49b 100644 --- a/addon/io_scs_tools/properties/object.py +++ b/addon/io_scs_tools/properties/object.py @@ -535,7 +535,7 @@ def locator_preview_model_path_update(self, context): """ obj = context.object - if _preview_models.load(obj): + if _preview_models.load(obj, deep_reload=True): # fix selection because in case of actual loading model from file selection will be messed up obj.select = True @@ -838,11 +838,14 @@ def locator_prefab_type_update(self, context): (_PL_consts.PSP.BUS_STATION, (str(_PL_consts.PSP.BUS_STATION), "Bus Station", "")), (_PL_consts.PSP.CAMERA_POINT, (str(_PL_consts.PSP.CAMERA_POINT), "Camera Point", "")), (_PL_consts.PSP.COMPANY_POS, (str(_PL_consts.PSP.COMPANY_POS), "Company Point", "")), + (_PL_consts.PSP.COMPANY_UNLOAD_POS, (str(_PL_consts.PSP.COMPANY_UNLOAD_POS), "Company Unload Point", "")), # (_PL_consts.PSP.CUSTOM, (str(_PL_consts.PSP.CUSTOM), "Custom", "")), (_PL_consts.PSP.GARAGE_POS, (str(_PL_consts.PSP.GARAGE_POS), "Garage Point", "")), (_PL_consts.PSP.GAS_POS, (str(_PL_consts.PSP.GAS_POS), "Gas Station", "")), # (_PL_consts.PSP.HOTEL, (str(_PL_consts.PSP.HOTEL), "Hotel", "")), + (_PL_consts.PSP.LONG_TRAILER_POS, (str(_PL_consts.PSP.LONG_TRAILER_POS), "Long Trailer", "")), # (_PL_consts.PSP.MEET_POS, 
(str(_PL_consts.PSP.MEET_POS), "Meet", "")), + (_PL_consts.PSP.TRAILER_SPAWN, (str(_PL_consts.PSP.TRAILER_SPAWN), "Owned Trailer", "")), (_PL_consts.PSP.PARKING, (str(_PL_consts.PSP.PARKING), "Parking", "")), (_PL_consts.PSP.RECRUITMENT_POS, (str(_PL_consts.PSP.RECRUITMENT_POS), "Recruitment", "")), (_PL_consts.PSP.SERVICE_POS, (str(_PL_consts.PSP.SERVICE_POS), "Service Station", "")), diff --git a/addon/io_scs_tools/shader_presets.txt b/addon/io_scs_tools/shader_presets.txt index b4086c6..54bbcf8 100644 --- a/addon/io_scs_tools/shader_presets.txt +++ b/addon/io_scs_tools/shader_presets.txt @@ -1430,6 +1430,17 @@ Shader { TexCoord: ( 0 ) } } +Shader { + PresetName: "retroreflective" + Effect: "eut2.retroreflective" + Flavors: ( "NMAP_TS|NMAP_TS_UV|NMAP_TS_16|NMAP_TS_UV_16" "RETROREFLECTIVE_DIM_ALLDIR|RETROREFLECTIVE_PIKO_ALLDIR" "RETROREFLECTIVE_DECAL" ) + Flags: 0 + Texture { + Tag: "texture[X]:texture_base" + Value: "" + TexCoord: ( 0 ) + } +} Shader { PresetName: "shadowmap" Effect: "eut2.shadowmap" @@ -1846,6 +1857,11 @@ Flavor { Flavor { Type: "DEPTH" Name: "decal" + Attribute { + Format: INT + Tag: "queue_bias" + Value: ( 0 ) + } } Flavor { Type: "ENVMAP" @@ -1963,6 +1979,18 @@ Flavor { Type: "PAINT" Name: "paint" } +Flavor { + Type: "RETROREFLECTIVE_DECAL" + Name: "decal.over" +} +Flavor { + Type: "RETROREFLECTIVE_DIM_ALLDIR" + Name: "dim.alldir" +} +Flavor { + Type: "RETROREFLECTIVE_PIKO_ALLDIR" + Name: "piko.alldir" +} Flavor { Type: "TEXGEN0" Name: "tg0" diff --git a/addon/io_scs_tools/supported_effects.bin b/addon/io_scs_tools/supported_effects.bin index a5b9611..ef6efe1 100644 Binary files a/addon/io_scs_tools/supported_effects.bin and b/addon/io_scs_tools/supported_effects.bin differ diff --git a/addon/io_scs_tools/ui/material.py b/addon/io_scs_tools/ui/material.py index d903251..64db22f 100644 --- a/addon/io_scs_tools/ui/material.py +++ b/addon/io_scs_tools/ui/material.py @@ -247,6 +247,8 @@ def _draw_shader_attribute(layout, mat, split_perc, 
attribute): item_space.prop(mat.scs_props, 'shader_attribute_tint', text='') elif tag == 'tint_opacity': item_space.prop(mat.scs_props, 'shader_attribute_tint_opacity', text='') + elif tag == 'queue_bias': + item_space.prop(mat.scs_props, 'shader_attribute_queue_bias', text='') elif tag.startswith("aux") and hasattr(mat.scs_props, "shader_attribute_" + tag): col = item_space.column().column(align=True) diff --git a/addon/io_scs_tools/ui/tool_shelf.py b/addon/io_scs_tools/ui/tool_shelf.py index 72e7afc..87da296 100644 --- a/addon/io_scs_tools/ui/tool_shelf.py +++ b/addon/io_scs_tools/ui/tool_shelf.py @@ -435,7 +435,7 @@ def poll(cls, context): def draw(self, context): layout = self.layout body_col = layout.column(align=True) - active_vcol_name = context.active_object.data.vertex_colors.active.name + active_vcol_name = context.vertex_paint_object.data.vertex_colors.active.name for layer_name in _VCT_consts.ColoringLayersTypes.as_list(): diff --git a/addon/io_scs_tools/utils/__init__.py b/addon/io_scs_tools/utils/__init__.py index d0ec1ed..487c04b 100644 --- a/addon/io_scs_tools/utils/__init__.py +++ b/addon/io_scs_tools/utils/__init__.py @@ -40,3 +40,15 @@ def get_scs_globals(): :rtype: io_scs_tools.properties.world.GlobalSCSProps """ return __get_world__().scs_globals + + +def ensure_scs_globals_save(): + """Function for ensuring that scs globals get's saved into the blend file, + even if world is unliked by the user. 
+ """ + world = __get_world__() + + if world.users == 0: # no users, use fake one + world.use_fake_user = True + elif world.users >= 2: # multiple users, switch of fake one as data will get saved anyway + world.use_fake_user = False diff --git a/addon/io_scs_tools/utils/curve.py b/addon/io_scs_tools/utils/curve.py index b64ad60..a56490b 100644 --- a/addon/io_scs_tools/utils/curve.py +++ b/addon/io_scs_tools/utils/curve.py @@ -383,6 +383,9 @@ def curves_intersect(curve1_p1, curve1_t1, curve1_p2, curve1_t2, length1, start2 = smooth_curve_position(curve2_p1, curve2_t1, curve2_p2, curve2_t2, pos2 / length2) end2 = smooth_curve_position(curve2_p1, curve2_t1, curve2_p2, curve2_t2, (pos2 + step2) / length2) + if abs(start1[1] - start2[1]) > 4.0 or abs(end1[1] - end2[1]) > 4.0: + continue + denom = ((end2[2] - start2[2]) * (end1[0] - start1[0])) - ((end2[0] - start2[0]) * (end1[2] - start1[2])) nume_a = ((end2[0] - start2[0]) * (start1[2] - start2[2])) - ((end2[2] - start2[2]) * (start1[0] - start2[0])) nume_b = ((end1[0] - start1[0]) * (start1[2] - start2[2])) - ((end1[2] - start1[2]) * (start1[0] - start2[0])) diff --git a/addon/io_scs_tools/utils/material.py b/addon/io_scs_tools/utils/material.py index 4cb1904..852a8de 100644 --- a/addon/io_scs_tools/utils/material.py +++ b/addon/io_scs_tools/utils/material.py @@ -307,7 +307,7 @@ def set_shader_data_to_material(material, section, is_import=False, override_bac created_attributes[attribute_type] = material.scs_props["shader_attribute_" + attribute_type] - elif attribute_type in ("shininess", "add_ambient", "reflection", "reflection2", "shadow_bias", "tint_opacity"): + elif attribute_type in ("shininess", "add_ambient", "reflection", "reflection2", "shadow_bias", "tint_opacity", "queue_bias"): if not old_value: material.scs_props["shader_attribute_" + attribute_type] = attribute_data['Value'][0] diff --git a/addon/io_scs_tools/utils/mesh.py b/addon/io_scs_tools/utils/mesh.py index 1a0205a..54be0ff 100644 --- 
a/addon/io_scs_tools/utils/mesh.py +++ b/addon/io_scs_tools/utils/mesh.py @@ -401,13 +401,22 @@ def bm_make_vc_layer(pim_version, bm, vc_layer_name, vc_layer_data, multiplier=1 :param vc_layer_data: Vertex Color Layer data :type vc_layer_data: list """ + # only 5 and 7 versions are supported currently + assert (pim_version == 5 or pim_version == 7) + color_lay = bm.loops.layers.color.new(vc_layer_name) - color_a_lay = bm.loops.layers.color.new(vc_layer_name + _MESH_consts.vcol_a_suffix) + + vc_alpha_layer_name = vc_layer_name + _MESH_consts.vcol_a_suffix + if pim_version == 5 and len(vc_layer_data[0]) == 4: + color_a_lay = bm.loops.layers.color.new(vc_alpha_layer_name) + elif pim_version == 7 and len(vc_layer_data[0][0]) == 4: + color_a_lay = bm.loops.layers.color.new(vc_alpha_layer_name) + for face_i, face in enumerate(bm.faces): f_v = [x.index for x in face.verts] for loop_i, loop in enumerate(face.loops): - alpha = 1.0 - if pim_version < 6: + alpha = -1.0 + if pim_version == 5: if len(vc_layer_data[0]) == 3: vcol = vc_layer_data[f_v[loop_i]] else: @@ -421,9 +430,12 @@ def bm_make_vc_layer(pim_version, bm, vc_layer_name, vc_layer_data, multiplier=1 alpha = vc_layer_data[face_i][loop_i][3] vcol = (vcol[0] / 2 / multiplier, vcol[1] / 2 / multiplier, vcol[2] / 2 / multiplier) - vcol_a = (alpha / 2 / multiplier,) * 3 loop[color_lay] = vcol - loop[color_a_lay] = vcol_a + + if alpha != -1.0: + assert color_a_lay + vcol_a = (alpha / 2 / multiplier,) * 3 + loop[color_a_lay] = vcol_a def bm_delete_loose(mesh): diff --git a/addon/io_scs_tools/utils/object.py b/addon/io_scs_tools/utils/object.py index 8bcd154..9304147 100644 --- a/addon/io_scs_tools/utils/object.py +++ b/addon/io_scs_tools/utils/object.py @@ -759,7 +759,7 @@ def get_mesh(obj): if _get_scs_globals().export_output_type.startswith('EF'): if _get_scs_globals().export_apply_modifiers: if _get_scs_globals().export_exclude_edgesplit: - disabled_modifiers.append(disable_modifiers(obj, 
modifier_type_to_disable='EDGE_SPLIT')) + disabled_modifiers.extend(disable_modifiers(obj, modifier_type_to_disable='EDGE_SPLIT')) mesh = obj.to_mesh(scene, True, 'PREVIEW') else: mesh = obj.data @@ -768,10 +768,10 @@ def get_mesh(obj): mesh = obj.to_mesh(scene, True, 'PREVIEW') else: if _get_scs_globals().export_include_edgesplit: - disabled_modifiers.append(disable_modifiers(obj, modifier_type_to_disable='EDGE_SPLIT', inverse=True)) + disabled_modifiers.extend(disable_modifiers(obj, modifier_type_to_disable='EDGE_SPLIT', inverse=True)) mesh = obj.to_mesh(scene, True, 'PREVIEW') else: - disabled_modifiers.append(disable_modifiers(obj, modifier_type_to_disable='ANY', inverse=True)) + disabled_modifiers.extend(disable_modifiers(obj, modifier_type_to_disable='ANY', inverse=True)) mesh = obj.to_mesh(scene, True, 'PREVIEW') restore_modifiers(obj, disabled_modifiers)