Skip to content

Commit

Permalink
Merge pull request #13 from ksons/fix/flake8
Browse files Browse the repository at this point in the history
Fixed code style to comply with Blender style recommendations
  • Loading branch information
ksons authored Mar 6, 2018
2 parents e2ca1bd + e3b6ca7 commit 0a2e25e
Show file tree
Hide file tree
Showing 11 changed files with 66 additions and 76 deletions.
15 changes: 7 additions & 8 deletions addons/io_scene_gltf/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def get_mesh(self, idx):

def get_camera(self, idx):
if idx not in self.cameras:
#TODO actually handle cameras
# TODO: actually handle cameras
camera = self.gltf['cameras'][idx]
name = camera.get('name', 'cameras[%d]' % idx)
self.cameras[idx] = bpy.data.cameras.new(name)
Expand Down Expand Up @@ -105,16 +105,15 @@ def str_to_version(s):
raise Exception('unsupported version: %s' % version)

def check_required_extensions(self):
#TODO the below works but it will make the tests fails.
# TODO: the below works but it will make the tests fails.
# Can be uncommented when KhronosGroup/glTF-Sample-Models#144
# is closed OR we implement pbrSpecularGlossiness.
pass

#for ext in self.gltf.get('extensionsRequired', []):
# for ext in self.gltf.get('extensionsRequired', []):
# if ext not in EXTENSIONS:
# raise Exception('unsupported extension was required: %s' % ext)


def load(self):
filename = self.filepath
self.base_path = os.path.dirname(filename)
Expand All @@ -134,17 +133,17 @@ def parse_glb(self, contents):
header = struct.unpack_from('<4sII', contents)
glb_version = header[1]
if glb_version != 2:
raise Exception('GLB: version not supported: %d' % version)
raise Exception('GLB: version not supported: %d' % glb_version)

def parse_chunk(offset):
header = struct.unpack_from('<I4s', contents, offset=offset)
data_len = header[0]
ty = header[1]
data = contents[offset + 8 : offset + 8 + data_len]
data = contents[offset + 8: offset + 8 + data_len]
next_offset = offset + 8 + data_len
return { 'type': ty, 'data': data, 'next_offset': next_offset }
return {'type': ty, 'data': data, 'next_offset': next_offset}

offset = 12 # end of header
offset = 12 # end of header

json_chunk = parse_chunk(offset)
if json_chunk['type'] != b'JSON':
Expand Down
1 change: 1 addition & 0 deletions addons/io_scene_gltf/animation.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import bpy


def create_action(op, idx):
anim = op.gltf['animations'][idx]
name = anim.get('name', 'animations[%d]' % idx)
Expand Down
27 changes: 14 additions & 13 deletions addons/io_scene_gltf/buffer.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import os
import struct


def create_buffer(op, idx):
buffer = op.gltf['buffers'][idx]

Expand All @@ -19,7 +20,7 @@ def create_buffer(op, idx):
return base64.b64decode(base64_data)

# If we got here, assume it's a filepath
buffer_location = os.path.join(op.base_path, uri) #TODO absolute paths?
buffer_location = os.path.join(op.base_path, uri) # TODO: absolute paths?
print('Loading file', buffer_location)
with open(buffer_location, 'rb') as fp:
bytes_read = fp.read()
Expand All @@ -46,12 +47,12 @@ def create_accessor(op, idx):
def create_accessor_from_properties(op, accessor):
count = accessor['count']
fmt_char_lut = dict([
(5120, 'b'), # BYTE
(5121, 'B'), # UNSIGNED_BYTE
(5122, 'h'), # SHORT
(5123, 'H'), # UNSIGNED_SHORT
(5125, 'I'), # UNSIGNED_INT
(5126, 'f') # FLOAT
(5120, 'b'), # BYTE
(5121, 'B'), # UNSIGNED_BYTE
(5122, 'h'), # SHORT
(5123, 'H'), # UNSIGNED_SHORT
(5125, 'I'), # UNSIGNED_INT
(5126, 'f') # FLOAT
])
fmt_char = fmt_char_lut[accessor['componentType']]
component_size = struct.calcsize(fmt_char)
Expand Down Expand Up @@ -91,11 +92,11 @@ def create_accessor_from_properties(op, accessor):
normalize = None
if 'normalized' in accessor and accessor['normalized']:
normalize_lut = dict([
(5120, lambda x: max(x / (2**7 - 1), -1)), # BYTE
(5121, lambda x: x / (2**8 - 1)), # UNSIGNED_BYTE
(5122, lambda x: max(x / (2**15 - 1), -1)), # SHORT
(5123, lambda x: x / (2**16 - 1)), # UNSIGNED_SHORT
(5125, lambda x: x / (2**32 - 1)) # UNSIGNED_INT
(5120, lambda x: max(x / (2**7 - 1), -1)), # BYTE
(5121, lambda x: x / (2**8 - 1)), # UNSIGNED_BYTE
(5122, lambda x: max(x / (2**15 - 1), -1)), # SHORT
(5123, lambda x: x / (2**16 - 1)), # UNSIGNED_SHORT
(5125, lambda x: x / (2**32 - 1)) # UNSIGNED_INT
])
normalize = normalize_lut[accessor['componentType']]

Expand All @@ -109,7 +110,7 @@ def create_accessor_from_properties(op, accessor):
off = accessor.get('byteOffset', 0)
result = []
while len(result) < count:
elem = struct.unpack_from(fmt, buf, offset = off)
elem = struct.unpack_from(fmt, buf, offset=off)
if normalize:
elem = tuple([normalize(x) for x in elem])
if num_components == 1:
Expand Down
18 changes: 9 additions & 9 deletions addons/io_scene_gltf/material.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def do_with_temp_file(contents, func):
tmp = tempfile.NamedTemporaryFile(delete=False)
path = tmp.name
tmp.write(contents)
tmp.close() # Have to close so func can open it
tmp.close() # Have to close so func can open it
return func(path)
finally:
if path:
Expand All @@ -38,7 +38,7 @@ def load_from_temp(path):

# Need to pack the image into the .blend file or it will go
# away as soon as the temp file is deleted.
tex_image.image.pack() #TODO decide on tradeoff for using as_png
tex_image.image.pack() # TODO: decide on tradeoff for using as_png

if 'uri' in source:
uri = source['uri']
Expand All @@ -65,7 +65,7 @@ def load_from_temp(path):
def create_pbr_group():
"""Create a node group for metallic-roughness PBR."""

#XXX IDEA
# XXX IDEA
# Use rna2xml to serialize the PBR group in KhronosGroup/glTF-Blender-Exporter
# and just import it here and get rid of this whole mess!

Expand All @@ -79,15 +79,15 @@ def create_pbr_group():

# Crap, I did this function in camelCase. Of course it's the one with
# a million variables. Stupid, stupid.
#TODO make it snake_case
# TODO: make it snake_case

baseColorFacInp = inputs.new('NodeSocketColor', 'baseColorFactor')
baseColorTexInp = inputs.new('NodeSocketColor', 'baseColorTexture')
metFacInp = inputs.new('NodeSocketFloat', 'metallicFactor')
roughFacInp = inputs.new('NodeSocketFloat', 'roughnessFactor')
metRoughTexInp = inputs.new('NodeSocketColor', 'metallicRoughnessTexture')
vertColorInp = inputs.new('NodeSocketColor', 'Vertex Color')
normalInp = inputs.new('NodeSocketNormal', 'Normal')
inputs.new('NodeSocketNormal', 'Normal')

baseColorFacInp.default_value = (1, 1, 1, 1)
baseColorTexInp.default_value = (1, 1, 1, 1)
Expand All @@ -96,7 +96,7 @@ def create_pbr_group():
metRoughTexInp.default_value = (1, 1, 1, 1)
vertColorInp.default_value = (1, 1, 1, 1)

out = outputs.new('NodeSocketShader', 'Output Shader')
outputs.new('NodeSocketShader', 'Output Shader')

inputNode = tree.nodes.new('NodeGroupInput')
inputNode.location = -962, 183
Expand Down Expand Up @@ -220,7 +220,7 @@ def create_material_from_properties(op, material, material_name):
group_node.inputs[2].default_value = metalness
group_node.inputs[3].default_value = roughness

#TODO texCoord property
# TODO texCoord property
if 'baseColorTexture' in pbr_metallic_roughness:
image_idx = pbr_metallic_roughness['baseColorTexture']['index']
tex = create_texture(op, image_idx, 'baseColorTexture', tree)
Expand All @@ -240,7 +240,7 @@ def create_material_from_properties(op, material, material_name):
normal_map_node.location = -150, -170
links.new(tex.outputs[0], normal_map_node.inputs[1])
links.new(normal_map_node.outputs[0], group_node.inputs[6])
#TODO scale
# TODO scale
if 'emissiveTexture' in material:
image_idx = material['emissiveTexture']['index']
tex = create_texture(op, image_idx, 'emissiveTexture', tree)
Expand All @@ -254,7 +254,7 @@ def create_material_from_properties(op, material, material_name):
links.new(emission_node.outputs[0], add_node.inputs[1])
links.new(add_node.outputs[0], mo.inputs[0])
mo.location = 547, -84
#TODO occlusion texture
# TODO occlusion texture

return mat

Expand Down
20 changes: 5 additions & 15 deletions addons/io_scene_gltf/mesh.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import bmesh
import bpy


def primitive_to_mesh(op, primitive, all_attributes, material_index):
"""Convert a glTF primitive object to a Blender mesh.
Expand All @@ -24,7 +25,6 @@ def primitive_to_mesh(op, primitive, all_attributes, material_index):
edges = []
faces = []


# Generate the topology

mode = primitive.get('mode', 4)
Expand All @@ -34,7 +34,7 @@ def primitive_to_mesh(op, primitive, all_attributes, material_index):
else:
indices = range(0, len(verts))

#TODO only mode TRIANGLES is tested!!
# TODO: only mode TRIANGLES is tested!!
if mode == 0:
# POINTS
pass
Expand Down Expand Up @@ -79,23 +79,17 @@ def alternate(i, xs):
me.from_pydata(verts, edges, faces)
me.validate()


# Assign material

for polygon in me.polygons:
polygon.material_index = material_index


# Assign normals

if 'NORMAL' in attributes:
normals = op.get_accessor(attributes['NORMAL'])
for i, vertex in enumerate(me.vertices):
vertex.normal = normals[i]


# Assign colors

if 'COLOR_0' in all_attributes:
me.vertex_colors.new('COLOR_0')
if 'COLOR_0' in attributes:
Expand All @@ -111,9 +105,7 @@ def alternate(i, xs):
for vert_idx, loop_idx in zip(polygon.vertices, polygon.loop_indices):
color_layer[loop_idx].color = colors[vert_idx][0:3]


# Assign texcoords

def assign_texcoords(uvs, uv_layer):
for polygon in me.polygons:
for vert_idx, loop_idx in zip(polygon.vertices, polygon.loop_indices):
Expand All @@ -128,14 +120,12 @@ def assign_texcoords(uvs, uv_layer):
if 'TEXCOORD_1' in attributes:
assign_texcoords(op.get_accessor(attributes['TEXCOORD_1']), me.uv_layers[1].data)


# Assign joints by generating vertex groups

if 'JOINTS_0' in attributes and 'WEIGHTS_0' in attributes:
# Don't seem to need to deal with all_attributes here.
# The only way I could find to set vertex groups was by
# round-tripping through a bmesh.
#TODO find a better way?
# TODO: find a better way?
joints = op.get_accessor(attributes['JOINTS_0'])
weights = op.get_accessor(attributes['WEIGHTS_0'])
bme = bmesh.new()
Expand All @@ -160,7 +150,7 @@ def create_mesh(op, idx):

# Find the union of the attributes used by each primitive.
attributes = (set(primitive['attributes'].keys()) for primitive in primitives)
all_attributes = reduce(lambda x,y: x.union(y), attributes)
all_attributes = reduce(lambda x, y: x.union(y), attributes)

bme = bmesh.new()
for i, primitive in enumerate(mesh['primitives']):
Expand All @@ -177,7 +167,7 @@ def create_mesh(op, idx):
material = op.get_default_material()
me.materials.append(material)

#TODO Do we need this?
# TODO: Do we need this?
for polygon in me.polygons:
polygon.use_smooth = True

Expand Down
15 changes: 8 additions & 7 deletions addons/io_scene_gltf/node.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import bpy
import math
from mathutils import Matrix, Quaternion, Vector

"""
Expand Down Expand Up @@ -28,18 +27,20 @@
that scene (ie. are descendants of one of the roots of the scene).
"""


def convert_matrix(m):
"""Converts a glTF matrix to a Blender matrix."""
result = Matrix([m[0:4], m[4:8], m[8:12], m[12:16]])
result.transpose() # column-major to row-major
result.transpose() # column-major to row-major
return result


def convert_quaternion(q):
"""Converts a glTF quaternion to Blender a quaternion."""
# xyzw -> wxyz
# xyzw -> wxyz
return Quaternion([q[3], q[0], q[1], q[2]])


def get_transform(node):
if 'matrix' in node:
return convert_matrix(node['matrix'])
Expand Down Expand Up @@ -70,7 +71,7 @@ def create(name, data):
ob = bpy.data.objects.new(name, data)
ob.parent = op.armature_ob

#TODO make the object a child of the bone instead? Making it a
# TODO: make the object a child of the bone instead? Making it a
# child puts it at the tail of the bone and we want it at the
# head. We'd just need to translate it along the length of the
# bone.
Expand Down Expand Up @@ -134,7 +135,7 @@ def generate_armature_object(op):
op.armature_ob = arma_ob

# Turn glTF up (+Y) into Blender up (+Z)
#TODO is this right?
# TODO is this right?
arma_ob.matrix_local = Matrix([
[1, 0, 0, 0],
[0, 0, -1, 0],
Expand All @@ -155,7 +156,7 @@ def add_bone(idx, parent, parent_mat):
bone.head = mat * Vector((0, 0, 0))
bone.tail = mat * Vector((0, 1, 0))
bone.align_roll(mat * Vector((0, 0, 1)) - bone.head)
#NOTE bones don't seem to have non-uniform scaling.
# NOTE: bones don't seem to have non-uniform scaling.
# This appears to be a serious problem for us.

op.node_to_bone_name[idx] = bone.name
Expand Down Expand Up @@ -188,7 +189,7 @@ def create_scene(op, idx):
scn = bpy.context.scene
scn.name = name
scn.render.engine = 'CYCLES'
#scn.world.use_nodes = True
# scn.world.use_nodes = True

# Always link in the whole node forest
scn.objects.link(op.armature_ob)
Expand Down
2 changes: 2 additions & 0 deletions setup.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[flake8]
max-line-length = 120
2 changes: 1 addition & 1 deletion test/generate_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@


def run_tests():
report = { 'tests': [] }
report = {'tests': []}
tests = report['tests']

files = (
Expand Down
Loading

0 comments on commit 0a2e25e

Please sign in to comment.