forked from bartvdbraak/blender
remove redundant []s from list comprehensions; py2.4+ supports generator expressions, so this is valid.
This commit is contained in:
parent
ce639f18d6
commit
aaf328dc78
@ -100,7 +100,7 @@ def write_png(buf, width, height):
|
||||
|
||||
# reverse the vertical line order and add null bytes at the start
|
||||
width_byte_4 = width * 4
|
||||
raw_data = b"".join([b'\x00' + buf[span:span + width_byte_4] for span in range((height - 1) * width * 4, -1, - width_byte_4)])
|
||||
raw_data = b"".join(b'\x00' + buf[span:span + width_byte_4] for span in range((height - 1) * width * 4, -1, - width_byte_4))
|
||||
|
||||
def png_pack(png_tag, data):
|
||||
chunk_head = png_tag + data
|
||||
|
@ -220,7 +220,7 @@ class InfoPropertyRNA:
|
||||
# special case for floats
|
||||
if len(self.default) > 0:
|
||||
if self.type == "float":
|
||||
self.default_str = "(%s)" % ", ".join([float_as_string(f) for f in self.default])
|
||||
self.default_str = "(%s)" % ", ".join(float_as_string(f) for f in self.default)
|
||||
if not self.default_str:
|
||||
self.default_str = str(self.default)
|
||||
else:
|
||||
@ -247,7 +247,7 @@ class InfoPropertyRNA:
|
||||
if self.type in ("float", "int"):
|
||||
type_str += " in [%s, %s]" % (range_str(self.min), range_str(self.max))
|
||||
elif self.type == "enum":
|
||||
type_str += " in [%s]" % ', '.join([("'%s'" % s) for s in self.enum_items])
|
||||
type_str += " in [%s]" % ", ".join(("'%s'" % s) for s in self.enum_items)
|
||||
|
||||
if not (as_arg or as_ret):
|
||||
# write default property, ignore function args for this
|
||||
|
@ -307,7 +307,7 @@ def bvh_node_dict2objects(context, bvh_nodes, IMPORT_START_FRAME=1, IMPORT_LOOP=
|
||||
|
||||
# Parent the objects
|
||||
for bvh_node in bvh_nodes.values():
|
||||
bvh_node.temp.makeParent([bvh_node_child.temp for bvh_node_child in bvh_node.children], 1, 0) # ojbs, noninverse, 1 = not fast.
|
||||
bvh_node.temp.makeParent((bvh_node_child.temp for bvh_node_child in bvh_node.children), 1, 0)  # ojbs, noninverse, 1 = not fast.
|
||||
|
||||
# Offset
|
||||
for bvh_node in bvh_nodes.values():
|
||||
|
@ -30,7 +30,7 @@ def randomize_selected(seed, loc, rot, scale, scale_even, scale_min):
|
||||
random.seed(seed)
|
||||
|
||||
def rand_vec(vec_range):
|
||||
return Vector([uniform(-val, val) for val in vec_range])
|
||||
return Vector(uniform(-val, val) for val in vec_range)
|
||||
|
||||
for obj in bpy.context.selected_objects:
|
||||
|
||||
|
@ -99,7 +99,7 @@ class PlayRenderedAnim(bpy.types.Operator):
|
||||
file_b = rd.frame_path(frame=frame_tmp)
|
||||
file_b = rd.frame_path(frame=int(frame_tmp / 10))
|
||||
|
||||
file = ''.join([(c if file_b[i] == c else "#") for i, c in enumerate(file_a)])
|
||||
file = "".join((c if file_b[i] == c else "#") for i, c in enumerate(file_a))
|
||||
else:
|
||||
# works for movies and images
|
||||
file = rd.frame_path(frame=scene.frame_start)
|
||||
|
@ -542,7 +542,7 @@ class WM_OT_doc_view(bpy.types.Operator):
|
||||
while class_obj:
|
||||
ls.insert(0, class_obj)
|
||||
class_obj = class_obj.nested
|
||||
return '.'.join([class_obj.identifier for class_obj in ls])
|
||||
return '.'.join(class_obj.identifier for class_obj in ls)
|
||||
|
||||
def execute(self, context):
|
||||
id_split = self.doc_id.split('.')
|
||||
|
@ -981,7 +981,7 @@ class USERPREF_PT_addons(bpy.types.Panel):
|
||||
if info["version"]:
|
||||
split = colsub.row().split(percentage=0.15)
|
||||
split.label(text='Version:')
|
||||
split.label(text='.'.join([str(x) for x in info["version"]]))
|
||||
split.label(text='.'.join(str(x) for x in info["version"]))
|
||||
if info["warning"]:
|
||||
split = colsub.row().split(percentage=0.15)
|
||||
split.label(text="Warning:")
|
||||
|
@ -119,7 +119,7 @@ def _merge_keymaps(kc1, kc2):
|
||||
"""
|
||||
merged_keymaps = [(km, kc1) for km in kc1.keymaps]
|
||||
if kc1 != kc2:
|
||||
merged_keymaps.extend([(km, kc2) for km in kc2.keymaps if not _km_exists_in(km, merged_keymaps)])
|
||||
merged_keymaps.extend((km, kc2) for km in kc2.keymaps if not _km_exists_in(km, merged_keymaps))
|
||||
|
||||
return merged_keymaps
|
||||
|
||||
|
@ -45,7 +45,7 @@ def api_dump(use_properties=True, use_functions=True):
|
||||
|
||||
data_str = "%s.%s(%s)" % (struct_id_str, func_id, ", ".join(args))
|
||||
if func.return_values:
|
||||
return_args = ", ".join([prop_type(arg) for arg in func.return_values])
|
||||
return_args = ", ".join(prop_type(arg) for arg in func.return_values)
|
||||
if len(func.return_values) > 1:
|
||||
data_str += " --> (%s)" % return_args
|
||||
else:
|
||||
|
@ -510,7 +510,7 @@ def rna2sphinx(BASEPATH):
|
||||
|
||||
type_descr = prop.get_type_description(**kwargs)
|
||||
if prop.name or prop.description:
|
||||
fw(ident + ":%s%s: %s\n" % (id_name, identifier, ", ".join([val for val in (prop.name, prop.description) if val])))
|
||||
fw(ident + ":%s%s: %s\n" % (id_name, identifier, ", ".join(val for val in (prop.name, prop.description) if val)))
|
||||
fw(ident + ":%s%s: %s\n" % (id_type, identifier, type_descr))
|
||||
|
||||
def write_struct(struct):
|
||||
@ -552,12 +552,12 @@ def rna2sphinx(BASEPATH):
|
||||
else:
|
||||
fw("base class --- ")
|
||||
|
||||
fw(", ".join([(":class:`%s`" % base_id) for base_id in base_ids]))
|
||||
fw(", ".join((":class:`%s`" % base_id) for base_id in base_ids))
|
||||
fw("\n\n")
|
||||
|
||||
subclass_ids = [s.identifier for s in structs.values() if s.base is struct if not rna_info.rna_id_ignore(s.identifier)]
|
||||
if subclass_ids:
|
||||
fw("subclasses --- \n" + ", ".join([(":class:`%s`" % s) for s in subclass_ids]) + "\n\n")
|
||||
fw("subclasses --- \n" + ", ".join((":class:`%s`" % s) for s in subclass_ids) + "\n\n")
|
||||
|
||||
base_id = getattr(struct.base, "identifier", "")
|
||||
|
||||
@ -595,7 +595,7 @@ def rna2sphinx(BASEPATH):
|
||||
del py_properties, py_prop
|
||||
|
||||
for func in struct.functions:
|
||||
args_str = ", ".join([prop.get_arg_default(force=False) for prop in func.args])
|
||||
args_str = ", ".join(prop.get_arg_default(force=False) for prop in func.args)
|
||||
|
||||
fw(" .. %s:: %s(%s)\n\n" % ("classmethod" if func.is_classmethod else "method", func.identifier, args_str))
|
||||
fw(" %s\n\n" % func.description)
|
||||
@ -606,7 +606,7 @@ def rna2sphinx(BASEPATH):
|
||||
if len(func.return_values) == 1:
|
||||
write_param(" ", fw, func.return_values[0], is_return=True)
|
||||
elif func.return_values: # multiple return values
|
||||
fw(" :return (%s):\n" % ", ".join([prop.identifier for prop in func.return_values]))
|
||||
fw(" :return (%s):\n" % ", ".join(prop.identifier for prop in func.return_values))
|
||||
for prop in func.return_values:
|
||||
type_descr = prop.get_type_description(as_ret=True, class_fmt=":class:`%s`")
|
||||
descr = prop.description
|
||||
@ -724,7 +724,7 @@ def rna2sphinx(BASEPATH):
|
||||
|
||||
subclass_ids = [s.identifier for s in structs.values() if s.base is None if not rna_info.rna_id_ignore(s.identifier)]
|
||||
if subclass_ids:
|
||||
fw("subclasses --- \n" + ", ".join([(":class:`%s`" % s) for s in sorted(subclass_ids)]) + "\n\n")
|
||||
fw("subclasses --- \n" + ", ".join((":class:`%s`" % s) for s in sorted(subclass_ids)) + "\n\n")
|
||||
|
||||
fw(".. class:: %s\n\n" % _BPY_STRUCT_FAKE)
|
||||
fw(" built-in base class for all classes in bpy.types.\n\n")
|
||||
@ -762,7 +762,7 @@ def rna2sphinx(BASEPATH):
|
||||
fw(".. module:: bpy.ops.%s\n\n" % op.module_name)
|
||||
last_mod = op.module_name
|
||||
|
||||
args_str = ", ".join([prop.get_arg_default(force=True) for prop in op.args])
|
||||
args_str = ", ".join(prop.get_arg_default(force=True) for prop in op.args)
|
||||
fw(".. function:: %s(%s)\n\n" % (op.func_name, args_str))
|
||||
|
||||
# if the description isn't valid, we output the standard warning
|
||||
|
Loading…
Reference in New Issue
Block a user