2009-11-01 15:21:20 +00:00
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
2009-12-13 14:00:39 +00:00
#
2009-11-01 15:21:20 +00:00
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
2009-12-13 14:00:39 +00:00
#
2009-11-01 15:21:20 +00:00
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
2010-02-12 13:34:04 +00:00
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
2009-11-01 15:21:20 +00:00
#
# ##### END GPL LICENSE BLOCK #####
2004-06-07 01:34:15 +00:00
2009-12-13 14:38:30 +00:00
# <pep8 compliant>
2009-09-29 15:27:00 +00:00
import os
import time
2010-02-26 11:50:59 +00:00
import shutil
2009-09-29 15:27:00 +00:00
import bpy
2010-04-11 14:22:27 +00:00
import mathutils
2005-05-30 02:26:40 +00:00
2005-10-11 02:32:58 +00:00
def fixName(name):
    """Return *name* made safe for use as an OBJ identifier.

    OBJ group/material names may not contain spaces, so they are replaced
    with underscores.  A missing (None) name becomes the literal string
    'None' so it can still be written to the file.
    """
    if name is None:  # 'is' for the None singleton, not '==' 
        return 'None'
    return name.replace(' ', '_')
2005-10-11 02:32:58 +00:00
2010-06-02 17:58:28 +00:00
def write_mtl(scene, filepath, copy_images, mtl_dict):
    """Write the .mtl material library companion file for an OBJ export.

    scene       -- bpy Scene; its world ambient colour tints every 'Ka' line.
    filepath    -- destination path of the .mtl file.
    copy_images -- when True, referenced images are copied next to the .mtl
                   file and written by basename; otherwise the absolute
                   source path is written.
    mtl_dict    -- {key: (mtl_mat_name, material, image)} accumulated by
                   write_file().
    """
    world = scene.world
    worldAmb = world.ambient_color

    dest_dir = os.path.dirname(filepath)

    def copy_image(image):
        # Resolve the image's path; optionally copy it beside the .mtl file
        # and return the path that should be written into the file.
        fn = bpy.path.abspath(image.filepath)
        fn_strip = os.path.basename(fn)
        if copy_images:
            rel = fn_strip
            fn_abs_dest = os.path.join(dest_dir, fn_strip)
            if not os.path.exists(fn_abs_dest):
                shutil.copy(fn, fn_abs_dest)
        else:
            rel = fn
        return rel

    # 'with' guarantees the file is closed even if a material write raises.
    with open(filepath, "w") as f:
        f.write('# Material Count: %i\n' % len(mtl_dict))

        # Write material/image combinations we have used.
        for key, (mtl_mat_name, mat, img) in mtl_dict.items():
            # Having an image named None will make a bug, dont do it :)
            f.write('newmtl %s\n' % mtl_mat_name)  # Define a new material: matname_imgname

            if mat:
                # Hardness, convert blenders 1-511 range to MTL's.
                f.write('Ns %.6f\n' % ((mat.specular_hardness - 1) * 1.9607843137254901))
                f.write('Ka %.6f %.6f %.6f\n' % tuple([c * mat.ambient for c in worldAmb]))  # Ambient
                f.write('Kd %.6f %.6f %.6f\n' % tuple([c * mat.diffuse_intensity for c in mat.diffuse_color]))  # Diffuse
                f.write('Ks %.6f %.6f %.6f\n' % tuple([c * mat.specular_intensity for c in mat.specular_color]))  # Specular
                if hasattr(mat, "ior"):
                    f.write('Ni %.6f\n' % mat.ior)  # Refraction index
                else:
                    f.write('Ni %.6f\n' % 1.0)
                f.write('d %.6f\n' % mat.alpha)  # Alpha (obj uses 'd' for dissolve)

                # illum: 0 to disable lighting, 1 for ambient & diffuse only
                # (specular color set to black), 2 for full lighting.
                if mat.use_shadeless:
                    f.write('illum 0\n')  # ignore lighting
                elif mat.specular_intensity == 0:
                    f.write('illum 1\n')  # no specular.
                else:
                    f.write('illum 2\n')  # light normaly
            else:
                # Write a dummy material for faces with no material assigned.
                f.write('Ns 0\n')
                f.write('Ka %.6f %.6f %.6f\n' % tuple([c for c in worldAmb]))  # Ambient
                f.write('Kd 0.8 0.8 0.8\n')
                f.write('Ks 0.8 0.8 0.8\n')
                f.write('d 1\n')  # No alpha
                f.write('illum 2\n')  # light normaly

            # Write images!
            if img:  # We have an image on the face!
                rel = copy_image(img)
                f.write('map_Kd %s\n' % rel)  # Diffuse mapping image
            elif mat:  # No face image; search the material's texture slots.
                for mtex in mat.texture_slots:
                    if mtex and mtex.texture.type == 'IMAGE':
                        try:
                            tex_path = copy_image(mtex.texture.image)
                            # repr()[1:-1] escapes non-utf8 chars in the path.
                            f.write('map_Kd %s\n' % repr(tex_path)[1:-1])  # Diffuse mapping image
                            break
                        except Exception:
                            # Texture has no image though its an image type,
                            # best ignore.
                            pass

            f.write('\n\n')
2004-06-21 12:01:23 +00:00
2009-09-29 15:27:00 +00:00
# XXX not used
2006-01-29 19:17:53 +00:00
def copy_file(source, dest):
    """Copy the bytes of *source* to *dest*.  (XXX not used)

    'with' blocks close both handles even if the read or write raises;
    the original left the files open on error.
    """
    with open(source, 'rb') as f:
        data = f.read()
    with open(dest, 'wb') as f:
        f.write(data)
2005-10-11 02:32:58 +00:00
2009-09-29 15:27:00 +00:00
# XXX not used
2006-01-29 19:17:53 +00:00
def copy_images(dest_dir, mtl_dict=None):
    """Copy every image referenced by the exported materials into *dest_dir*.
    (XXX not used -- the actual copy loop below is commented out.)

    mtl_dict -- {key: (matname, mat, image)} as built by write_file().
                The original referenced ``mtl_dict`` as an undefined global
                (NameError when called); it is now an explicit, optional
                parameter, matching the commented-out caller in write_file().
    """
    if dest_dir[-1] != os.sep:
        dest_dir += os.sep

    if mtl_dict is None:
        mtl_dict = {}

    # Get unique image names
    uniqueImages = {}
    for matname, mat, image in mtl_dict.values():  # Only use image name
        # Get Texface images
        if image:
            uniqueImages[image] = image
        # Get MTex images
        if mat:
            for mtex in mat.texture_slots:
                if mtex and mtex.texture.type == 'IMAGE':
                    image_tex = mtex.texture.image
                    if image_tex:
                        try:
                            uniqueImages[image_tex] = image_tex
                        except Exception:
                            pass

    # Now copy images -- copy loop itself is still disabled (XXX).
    copyCount = 0

    # for bImage in uniqueImages.values():
    #     image_path = bpy.path.abspath(bImage.filepath)
    #     if bpy.sys.exists(image_path):
    #         # Make a name for the target path.
    #         dest_image_path = dest_dir + image_path.split('\\')[-1].split('/')[-1]
    #         if not bpy.utils.exists(dest_image_path):  # Image isnt already there
    #             print('\tCopying "%s" > "%s"' % (image_path, dest_image_path))
    #             copy_file(image_path, dest_image_path)
    #             copyCount += 1

    print('\tCopied %d images' % copyCount)
2009-06-10 19:14:05 +00:00
2010-08-27 04:43:42 +00:00
2009-06-10 19:14:05 +00:00
def test_nurbs_compat ( ob ) :
2010-08-27 04:43:42 +00:00
if ob . type != ' CURVE ' :
2009-12-13 14:00:39 +00:00
return False
2010-08-27 04:43:42 +00:00
for nu in ob . data . splines :
if nu . point_count_v == 1 and nu . type != ' BEZIER ' : # not a surface and not bezier
2009-12-13 14:00:39 +00:00
return True
return False
2009-06-10 19:14:05 +00:00
2009-09-29 15:27:00 +00:00
2009-06-10 19:14:05 +00:00
def write_nurb(file, ob, ob_mat):
    """Write the splines of curve object *ob* as OBJ 'curv' entities.

    file   -- open text file to write to.
    ob     -- CURVE object (pre-checked with test_nurbs_compat()).
    ob_mat -- world matrix applied to each control point.

    Returns the number of 'v' lines written so the caller can keep its
    global vertex counter in sync.  Bezier splines and surfaces are
    skipped with a warning.
    """
    tot_verts = 0
    cu = ob.data

    # use negative indices
    for nu in cu.splines:
        if nu.type == 'POLY':
            DEG_ORDER_U = 1
        else:
            DEG_ORDER_U = nu.order_u - 1  # odd but tested to be correct

        if nu.type == 'BEZIER':
            print("\tWarning, bezier curve:", ob.name, "only poly and nurbs curves supported")
            continue

        if nu.point_count_v > 1:
            print("\tWarning, surface:", ob.name, "only poly and nurbs curves supported")
            continue

        if len(nu.points) <= DEG_ORDER_U:
            print("\tWarning, order_u is lower then vert count, skipping:", ob.name)
            continue

        pt_num = 0
        do_closed = nu.use_cyclic_u
        do_endpoints = (not do_closed) and nu.use_endpoint_u

        # 'world_pt' avoids shadowing the loop variable as the original did.
        for pt in nu.points:
            world_pt = ob_mat * pt.co.copy().resize3D()
            file.write('v %.6f %.6f %.6f\n' % (world_pt[0], world_pt[1], world_pt[2]))
            pt_num += 1
        tot_verts += pt_num

        file.write('g %s\n' % (fixName(ob.name)))  # fixName(ob.getData(1)) could use the data name too
        file.write('cstype bspline\n')  # not ideal, hard coded
        file.write('deg %d\n' % DEG_ORDER_U)  # not used for curves but most files have it still

        curve_ls = [-(i + 1) for i in range(pt_num)]

        # 'curv' keyword
        if do_closed:
            if DEG_ORDER_U == 1:
                pt_num += 1
                curve_ls.append(-1)
            else:
                pt_num += DEG_ORDER_U
                curve_ls = curve_ls + curve_ls[0:DEG_ORDER_U]

        # Blender has no U and V values for the curve
        file.write('curv 0.0 1.0 %s\n' % (' '.join(str(i) for i in curve_ls)))

        # 'parm' keyword
        tot_parm = (DEG_ORDER_U + 1) + pt_num
        tot_parm_div = float(tot_parm - 1)
        parm_ls = [(i / tot_parm_div) for i in range(tot_parm)]

        if do_endpoints:  # end points, force param
            for i in range(DEG_ORDER_U + 1):
                parm_ls[i] = 0.0
                parm_ls[-(1 + i)] = 1.0

        file.write('parm u %s\n' % ' '.join(str(i) for i in parm_ls))
        file.write('end\n')

    return tot_verts
2009-06-10 19:14:05 +00:00
2010-08-09 10:35:03 +00:00
def write_file(filepath, objects, scene,
        EXPORT_TRI=False,
        EXPORT_EDGES=False,
        EXPORT_NORMALS=False,
        EXPORT_NORMALS_HQ=False,
        EXPORT_UV=True,
        EXPORT_MTL=True,
        EXPORT_COPY_IMAGES=False,
        EXPORT_APPLY_MODIFIERS=True,
        EXPORT_ROTX90=True,
        EXPORT_BLEN_OBS=True,
        EXPORT_GROUP_BY_OB=False,
        EXPORT_GROUP_BY_MAT=False,
        EXPORT_KEEP_VERT_ORDER=False,
        EXPORT_POLYGROUPS=False,
        EXPORT_CURVE_AS_NURBS=True):
    '''
    Basic write function. The context and options must be already set.
    This can be accessed externaly, eg.
    write('c:\\test\\foobar.obj', Blender.Object.GetSelected())  # Using default options.

    NOTE(review): EXPORT_TRI and EXPORT_NORMALS_HQ are currently accepted
    but have no effect (triangulation was removed because it relied on
    editmesh -- see the comment below).
    '''
    # XXX
    import math

    def veckey3d(v):
        # Rounded 3-tuple used as a hashable key for de-duplicating normals.
        return round(v.x, 6), round(v.y, 6), round(v.z, 6)

    def veckey2d(v):
        # Rounded 2-tuple used as a hashable key for de-duplicating UVs.
        return round(v[0], 6), round(v[1], 6)

    def findVertexGroupName(face, vWeightMap):
        """
        Searches the vertexDict to see what groups is assigned to a given face.
        We use a frequency system in order to sort out the name because a given vetex can
        belong to two or more groups at the same time. To find the right name for the face
        we list all the possible vertex group names with their frequency and then sort by
        frequency in descend order. The top element is the one shared by the highest number
        of vertices is the face's group
        """
        weightDict = {}
        for vert_index in face.vertices:
            vWeights = vWeightMap[vert_index]
            for vGroupName, weight in vWeights:
                weightDict[vGroupName] = weightDict.get(vGroupName, 0) + weight

        if weightDict:
            # sort least to greatest amount of weight; highest value last
            alist = [(weight, vGroupName) for vGroupName, weight in weightDict.items()]
            alist.sort()
            return alist[-1][1]
        else:
            return '(null)'

    # TODO: implement this in C? dunno how it should be called...
    def getVertsFromGroup(me, group_index):
        # Return [(vert_index, weight), ...] for all verts in the group.
        ret = []
        for i, v in enumerate(me.vertices):
            for g in v.groups:
                if g.group == group_index:
                    ret.append((i, g.weight))
        return ret

    print('OBJ Export path: %r' % filepath)
    # NOTE(review): time.clock() was current for the Python this targeted;
    # it is removed in Python >= 3.8 (use time.process_time() there).
    time1 = time.clock()

    file = open(filepath, "w")

    # Write Header
    file.write('# Blender v%s OBJ File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
    file.write('# www.blender.org\n')

    # Tell the obj file what material file to use.
    if EXPORT_MTL:
        mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
        # filepath can contain non utf8 chars, use repr
        file.write('mtllib %s\n' % repr(os.path.basename(mtlfilepath))[1:-1])

    if EXPORT_ROTX90:
        mat_xrot90 = mathutils.Matrix.Rotation(-math.pi / 2, 4, 'X')

    # Initialize totals, these are updated each object.
    # OBJ indices are 1-based, hence starting at 1.
    totverts = totuvco = totno = 1
    face_vert_index = 1
    globalNormals = {}

    # A Dict of Materials
    # (material.name, image.name):matname_imagename  # matname_imagename has gaps removed.
    mtl_dict = {}

    # Get all meshes
    for ob_main in objects:
        # ignore dupli children -- they are exported via their parent below
        if ob_main.parent and ob_main.parent.dupli_type != 'NONE':
            print(ob_main.name, 'is a dupli child - ignoring')
            continue

        obs = []
        if ob_main.dupli_type != 'NONE':
            print('creating dupli_list on', ob_main.name)
            ob_main.create_dupli_list(scene)
            obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
            print(ob_main.name, 'has', len(obs), 'dupli children')
        else:
            obs = [(ob_main, ob_main.matrix_world)]

        for ob, ob_mat in obs:
            # Nurbs curve support
            if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
                if EXPORT_ROTX90:
                    ob_mat = ob_mat * mat_xrot90
                totverts += write_nurb(file, ob, ob_mat)
                continue
            # END NURBS

            if ob.type != 'MESH':
                continue

            me = ob.create_mesh(scene, EXPORT_APPLY_MODIFIERS, 'PREVIEW')

            if EXPORT_ROTX90:
                me.transform(mat_xrot90 * ob_mat)
            else:
                me.transform(ob_mat)

            if EXPORT_UV:
                faceuv = len(me.uv_textures) > 0
                if faceuv:
                    uv_layer = me.uv_textures.active.data[:]
            else:
                faceuv = False

            me_verts = me.vertices[:]

            # XXX - todo, find a better way to do triangulation
            # ...removed convert_to_triface because it relies on editmesh,
            # so EXPORT_TRI is currently a no-op.

            # Make our own list so it can be sorted to reduce context switching
            face_index_pairs = [(face, index) for index, face in enumerate(me.faces)]

            if EXPORT_EDGES:
                edges = me.edges
            else:
                edges = []

            if not (len(face_index_pairs) + len(edges) + len(me.vertices)):  # Make sure there is somthing to write
                # clean up
                bpy.data.meshes.remove(me)
                continue  # dont bother with this mesh.

            # XXX High Quality Normals -- HQ variant not implemented, see docstring.
            if EXPORT_NORMALS and face_index_pairs:
                me.calc_normals()

            materials = me.materials
            materialNames = []
            materialItems = list(materials)
            if materials:
                for mat in materials:
                    if mat:  # is not None
                        materialNames.append(mat.name)
                    else:
                        materialNames.append(None)
                # Cant use LC because some materials are None.

            # Possible there null materials, will mess up indicies
            # but at least it will export, wait until Blender gets fixed.
            materialNames.extend((16 - len(materialNames)) * [None])
            materialItems.extend((16 - len(materialItems)) * [None])

            # Sort by Material, then images
            # so we dont over context switch in the obj file.
            if EXPORT_KEEP_VERT_ORDER:
                pass
            elif faceuv:
                face_index_pairs.sort(key=lambda a: (a[0].material_index, hash(uv_layer[a[1]].image), a[0].use_smooth))
            elif len(materials) > 1:
                face_index_pairs.sort(key=lambda a: (a[0].material_index, a[0].use_smooth))
            else:
                # no materials
                face_index_pairs.sort(key=lambda a: a[0].use_smooth)

            # Set the default mat to no material and no image.
            contextMat = (0, 0)  # Can never be this, so we will label a new material the first chance we get.
            contextSmooth = None  # Will either be true or false, set bad to force initialization switch.

            if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
                name1 = ob.name
                name2 = ob.data.name
                if name1 == name2:
                    obnamestring = fixName(name1)
                else:
                    obnamestring = '%s_%s' % (fixName(name1), fixName(name2))

                if EXPORT_BLEN_OBS:
                    file.write('o %s\n' % obnamestring)  # Write Object name
                else:  # if EXPORT_GROUP_BY_OB:
                    file.write('g %s\n' % obnamestring)

            # Vert
            for v in me_verts:
                file.write('v %.6f %.6f %.6f\n' % tuple(v.co))

            # UV
            if faceuv:
                uv_face_mapping = [[0, 0, 0, 0] for i in range(len(face_index_pairs))]  # a bit of a waste for tri's :/

                uv_dict = {}  # could use a set() here
                uv_layer = me.uv_textures.active.data
                for f, f_index in face_index_pairs:
                    for uv_index, uv in enumerate(uv_layer[f_index].uv):
                        uvkey = veckey2d(uv)
                        try:
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
                        except KeyError:  # first time this uv co-ord is seen
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
                            file.write('vt %.6f %.6f\n' % tuple(uv))

                uv_unique_count = len(uv_dict)
                # Only need uv_unique_count and uv_face_mapping

            # NORMAL, Smooth/Non smoothed.
            if EXPORT_NORMALS:
                for f, f_index in face_index_pairs:
                    if f.use_smooth:
                        # Smooth: one normal per vertex.
                        for v_idx in f.vertices:
                            v = me_verts[v_idx]
                            noKey = veckey3d(v.normal)
                            if noKey not in globalNormals:
                                globalNormals[noKey] = totno
                                totno += 1
                                file.write('vn %.6f %.6f %.6f\n' % noKey)
                    else:
                        # Hard, 1 normal from the face.
                        noKey = veckey3d(f.normal)
                        if noKey not in globalNormals:
                            globalNormals[noKey] = totno
                            totno += 1
                            file.write('vn %.6f %.6f %.6f\n' % noKey)

            if not faceuv:
                f_image = None

            # XXX
            if EXPORT_POLYGROUPS:
                # Retrieve the list of vertex groups
                currentVGroup = ''
                # Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
                vgroupsMap = [[] for _i in range(len(me_verts))]
                for g in ob.vertex_groups:
                    for v_idx, vWeight in getVertsFromGroup(me, g.index):
                        vgroupsMap[v_idx].append((g.name, vWeight))

            for f, f_index in face_index_pairs:
                f_v = [me_verts[v_idx] for v_idx in f.vertices]

                f_smooth = f.use_smooth
                f_mat = min(f.material_index, len(materialNames) - 1)

                if faceuv:
                    tface = uv_layer[f_index]
                    f_image = tface.image
                    f_uv = tface.uv

                # MAKE KEY
                if faceuv and f_image:  # Object is always true.
                    key = materialNames[f_mat], f_image.name
                else:
                    key = materialNames[f_mat], None  # No image, use None instead.

                # Write the vertex group
                if EXPORT_POLYGROUPS:
                    if len(ob.vertex_groups):
                        # find what vertex group the face belongs to
                        theVGroup = findVertexGroupName(f, vgroupsMap)
                        if theVGroup != currentVGroup:
                            currentVGroup = theVGroup
                            file.write('g %s\n' % theVGroup)

                # CHECK FOR CONTEXT SWITCH
                if key == contextMat:
                    pass  # Context already switched, dont do anything
                else:
                    if key[0] is None and key[1] is None:
                        # Write a null material, since we know the context has changed.
                        if EXPORT_GROUP_BY_MAT:
                            # can be mat_image or (null)
                            file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.data.name)))
                        file.write('usemtl (null)\n')  # mat, image
                    else:
                        mat_data = mtl_dict.get(key)
                        if not mat_data:
                            # First add to global dict so we can export to mtl.
                            # Make a new name from the mat and image name,
                            # converting any spaces to underscores with fixName.
                            # If none image dont bother adding it to the name.
                            if key[1] is None:
                                mat_data = mtl_dict[key] = ('%s' % fixName(key[0])), materialItems[f_mat], f_image
                            else:
                                mat_data = mtl_dict[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image

                        if EXPORT_GROUP_BY_MAT:
                            file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.data.name), mat_data[0]))  # can be mat_image or (null)
                        file.write('usemtl %s\n' % mat_data[0])  # can be mat_image or (null)

                    contextMat = key

                if f_smooth != contextSmooth:
                    if f_smooth:  # was off now on
                        file.write('s 1\n')
                    else:  # was on now off
                        file.write('s off\n')
                    contextSmooth = f_smooth

                file.write('f')
                if faceuv:
                    if EXPORT_NORMALS:
                        if f_smooth:  # Smoothed, use vertex normals
                            for vi, v in enumerate(f_v):
                                file.write(' %d/%d/%d' %
                                           (v.index + totverts,
                                            totuvco + uv_face_mapping[f_index][vi],
                                            globalNormals[veckey3d(v.normal)]))  # vert, uv, normal
                        else:  # No smoothing, face normals
                            no = globalNormals[veckey3d(f.normal)]
                            for vi, v in enumerate(f_v):
                                file.write(' %d/%d/%d' %
                                           (v.index + totverts,
                                            totuvco + uv_face_mapping[f_index][vi],
                                            no))  # vert, uv, normal
                    else:  # No Normals
                        for vi, v in enumerate(f_v):
                            file.write(' %d/%d' % (
                                       v.index + totverts,
                                       totuvco + uv_face_mapping[f_index][vi]))  # vert, uv

                    face_vert_index += len(f_v)
                else:  # No UV's
                    if EXPORT_NORMALS:
                        if f_smooth:  # Smoothed, use vertex normals
                            for v in f_v:
                                file.write(' %d//%d' %
                                           (v.index + totverts, globalNormals[veckey3d(v.normal)]))
                        else:  # No smoothing, face normals
                            no = globalNormals[veckey3d(f.normal)]
                            for v in f_v:
                                file.write(' %d//%d' % (v.index + totverts, no))
                    else:  # No Normals
                        for v in f_v:
                            file.write(' %d' % (v.index + totverts))

                file.write('\n')

            # Write edges.
            if EXPORT_EDGES:
                for ed in edges:
                    if ed.is_loose:
                        file.write('f %d %d\n' % (ed.vertices[0] + totverts, ed.vertices[1] + totverts))

            # Make the indicies global rather then per mesh
            totverts += len(me_verts)
            if faceuv:
                totuvco += uv_unique_count

            # clean up
            bpy.data.meshes.remove(me)

        if ob_main.dupli_type != 'NONE':
            ob_main.free_dupli_list()

    file.close()

    # Now we have all our materials, save them
    if EXPORT_MTL:
        write_mtl(scene, mtlfilepath, EXPORT_COPY_IMAGES, mtl_dict)

    print("OBJ Export time: %.2f" % (time.clock() - time1))
2009-09-29 15:27:00 +00:00
2010-09-01 12:11:34 +00:00
#
def _write(context, filepath,
           EXPORT_TRI,  # ok
           EXPORT_EDGES,
           EXPORT_NORMALS,  # not yet
           EXPORT_NORMALS_HQ,  # not yet
           EXPORT_UV,  # ok
           EXPORT_MTL,
           EXPORT_COPY_IMAGES,
           EXPORT_APPLY_MODIFIERS,  # ok
           EXPORT_ROTX90,  # wrong
           EXPORT_BLEN_OBS,
           EXPORT_GROUP_BY_OB,
           EXPORT_GROUP_BY_MAT,
           EXPORT_KEEP_VERT_ORDER,
           EXPORT_POLYGROUPS,
           EXPORT_CURVE_AS_NURBS,
           EXPORT_SEL_ONLY,  # ok
           EXPORT_ALL_SCENES,  # XXX not working atm
           EXPORT_ANIMATION):  # Not used
    """Drive the OBJ export.

    Splits *filepath* into base + extension so a scene name and/or frame
    number can be inserted before the extension, then calls write_file()
    once per exported frame (one frame unless EXPORT_ANIMATION is set).
    """
    base_name, ext = os.path.splitext(filepath)
    # [base name, scene name, frame number, extension]
    context_name = [base_name, '', '', ext]

    orig_scene = context.scene

    # Exit edit mode before exporting, so current object states are exported properly.
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')

    # XXX only exporting one scene atm since changing
    # current scene is not possible.
    # Brecht says that ideally in 2.5 we won't need such a function,
    # allowing multiple scenes open at once.
    export_scenes = [orig_scene]

    # Export all scenes.
    for scene in export_scenes:
        orig_frame = scene.frame_current

        if EXPORT_ALL_SCENES:  # Add scene name into the context_name
            # WARNING: two scene names may clean_name() to the same string,
            # which would cause a filename collision.
            context_name[1] = '_%s' % bpy.path.clean_name(scene.name)

        # Export an animation?
        if EXPORT_ANIMATION:
            # Up to and including the end frame.
            # Bug fix: read frame_end from the scene being exported, not from
            # the operator context (which has no frame_end attribute).
            scene_frames = range(scene.frame_start, scene.frame_end + 1)
        else:
            scene_frames = [orig_frame]  # Dont export an animation.

        # Loop through all frames in the scene and export.
        for frame in scene_frames:
            if EXPORT_ANIMATION:  # Add frame to the filepath.
                context_name[2] = '_%.6d' % frame

            scene.frame_current = frame
            if EXPORT_SEL_ONLY:
                objects = context.selected_objects
            else:
                objects = scene.objects

            full_path = ''.join(context_name)

            # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
            # EXPORT THE FILE.
            write_file(full_path, objects, scene,
                       EXPORT_TRI,
                       EXPORT_EDGES,
                       EXPORT_NORMALS,
                       EXPORT_NORMALS_HQ,
                       EXPORT_UV,
                       EXPORT_MTL,
                       EXPORT_COPY_IMAGES,
                       EXPORT_APPLY_MODIFIERS,
                       EXPORT_ROTX90,
                       EXPORT_BLEN_OBS,
                       EXPORT_GROUP_BY_OB,
                       EXPORT_GROUP_BY_MAT,
                       EXPORT_KEEP_VERT_ORDER,
                       EXPORT_POLYGROUPS,
                       EXPORT_CURVE_AS_NURBS)

        # Restore the frame we started on.
        scene.frame_current = orig_frame

    # Restore old active scene.
    # orig_scene.makeCurrent()
    # Window.WaitCursor(0)
# 2005-10-11 02:32:58 +00:00
# 2009-12-13 14:00:39 +00:00
#
# NOTE(review): the block below is stray commit-log / changelog text that was
# accidentally pasted into the module. It is not valid Python, so it has been
# commented out to keep the file importable; content is preserved verbatim
# (apart from typo fixes).
#
# - add torus back from 2.4x as an operator
#   bpy.ops.mesh.primitive_torus_add(major_radius=1, minor_radius=0.25, major_segments=48, minor_segments=16)
# - experimental dynamic menus, used for INFO_MT_file, INFO_MT_file_import, INFO_MT_file_export and INFO_MT_mesh_add. these can have items added from python.
#   eg.
# - removed OBJECT_OT_mesh_add, use the python add menu instead.
# - made mesh primitive ops - MESH_OT_primitive_plane_add, ...cube_add, etc. work in object mode.
# - RNA scene.active_object wrapped
# - bugfix [#19466] 2.5: Tweak menu only available for mesh objects added within Edit Mode
#   ED_object_exit_editmode was always doing an undo push, made this optional using the existing flag - EM_DO_UNDO, called everywhere except when adding primitives.
#
# 2009-10-10 21:23:20 +00:00
#
# Currently the exporter lacks these features:
# * multiple scene export (only active scene is written)
# * particles
#
# define operator properties in the class, similar to django fields
# # Before
# [
#     bpy.props.StringProperty(attr="path", name="File Path", description="File path used for exporting the PLY file", maxlen= 1024, default= ""),
#     bpy.props.BoolProperty(attr="use_modifiers", name="Apply Modifiers", description="Apply Modifiers to the exported mesh", default= True),
#     bpy.props.BoolProperty(attr="use_normals", name="Export Normals", description="Export Normals for smooth and hard shaded faces", default= True),
#     bpy.props.BoolProperty(attr="use_uvs", name="Export UVs", description="Export the active UV layer", default= True),
#     bpy.props.BoolProperty(attr="use_colors", name="Export Vertex Colors", description="Export the active vertex color layer", default= True)
# ]
# # After
# path = StringProperty(attr="", name="File Path", description="File path used for exporting the PLY file", maxlen= 1024, default= "")
# use_modifiers = BoolProperty(attr="", name="Apply Modifiers", description="Apply Modifiers to the exported mesh", default= True)
# use_normals = BoolProperty(attr="", name="Export Normals", description="Export Normals for smooth and hard shaded faces", default= True)
# use_uvs = BoolProperty(attr="", name="Export UVs", description="Export the active UV layer", default= True)
# use_colors = BoolProperty(attr="", name="Export Vertex Colors", description="Export the active vertex color layer", default= True)
#
# 2009-10-31 16:40:14 +00:00
# 2010-09-01 12:11:34 +00:00
def save(operator, context, filepath="",
         use_triangles=False,
         use_edges=False,
         use_normals=False,
         use_hq_normals=False,
         use_uvs=True,
         use_materials=True,
         copy_images=False,
         use_modifiers=True,
         use_rotate_x90=True,
         use_blen_objects=True,
         group_by_object=False,
         group_by_material=False,
         keep_vertex_order=False,
         use_vertex_groups=False,
         use_nurbs=True,
         use_selection=True,
         use_all_scenes=False,
         use_animation=False,
         ):
    """Operator entry point for the OBJ exporter.

    Maps the UI-level option names onto the EXPORT_* keyword arguments
    that _write() expects, runs the export, and reports completion.
    """
    # Translate public option names to the internal EXPORT_* ones.
    export_kwargs = {
        'EXPORT_TRI': use_triangles,
        'EXPORT_EDGES': use_edges,
        'EXPORT_NORMALS': use_normals,
        'EXPORT_NORMALS_HQ': use_hq_normals,
        'EXPORT_UV': use_uvs,
        'EXPORT_MTL': use_materials,
        'EXPORT_COPY_IMAGES': copy_images,
        'EXPORT_APPLY_MODIFIERS': use_modifiers,
        'EXPORT_ROTX90': use_rotate_x90,
        'EXPORT_BLEN_OBS': use_blen_objects,
        'EXPORT_GROUP_BY_OB': group_by_object,
        'EXPORT_GROUP_BY_MAT': group_by_material,
        'EXPORT_KEEP_VERT_ORDER': keep_vertex_order,
        'EXPORT_POLYGROUPS': use_vertex_groups,
        'EXPORT_CURVE_AS_NURBS': use_nurbs,
        'EXPORT_SEL_ONLY': use_selection,
        'EXPORT_ALL_SCENES': use_all_scenes,
        'EXPORT_ANIMATION': use_animation,
    }

    _write(context, filepath, **export_kwargs)

    return {'FINISHED'}