# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
"""
Name : ' Wavefront (.obj)... '
2009-09-29 15:27:00 +00:00
Blender : 248
2004-06-08 04:43:40 +00:00
Group : ' Export '
2004-06-21 12:01:23 +00:00
Tooltip : ' Save a Wavefront OBJ File '
"""
2004-06-07 01:34:15 +00:00
2009-05-13 00:20:14 +00:00
__author__ = " Campbell Barton, Jiri Hnidek, Paolo Ciccone "
2008-10-19 15:53:22 +00:00
__url__ = [ ' http://wiki.blender.org/index.php/Scripts/Manual/Export/wavefront_obj ' , ' www.blender.org ' , ' blenderartists.org ' ]
2009-09-29 15:27:00 +00:00
__version__ = " 1.21 "
2004-11-07 16:31:13 +00:00
__bpydoc__ = """ \
This script is an exporter to OBJ file format .
Usage :
2006-09-25 05:12:37 +00:00
Select the objects you wish to export and run this script from " File->Export " menu .
Selecting the default options from the popup box will be good in most cases .
All objects that can be represented as a mesh ( mesh , curve , metaball , surface , text3d )
will be exported as mesh data .
2004-11-07 16:31:13 +00:00
"""
# import math and other in functions that use them for the sake of fast Blender startup
# import math
import os
import time
import bpy
import Mathutils
# Returns a tuple - path,extension.
# 'hello.obj' > ('hello', '.obj')
def splitExt(path):
    dotidx = path.rfind('.')
    if dotidx == -1:
        return path, ''
    else:
        return path[:dotidx], path[dotidx:]

def fixName(name):
    if name == None:
        return 'None'
    else:
        return name.replace(' ', '_')


# A Dict of Materials
# (material.name, image.name): matname_imagename # matname_imagename has gaps removed.
MTL_DICT = {}
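# Illustrative example of one entry (hypothetical material/image names):
#   MTL_DICT[('Material', 'tex.png')] = ('Material_tex.png', material, image)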

def write_mtl(scene, filename, copy_images):
    import shutil  # needed below for copying image files next to the .mtl

    world = scene.world
    worldAmb = world.ambient_color

    dest_dir = os.path.dirname(filename)

    def copy_image(image):
        rel = image.get_export_path(dest_dir, True)

        if copy_images:
            abspath = image.get_export_path(dest_dir, False)
            if not os.path.exists(abspath):
                shutil.copy(image.get_abs_filename(), abspath)

        return rel

    file = open(filename, "w")
    # XXX
    # file.write('# Blender3D MTL File: %s\n' % Blender.Get('filename').split('\\')[-1].split('/')[-1])
    file.write('# Material Count: %i\n' % len(MTL_DICT))
    # Write material/image combinations we have used.
    for key, (mtl_mat_name, mat, img) in MTL_DICT.items():

        # Get the Blender data for the material and the image.
        # Having an image named None will make a bug, don't do it :)

        file.write('newmtl %s\n' % mtl_mat_name) # Define a new material: matname_imgname

        if mat:
            file.write('Ns %.6f\n' % ((mat.specular_hardness - 1) * 1.9607843137254901)) # Hardness, convert Blender's 1-511 to MTL's
            file.write('Ka %.6f %.6f %.6f\n' % tuple([c * mat.ambient for c in worldAmb])) # Ambient, uses mirror colour,
            file.write('Kd %.6f %.6f %.6f\n' % tuple([c * mat.diffuse_intensity for c in mat.diffuse_color])) # Diffuse
            file.write('Ks %.6f %.6f %.6f\n' % tuple([c * mat.specular_intensity for c in mat.specular_color])) # Specular
            if hasattr(mat, "ior"):
                file.write('Ni %.6f\n' % mat.ior) # Refraction index
            else:
                file.write('Ni %.6f\n' % 1.0)
            file.write('d %.6f\n' % mat.alpha) # Alpha (obj uses 'd' for dissolve)

            # 0 to disable lighting, 1 for ambient & diffuse only (specular color set to black), 2 for full lighting.
            if mat.shadeless:
                file.write('illum 0\n') # ignore lighting
            elif mat.specular_intensity == 0:
                file.write('illum 1\n') # no specular.
            else:
                file.write('illum 2\n') # light normally

        else:
            # write a dummy material here?
            file.write('Ns 0\n')
            file.write('Ka %.6f %.6f %.6f\n' % tuple([c for c in worldAmb])) # Ambient, uses mirror colour,
            file.write('Kd 0.8 0.8 0.8\n')
            file.write('Ks 0.8 0.8 0.8\n')
            file.write('d 1\n') # No alpha
            file.write('illum 2\n') # light normally

        # Write images!
        if img: # We have an image on the face!
            # write relative image path
            rel = copy_image(img)
            file.write('map_Kd %s\n' % rel) # Diffuse mapping image
            # file.write('map_Kd %s\n' % img.filename.split('\\')[-1].split('/')[-1]) # Diffuse mapping image

        elif mat: # No face image. If we have a material, search for an MTex image.
            for mtex in mat.texture_slots:
                if mtex and mtex.texture.type == 'IMAGE':
                    try:
                        filename = copy_image(mtex.texture.image)
                        # filename = mtex.texture.image.filename.split('\\')[-1].split('/')[-1]
                        file.write('map_Kd %s\n' % filename) # Diffuse mapping image
                        break
                    except:
                        # Texture has no image though it's an image type, best ignore.
                        pass

        file.write('\n\n')

    file.close()
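
# For reference, a material block written by write_mtl() looks roughly like this
# (values are illustrative only):
#
#   newmtl Material_tex.png
#   Ns 96.078431
#   Ka 0.000000 0.000000 0.000000
#   Kd 0.640000 0.640000 0.640000
#   Ks 0.500000 0.500000 0.500000
#   Ni 1.000000
#   d 1.000000
#   illum 2
#   map_Kd tex.png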

# XXX not used
def copy_file(source, dest):
    file = open(source, 'rb')
    data = file.read()
    file.close()

    file = open(dest, 'wb')
    file.write(data)
    file.close()

# XXX not used
def copy_images(dest_dir):
    if dest_dir[-1] != os.sep:
        dest_dir += os.sep
    # if dest_dir[-1] != sys.sep:
    #     dest_dir += sys.sep

    # Get unique image names
    uniqueImages = {}
    for matname, mat, image in MTL_DICT.values(): # Only use image name
        # Get Texface images
        if image:
            uniqueImages[image] = image # Should use sets here. wait until Python 2.4 is default.

        # Get MTex images
        if mat:
            for mtex in mat.texture_slots:
                if mtex and mtex.texture.type == 'IMAGE':
                    image_tex = mtex.texture.image
                    if image_tex:
                        try:
                            uniqueImages[image_tex] = image_tex
                        except:
                            pass

    # Now copy images
    copyCount = 0

    # for bImage in uniqueImages.values():
    #     image_path = bpy.utils.expandpath(bImage.filename)
    #     if bpy.sys.exists(image_path):
    #         # Make a name for the target path.
    #         dest_image_path = dest_dir + image_path.split('\\')[-1].split('/')[-1]
    #         if not bpy.utils.exists(dest_image_path): # Image isn't already there
    #             print('\tCopying "%s" > "%s"' % (image_path, dest_image_path))
    #             copy_file(image_path, dest_image_path)
    #             copyCount += 1

    # paths = bpy.util.copy_images(uniqueImages.values(), dest_dir)

    print('\tCopied %d images' % copyCount)
    # print('\tCopied %d images' % copyCount)

# XXX not converted
def test_nurbs_compat(ob):
    if ob.type != 'Curve':
        return False

    for nu in ob.data:
        if (not nu.knotsV) and nu.type != 1: # not a surface and not bezier
            return True

    return False

# XXX not converted
def write_nurb(file, ob, ob_mat):
    tot_verts = 0
    cu = ob.data

    # use negative indices
    Vector = Blender.Mathutils.Vector
    for nu in cu:
        if nu.type == 0: DEG_ORDER_U = 1
        else: DEG_ORDER_U = nu.orderU - 1 # Tested to be correct

        if nu.type == 1:
            print("\tWarning, bezier curve:", ob.name, "only poly and nurbs curves supported")
            continue

        if nu.knotsV:
            print("\tWarning, surface:", ob.name, "only poly and nurbs curves supported")
            continue

        if len(nu) <= DEG_ORDER_U:
            print("\tWarning, orderU is lower than vert count, skipping:", ob.name)
            continue

        pt_num = 0
        do_closed = (nu.flagU & 1)
        do_endpoints = (do_closed == 0) and (nu.flagU & 2)

        for pt in nu:
            pt = Vector(pt[0], pt[1], pt[2]) * ob_mat
            file.write('v %.6f %.6f %.6f\n' % (pt[0], pt[1], pt[2]))
            pt_num += 1
        tot_verts += pt_num

        file.write('g %s\n' % (fixName(ob.name))) # fixName(ob.getData(1)) could use the data name too
        file.write('cstype bspline\n') # not ideal, hard coded
        file.write('deg %d\n' % DEG_ORDER_U) # not used for curves but most files have it still

        curve_ls = [-(i + 1) for i in range(pt_num)]

        # 'curv' keyword
        if do_closed:
            if DEG_ORDER_U == 1:
                pt_num += 1
                curve_ls.append(-1)
            else:
                pt_num += DEG_ORDER_U
                curve_ls = curve_ls + curve_ls[0:DEG_ORDER_U]

        file.write('curv 0.0 1.0 %s\n' % (' '.join([str(i) for i in curve_ls]))) # Blender has no U and V values for the curve

        # 'parm' keyword
        tot_parm = (DEG_ORDER_U + 1) + pt_num
        tot_parm_div = float(tot_parm - 1)
        parm_ls = [(i / tot_parm_div) for i in range(tot_parm)]

        if do_endpoints: # end points, force param
            for i in range(DEG_ORDER_U + 1):
                parm_ls[i] = 0.0
                parm_ls[-(1 + i)] = 1.0

        file.write('parm u %s\n' % ' '.join([str(i) for i in parm_ls]))

        file.write('end\n')

    return tot_verts
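
# For reference, write_nurb() emits each curve roughly as below (illustrative:
# a 4-point poly curve, so DEG_ORDER_U is 1; negative indices refer back to the
# 'v' lines written just above):
#
#   g Curve
#   cstype bspline
#   deg 1
#   curv 0.0 1.0 -1 -2 -3 -4
#   parm u 0.0 0.2 0.4 0.6 0.8 1.0
#   end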

def write(filename, objects, scene,
          EXPORT_TRI=False,
          EXPORT_EDGES=False,
          EXPORT_NORMALS=False,
          EXPORT_NORMALS_HQ=False,
          EXPORT_UV=True,
          EXPORT_MTL=True,
          EXPORT_COPY_IMAGES=False,
          EXPORT_APPLY_MODIFIERS=True,
          EXPORT_ROTX90=True,
          EXPORT_BLEN_OBS=True,
          EXPORT_GROUP_BY_OB=False,
          EXPORT_GROUP_BY_MAT=False,
          EXPORT_KEEP_VERT_ORDER=False,
          EXPORT_POLYGROUPS=False,
          EXPORT_CURVE_AS_NURBS=True):
    '''
    Basic write function. The context and options must already be set.
    This can be accessed externally, e.g.

    write('c:\\test\\foobar.obj', context.selected_objects, context.scene) # Using default options.
    '''
    # XXX
    import math

    def veckey3d(v):
        return round(v.x, 6), round(v.y, 6), round(v.z, 6)

    def veckey2d(v):
        return round(v[0], 6), round(v[1], 6)
        # return round(v.x, 6), round(v.y, 6)

    def findVertexGroupName(face, vWeightMap):
        """
        Searches the vertexDict to see which groups are assigned to a given face.
        We use a frequency system in order to sort out the name, because a given vertex can
        belong to two or more groups at the same time. To find the right name for the face
        we list all the possible vertex group names with their frequency and then sort by
        frequency in descending order. The top element is the one shared by the highest
        number of vertices and is the face's group.
        """
        weightDict = {}
        for vert_index in face.verts:
            # for vert in face:
            vWeights = vWeightMap[vert_index]
            # vWeights = vWeightMap[vert]
            for vGroupName, weight in vWeights:
                weightDict[vGroupName] = weightDict.get(vGroupName, 0) + weight

        if weightDict:
            alist = [(weight, vGroupName) for vGroupName, weight in weightDict.items()] # sort least to greatest amount of weight
            alist.sort()
            return (alist[-1][1]) # highest value last
        else:
            return '(null)'

    # TODO: implement this in C? dunno how it should be called...
    def getVertsFromGroup(me, group_index):
        ret = []

        for i, v in enumerate(me.verts):
            for g in v.groups:
                if g.group == group_index:
                    ret.append((i, g.weight))

        return ret

    print('OBJ Export path: "%s"' % filename)
    temp_mesh_name = '~tmp-mesh'

    time1 = time.clock()
    # time1 = sys.time()
    # scn = Scene.GetCurrent()
    file = open(filename, "w")

    # Write Header
    file.write('# Blender3D v%s OBJ File: %s\n' % (bpy.app.version_string, bpy.data.filename.split('/')[-1].split('\\')[-1]))
    file.write('# www.blender3d.org\n')

    # Tell the obj file what material file to use.
    if EXPORT_MTL:
        mtlfilename = '%s.mtl' % '.'.join(filename.split('.')[:-1])
        file.write('mtllib %s\n' % (mtlfilename.split('\\')[-1].split('/')[-1]))

    if EXPORT_ROTX90:
        mat_xrot90 = Mathutils.RotationMatrix(-math.pi / 2, 4, 'X')

    # Initialize totals, these are updated each object
    totverts = totuvco = totno = 1

    face_vert_index = 1

    globalNormals = {}

    # Get all meshes
    for ob_main in objects:

        # ignore dupli children
        if ob_main.parent and ob_main.parent.dupli_type != 'NONE':
            # XXX
            print(ob_main.name, 'is a dupli child - ignoring')
            continue

        obs = []
        if ob_main.dupli_type != 'NONE':
            # XXX
            print('creating dupli_list on', ob_main.name)
            ob_main.create_dupli_list()

            obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]

            # XXX debug print
            print(ob_main.name, 'has', len(obs), 'dupli children')
        else:
            obs = [(ob_main, ob_main.matrix)]

        for ob, ob_mat in obs:

            # XXX postponed
            # # Nurbs curve support
            # if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
            #     if EXPORT_ROTX90:
            #         ob_mat = ob_mat * mat_xrot90
            #     totverts += write_nurb(file, ob, ob_mat)
            #     continue
            # # end nurbs

            if ob.type != 'MESH':
                continue

            me = ob.create_mesh(EXPORT_APPLY_MODIFIERS, 'PREVIEW')

            if EXPORT_ROTX90:
                me.transform(mat_xrot90 * ob_mat)
            else:
                me.transform(ob_mat)

            # # Will work for non meshes now! :)
            # me = BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, EXPORT_POLYGROUPS, scn)
            # if not me:
            #     continue

            if EXPORT_UV:
                faceuv = len(me.uv_textures) > 0
            else:
                faceuv = False
            # XXX - todo, find a better way to do triangulation
            # ...removed convert_to_triface because it relies on editmesh
            '''
            # We have a valid mesh
            if EXPORT_TRI and me.faces:
                # Add a dummy object to it.
                has_quads = False
                for f in me.faces:
                    if f.verts[3] != 0:
                        has_quads = True
                        break

                if has_quads:
                    newob = bpy.data.objects.new('temp_object', me)
                    # if we forget to set Object.data - crash
                    scene.objects.link(newob)
                    newob.convert_to_triface(scene)
                    # mesh will still be there
                    scene.objects.unlink(newob)
            '''

            # Make our own list so it can be sorted to reduce context switching
            face_index_pairs = [(face, index) for index, face in enumerate(me.faces)]
            # faces = [ f for f in me.faces ]

            if EXPORT_EDGES:
                edges = me.edges
            else:
                edges = []

            if not (len(face_index_pairs) + len(edges) + len(me.verts)): # Make sure there is something to write
                # clean up
                bpy.data.meshes.remove(me)
                continue # don't bother with this mesh.

            # XXX
            # High Quality Normals
            if EXPORT_NORMALS and face_index_pairs:
                me.calc_normals()
                # if EXPORT_NORMALS_HQ:
                #     BPyMesh.meshCalcNormals(me)
                # else:
                #     # transforming normals is incorrect
                #     # when the matrix is scaled,
                #     # better to recalculate them
                #     me.calcNormals()
2009-12-13 14:00:39 +00:00
materials = me . materials
materialNames = [ ]
materialItems = [ m for m in materials ]
if materials :
for mat in materials :
if mat : # !=None
materialNames . append ( mat . name )
else :
materialNames . append ( None )
# Cant use LC because some materials are None.
# materialNames = map(lambda mat: mat.name, materials) # Bug Blender, dosent account for null materials, still broken.
# Possible there null materials, will mess up indicies
# but at least it will export, wait until Blender gets fixed.
materialNames . extend ( ( 16 - len ( materialNames ) ) * [ None ] )
materialItems . extend ( ( 16 - len ( materialItems ) ) * [ None ] )
# Sort by Material, then images
# so we dont over context switch in the obj file.
if EXPORT_KEEP_VERT_ORDER :
pass
elif faceuv :
# XXX update
tface = me . active_uv_texture . data
2010-01-09 23:44:01 +00:00
face_index_pairs . sort ( key = lambda a : ( a [ 0 ] . material_index , tface [ a [ 1 ] ] . image , a [ 0 ] . smooth ) )
2009-12-13 14:00:39 +00:00
elif len ( materials ) > 1 :
2010-01-09 23:44:01 +00:00
face_index_pairs . sort ( key = lambda a : ( a [ 0 ] . material_index , a [ 0 ] . smooth ) )
2009-12-13 14:00:39 +00:00
else :
# no materials
2010-01-09 23:44:01 +00:00
face_index_pairs . sort ( key = lambda a : a [ 0 ] . smooth )
2009-09-29 15:27:00 +00:00
# if EXPORT_KEEP_VERT_ORDER:
# pass
# elif faceuv:
# try: faces.sort(key = lambda a: (a.mat, a.image, a.smooth))
# except: faces.sort(lambda a,b: cmp((a.mat, a.image, a.smooth), (b.mat, b.image, b.smooth)))
# elif len(materials) > 1:
# try: faces.sort(key = lambda a: (a.mat, a.smooth))
# except: faces.sort(lambda a,b: cmp((a.mat, a.smooth), (b.mat, b.smooth)))
# else:
# # no materials
# try: faces.sort(key = lambda a: a.smooth)
# except: faces.sort(lambda a,b: cmp(a.smooth, b.smooth))
2009-12-13 14:00:39 +00:00
faces = [ pair [ 0 ] for pair in face_index_pairs ]
# Set the default mat to no material and no image.
contextMat = ( 0 , 0 ) # Can never be this, so we will label a new material teh first chance we get.
contextSmooth = None # Will either be true or false, set bad to force initialization switch.
            if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
                name1 = ob.name
                name2 = ob.data.name
                if name1 == name2:
                    obnamestring = fixName(name1)
                else:
                    obnamestring = '%s_%s' % (fixName(name1), fixName(name2))

                if EXPORT_BLEN_OBS:
                    file.write('o %s\n' % obnamestring) # Write Object name
                else: # if EXPORT_GROUP_BY_OB:
                    file.write('g %s\n' % obnamestring)

            # Vert
            for v in me.verts:
                file.write('v %.6f %.6f %.6f\n' % tuple(v.co))

            # UV
            if faceuv:
                uv_face_mapping = [[0, 0, 0, 0] for f in faces] # a bit of a waste for tri's :/

                uv_dict = {} # could use a set() here
                uv_layer = me.active_uv_texture
                for f, f_index in face_index_pairs:
                    tface = uv_layer.data[f_index]

                    # workaround, since tface.uv iteration is wrong atm
                    uvs = tface.uv
                    # uvs = [tface.uv1, tface.uv2, tface.uv3]

                    # # add another UV if it's a quad
                    # if len(f.verts) == 4:
                    #     uvs.append(tface.uv4)

                    for uv_index, uv in enumerate(uvs):
                        uvkey = veckey2d(uv)
                        try:
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
                        except:
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
                            file.write('vt %.6f %.6f\n' % tuple(uv))

                # uv_dict = {} # could use a set() here
                # for f_index, f in enumerate(faces):
                #     for uv_index, uv in enumerate(f.uv):
                #         uvkey = veckey2d(uv)
                #         try:
                #             uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
                #         except:
                #             uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
                #             file.write('vt %.6f %.6f\n' % tuple(uv))

                uv_unique_count = len(uv_dict)
                # del uv, uvkey, uv_dict, f_index, uv_index
                # Only need uv_unique_count and uv_face_mapping
            # NORMAL, Smooth/Non smoothed.
            if EXPORT_NORMALS:
                for f in faces:
                    if f.smooth:
                        for vIdx in f.verts:
                            v = me.verts[vIdx]
                            noKey = veckey3d(v.normal)
                            if noKey not in globalNormals:
                                globalNormals[noKey] = totno
                                totno += 1
                                file.write('vn %.6f %.6f %.6f\n' % noKey)
                    else:
                        # Hard, 1 normal from the face.
                        noKey = veckey3d(f.normal)
                        if noKey not in globalNormals:
                            globalNormals[noKey] = totno
                            totno += 1
                            file.write('vn %.6f %.6f %.6f\n' % noKey)

            if not faceuv:
                f_image = None

            # XXX
            if EXPORT_POLYGROUPS:
                # Retrieve the list of vertex groups
                # vertGroupNames = me.getVertGroupNames()

                currentVGroup = ''
                # Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
                vgroupsMap = [[] for _i in range(len(me.verts))]
                # vgroupsMap = [[] for _i in xrange(len(me.verts))]
                for g in ob.vertex_groups:
                    # for vertexGroupName in vertGroupNames:
                    for vIdx, vWeight in getVertsFromGroup(me, g.index):
                        # for vIdx, vWeight in me.getVertsFromGroup(vertexGroupName, 1):
                        vgroupsMap[vIdx].append((g.name, vWeight))
            for f_index, f in enumerate(faces):
                f_v = [{"index": index, "vertex": me.verts[index]} for index in f.verts]

                # if f.verts[3] == 0:
                #     f_v.pop()
                # f_v = f.v

                f_smooth = f.smooth
                f_mat = min(f.material_index, len(materialNames) - 1)
                # f_mat = min(f.mat, len(materialNames)-1)

                if faceuv:
                    tface = me.active_uv_texture.data[face_index_pairs[f_index][1]]

                    f_image = tface.image
                    f_uv = tface.uv
                    # f_uv = [tface.uv1, tface.uv2, tface.uv3]
                    # if len(f.verts) == 4:
                    #     f_uv.append(tface.uv4)
                    # f_image = f.image
                    # f_uv = f.uv

                # MAKE KEY
                if faceuv and f_image: # Object is always true.
                    key = materialNames[f_mat], f_image.name
                else:
                    key = materialNames[f_mat], None # No image, use None instead.

                # Write the vertex group
                if EXPORT_POLYGROUPS:
                    if len(ob.vertex_groups):
                        # find what vertex group the face belongs to
                        theVGroup = findVertexGroupName(f, vgroupsMap)
                        if theVGroup != currentVGroup:
                            currentVGroup = theVGroup
                            file.write('g %s\n' % theVGroup)

                # # Write the vertex group
                # if EXPORT_POLYGROUPS:
                #     if vertGroupNames:
                #         # find what vertex group the face belongs to
                #         theVGroup = findVertexGroupName(f, vgroupsMap)
                #         if theVGroup != currentVGroup:
                #             currentVGroup = theVGroup
                #             file.write('g %s\n' % theVGroup)
                # CHECK FOR CONTEXT SWITCH
                if key == contextMat:
                    pass # Context already switched, don't do anything
                else:
                    if key[0] == None and key[1] == None:
                        # Write a null material, since we know the context has changed.
                        if EXPORT_GROUP_BY_MAT:
                            # can be mat_image or (null)
                            file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.data.name))) # can be mat_image or (null)
                        file.write('usemtl (null)\n') # mat, image

                    else:
                        mat_data = MTL_DICT.get(key)
                        if not mat_data:
                            # First add to global dict so we can export to mtl
                            # Then write mtl

                            # Make a new name from the mat and image name,
                            # converting any spaces to underscores with fixName.

                            # If there is no image, don't bother adding it to the name
                            if key[1] == None:
                                mat_data = MTL_DICT[key] = ('%s' % fixName(key[0])), materialItems[f_mat], f_image
                            else:
                                mat_data = MTL_DICT[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image

                        if EXPORT_GROUP_BY_MAT:
                            file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.data.name), mat_data[0])) # can be mat_image or (null)

                        file.write('usemtl %s\n' % mat_data[0]) # can be mat_image or (null)

                contextMat = key

                if f_smooth != contextSmooth:
                    if f_smooth: # was off, now on
                        file.write('s 1\n')
                        contextSmooth = f_smooth
                    else: # was on, now off
                        file.write('s off\n')
                        contextSmooth = f_smooth
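
                # For reference, each face line written below takes one of these
                # forms, depending on the UV/normal options (indices illustrative):
                #   f 1/1/1 2/2/1 3/3/1   (vert/uv/normal)
                #   f 1/1 2/2 3/3         (vert/uv, no normals)
                #   f 1//1 2//1 3//1      (vert//normal, no UVs)
                #   f 1 2 3               (vert only)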
                file.write('f')
                if faceuv:
                    if EXPORT_NORMALS:
                        if f_smooth: # Smoothed, use vertex normals
                            for vi, v in enumerate(f_v):
                                file.write(' %d/%d/%d' %
                                           (v["index"] + totverts,
                                            totuvco + uv_face_mapping[f_index][vi],
                                            globalNormals[veckey3d(v["vertex"].normal)])) # vert, uv, normal
                        else: # No smoothing, face normals
                            no = globalNormals[veckey3d(f.normal)]
                            for vi, v in enumerate(f_v):
                                file.write(' %d/%d/%d' %
                                           (v["index"] + totverts,
                                            totuvco + uv_face_mapping[f_index][vi],
                                            no)) # vert, uv, normal
                    else: # No Normals
                        for vi, v in enumerate(f_v):
                            file.write(' %d/%d' % (
                                       v["index"] + totverts,
                                       totuvco + uv_face_mapping[f_index][vi])) # vert, uv

                    face_vert_index += len(f_v)

                else: # No UVs
                    if EXPORT_NORMALS:
                        if f_smooth: # Smoothed, use vertex normals
                            for v in f_v:
                                file.write(' %d//%d' %
                                           (v["index"] + totverts, globalNormals[veckey3d(v["vertex"].normal)]))
                        else: # No smoothing, face normals
                            no = globalNormals[veckey3d(f.normal)]
                            for v in f_v:
                                file.write(' %d//%d' % (v["index"] + totverts, no))
                    else: # No Normals
                        for v in f_v:
                            file.write(' %d' % (v["index"] + totverts))

                file.write('\n')

            # Write edges.
            if EXPORT_EDGES:
                for ed in edges:
                    if ed.loose:
                        file.write('f %d %d\n' % (ed.verts[0] + totverts, ed.verts[1] + totverts))

            # Make the indices global rather than per mesh
            totverts += len(me.verts)
            if faceuv:
                totuvco += uv_unique_count

            # clean up
            bpy.data.meshes.remove(me)

        if ob_main.dupli_type != 'NONE':
            ob_main.free_dupli_list()

    file.close()

    # Now we have all our materials, save them
    if EXPORT_MTL:
        write_mtl(scene, mtlfilename, EXPORT_COPY_IMAGES)

    # if EXPORT_COPY_IMAGES:
    #     dest_dir = os.path.basename(filename)
    #     # dest_dir = filename
    #     # # Remove chars until we are just the path.
    #     # while dest_dir and dest_dir[-1] not in '\\/':
    #     #     dest_dir = dest_dir[:-1]
    #     if dest_dir:
    #         copy_images(dest_dir)
    #     else:
    #         print('\tError: "%s" could not be used as a base for an image path.' % filename)

    print("OBJ Export time: %.2f" % (time.clock() - time1))
    # print("OBJ Export time: %.2f" % (sys.time() - time1))

def do_export(filename, context,
              EXPORT_APPLY_MODIFIERS=True, # not used
              EXPORT_ROTX90=True, # wrong
              EXPORT_TRI=False, # ok
              EXPORT_EDGES=False,
              EXPORT_NORMALS=False, # not yet
              EXPORT_NORMALS_HQ=False, # not yet
              EXPORT_UV=True, # ok
              EXPORT_MTL=True,
              EXPORT_SEL_ONLY=True, # ok
              EXPORT_ALL_SCENES=False, # XXX not working atm
              EXPORT_ANIMATION=False,
              EXPORT_COPY_IMAGES=False,
              EXPORT_BLEN_OBS=True,
              EXPORT_GROUP_BY_OB=False,
              EXPORT_GROUP_BY_MAT=False,
              EXPORT_KEEP_VERT_ORDER=False,
              EXPORT_POLYGROUPS=False,
              EXPORT_CURVE_AS_NURBS=True):

    # Window.EditMode(0)
    # Window.WaitCursor(1)

    base_name, ext = splitExt(filename)
    context_name = [base_name, '', '', ext] # Base name, scene name, frame number, extension

    orig_scene = context.scene

    # if EXPORT_ALL_SCENES:
    #     export_scenes = bpy.data.scenes
    # else:
    #     export_scenes = [orig_scene]

    # XXX only exporting one scene atm since changing
    # current scene is not possible.
    # Brecht says that ideally in 2.5 we won't need such a function,
    # allowing multiple scenes open at once.
    export_scenes = [orig_scene]

    # Export all scenes.
    for scn in export_scenes:
        # scn.makeCurrent() # If already current, this is not slow.
        # context = scn.getRenderingContext()
        orig_frame = scn.current_frame

        if EXPORT_ALL_SCENES: # Add scene name into the context_name
            context_name[1] = '_%s' % bpy.utils.clean_name(scn.name) # WARNING, it's possible that this could cause a collision. we could fix it if we were feeling paranoid.

        # Export an animation?
        if EXPORT_ANIMATION:
            scene_frames = range(scn.start_frame, scn.end_frame + 1) # Up to and including the end frame.
        else:
            scene_frames = [orig_frame] # Don't export an animation.

        # Loop through all frames in the scene and export.
        for frame in scene_frames:
            if EXPORT_ANIMATION: # Add frame to the filename.
                context_name[2] = '_%.6d' % frame

            scn.current_frame = frame
            if EXPORT_SEL_ONLY:
                export_objects = context.selected_objects
            else:
                export_objects = scn.objects

            full_path = ''.join(context_name)

            # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
            # EXPORT THE FILE.
            write(full_path, export_objects, scn,
                  EXPORT_TRI, EXPORT_EDGES, EXPORT_NORMALS,
                  EXPORT_NORMALS_HQ, EXPORT_UV, EXPORT_MTL,
                  EXPORT_COPY_IMAGES, EXPORT_APPLY_MODIFIERS,
                  EXPORT_ROTX90, EXPORT_BLEN_OBS,
                  EXPORT_GROUP_BY_OB, EXPORT_GROUP_BY_MAT, EXPORT_KEEP_VERT_ORDER,
                  EXPORT_POLYGROUPS, EXPORT_CURVE_AS_NURBS)

        scn.current_frame = orig_frame

    # Restore old active scene.
    # orig_scene.makeCurrent()
    # Window.WaitCursor(0)
'''
Currently the exporter lacks these features:
* nurbs
* multiple scene export (only active scene is written)
* particles
'''

from bpy.props import *


class ExportOBJ(bpy.types.Operator):
    '''Save a Wavefront OBJ File'''
    bl_idname = "export.obj"
    bl_label = 'Export OBJ'

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.

    path = StringProperty(name="File Path", description="File path used for exporting the OBJ file", maxlen=1024, default="")
    check_existing = BoolProperty(name="Check Existing", description="Check and warn on overwriting existing files", default=True, options={'HIDDEN'})

    # context group
    use_selection = BoolProperty(name="Selection Only", description="", default=False)
    use_all_scenes = BoolProperty(name="All Scenes", description="", default=False)
    use_animation = BoolProperty(name="All Animation", description="", default=False)

    # object group
    use_modifiers = BoolProperty(name="Apply Modifiers", description="", default=True)
    use_rotate90 = BoolProperty(name="Rotate X90", description="", default=True)

    # extra data group
    use_edges = BoolProperty(name="Edges", description="", default=True)
    use_normals = BoolProperty(name="Normals", description="", default=False)
    use_hq_normals = BoolProperty(name="High Quality Normals", description="", default=True)
    use_uvs = BoolProperty(name="UVs", description="", default=True)
    use_materials = BoolProperty(name="Materials", description="", default=True)
    copy_images = BoolProperty(name="Copy Images", description="", default=False)
    use_triangles = BoolProperty(name="Triangulate", description="", default=False)
    use_vertex_groups = BoolProperty(name="Polygroups", description="", default=False)
    use_nurbs = BoolProperty(name="Nurbs", description="", default=False)

    # grouping group
    use_blen_objects = BoolProperty(name="Objects as OBJ Objects", description="", default=True)
    group_by_object = BoolProperty(name="Objects as OBJ Groups", description="", default=False)
    group_by_material = BoolProperty(name="Material Groups", description="", default=False)
    keep_vertex_order = BoolProperty(name="Keep Vertex Order", description="", default=False)

    def execute(self, context):
        path = self.properties.path
        if not path.lower().endswith(".obj"):
            path += ".obj"

        do_export(path, context,
                  EXPORT_TRI=self.properties.use_triangles,
                  EXPORT_EDGES=self.properties.use_edges,
                  EXPORT_NORMALS=self.properties.use_normals,
                  EXPORT_NORMALS_HQ=self.properties.use_hq_normals,
                  EXPORT_UV=self.properties.use_uvs,
                  EXPORT_MTL=self.properties.use_materials,
                  EXPORT_COPY_IMAGES=self.properties.copy_images,
                  EXPORT_APPLY_MODIFIERS=self.properties.use_modifiers,
                  EXPORT_ROTX90=self.properties.use_rotate90,
                  EXPORT_BLEN_OBS=self.properties.use_blen_objects,
                  EXPORT_GROUP_BY_OB=self.properties.group_by_object,
                  EXPORT_GROUP_BY_MAT=self.properties.group_by_material,
                  EXPORT_KEEP_VERT_ORDER=self.properties.keep_vertex_order,
                  EXPORT_POLYGROUPS=self.properties.use_vertex_groups,
                  EXPORT_CURVE_AS_NURBS=self.properties.use_nurbs,
                  EXPORT_SEL_ONLY=self.properties.use_selection,
                  EXPORT_ALL_SCENES=self.properties.use_all_scenes)

        return {'FINISHED'}

    def invoke(self, context, event):
        wm = context.manager
        wm.add_fileselect(self)
        return {'RUNNING_MODAL'}

def menu_func(self, context):
    default_path = bpy.data.filename.replace(".blend", ".obj")
    self.layout.operator(ExportOBJ.bl_idname, text="Wavefront (.obj)").path = default_path

def register():
    bpy.types.register(ExportOBJ)
    bpy.types.INFO_MT_file_export.append(menu_func)


def unregister():
    bpy.types.unregister(ExportOBJ)
    bpy.types.INFO_MT_file_export.remove(menu_func)
# CONVERSION ISSUES
# - matrix problem
# - duplis - only tested dupliverts
# - NURBS - needs API additions
# - all scenes export
# + normals calculation
if __name__ == " __main__ " :
register ( )