# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

# Script copyright (C) Campbell Barton
# Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone

"""
This script imports Wavefront OBJ files into Blender.

Usage:
Run this script from the "File->Import" menu and then load the desired OBJ file.
Note, this loads mesh objects and materials only; nurbs and curves are not supported.

http://wiki.blender.org/index.php/Scripts/Manual/Import/wavefront_obj
"""

import os
import time
import bpy
import mathutils
from mathutils.geometry import PolyFill
from io_utils import load_image, unpack_list, unpack_face_list


def BPyMesh_ngon(from_data, indices, PREF_FIX_LOOPS=True):
    '''
    Takes a polyline of indices (fgon)
    and returns a list of face index lists.
    Designed to be used for importers that need indices for an fgon to create from existing verts.

    from_data: either a mesh, or a list/tuple of vectors.
    indices: a list of indices to use, this list is the ordered closed polyline to fill, and can be a subset of the data given.
    PREF_FIX_LOOPS: If this is enabled, polylines that use loops to make multiple polylines are dealt with correctly.
    '''

    if not set:  # Need sets for this, otherwise do a normal fill.
        PREF_FIX_LOOPS = False

    Vector = mathutils.Vector

    if not indices:
        return []

    #    return []

    def rvec(co):
        return round(co.x, 6), round(co.y, 6), round(co.z, 6)

    def mlen(co):
        return abs(co[0]) + abs(co[1]) + abs(co[2])  # manhattan length of a vector, faster than length

    def vert_treplet(v, i):
        return v, rvec(v), i, mlen(v)

    def ed_key_mlen(v1, v2):
        if v1[3] > v2[3]:
            return v2[1], v1[1]
        else:
            return v1[1], v2[1]

    if not PREF_FIX_LOOPS:
        '''
        Normal single concave loop filling
        '''
        if type(from_data) in (tuple, list):
            verts = [Vector(from_data[i]) for ii, i in enumerate(indices)]
        else:
            verts = [from_data.vertices[i].co for ii, i in enumerate(indices)]

        for i in range(len(verts) - 1, 0, -1):  # same as reversed(xrange(1, len(verts))):
            if verts[i][1] == verts[i - 1][0]:
                verts.pop(i - 1)

        fill = PolyFill([verts])

    else:
        '''
        Separate this loop into multiple loops by finding edges that are used twice
        This is used by lightwave LWO files a lot
        '''
        if type(from_data) in (tuple, list):
            verts = [vert_treplet(Vector(from_data[i]), ii) for ii, i in enumerate(indices)]
        else:
            verts = [vert_treplet(from_data.vertices[i].co, ii) for ii, i in enumerate(indices)]

        edges = [(i, i - 1) for i in range(len(verts))]
        if edges:
            edges[0] = (0, len(verts) - 1)

        if not verts:
            return []

        edges_used = set()
        edges_doubles = set()
        # We need to check if any edges are used twice location based.
        for ed in edges:
            edkey = ed_key_mlen(verts[ed[0]], verts[ed[1]])
            if edkey in edges_used:
                edges_doubles.add(edkey)
            else:
                edges_used.add(edkey)

        # Store a list of unconnected loop segments split by double edges.
        # will join later
        loop_segments = []

        v_prev = verts[0]
        context_loop = [v_prev]
        loop_segments = [context_loop]

        for v in verts:
            if v != v_prev:
                # Are we crossing an edge we removed?
                if ed_key_mlen(v, v_prev) in edges_doubles:
                    context_loop = [v]
                    loop_segments.append(context_loop)
                else:
                    if context_loop and context_loop[-1][1] == v[1]:
                        #raise "as"
                        pass
                    else:
                        context_loop.append(v)

                v_prev = v

        # Now join loop segments

        def join_seg(s1, s2):
            if s2[-1][1] == s1[0][1]:
                s1, s2 = s2, s1
            elif s1[-1][1] == s2[0][1]:
                pass
            else:
                return False

            # If we're still here, s1 and s2 are 2 segments in the same polyline
            s1.pop()  # remove the last vert from s1
            s1.extend(s2)  # add segment 2 to segment 1

            if s1[0][1] == s1[-1][1]:  # remove endpoints double
                s1.pop()

            s2[:] = []  # Empty this segment s2 so we dont use it again.
            return True

        joining_segments = True
        while joining_segments:
            joining_segments = False
            segcount = len(loop_segments)

            for j in range(segcount - 1, -1, -1):  # reversed(range(segcount)):
                seg_j = loop_segments[j]
                if seg_j:
                    for k in range(j - 1, -1, -1):  # reversed(range(j)):
                        if not seg_j:
                            break
                        seg_k = loop_segments[k]

                        if seg_k and join_seg(seg_j, seg_k):
                            joining_segments = True

        loop_list = loop_segments

        for verts in loop_list:
            while verts and verts[0][1] == verts[-1][1]:
                verts.pop()

        loop_list = [verts for verts in loop_list if len(verts) > 2]
        # DONE DEALING WITH LOOP FIXING

        # vert mapping
        vert_map = [None] * len(indices)
        ii = 0
        for verts in loop_list:
            if len(verts) > 2:
                for i, vert in enumerate(verts):
                    vert_map[i + ii] = vert[2]
                ii += len(verts)

        fill = PolyFill([[v[0] for v in loop] for loop in loop_list])

    # draw_loops(loop_list)
    # raise 'done loop'
    # map to original indices
    fill = [[vert_map[i] for i in reversed(f)] for f in fill]

    if not fill:
        print('Warning Cannot scanfill, fallback on a triangle fan.')
        fill = [[0, i - 1, i] for i in range(2, len(indices))]
    else:
        # Use real scanfill.
        # See if it's flipped the wrong way.
        flip = None
        for fi in fill:
            if flip is not None:
                break
            for i, vi in enumerate(fi):
                if vi == 0 and fi[i - 1] == 1:
                    flip = False
                    break
                elif vi == 1 and fi[i - 1] == 0:
                    flip = True
                    break

        if not flip:
            for i, fi in enumerate(fill):
                fill[i] = tuple([ii for ii in reversed(fi)])

    return fill

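
# Illustrative use (not called this way by the importer itself): the helper only
# needs point data and an index loop, e.g.
#
#   quad = ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (1.0, 1.0, 0.0), (0.0, 1.0, 0.0))
#   tris = BPyMesh_ngon(quad, [0, 1, 2, 3])
#
# 'tris' is then a list of triangle index lists into 'indices' (for example
# [[0, 1, 2], [0, 2, 3]]); the exact triangulation depends on PolyFill.
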

def line_value(line_split):
    '''
    Returns 1 string representing the value for this line
    None will be returned if there's only 1 word
    '''
    length = len(line_split)
    if length == 1:
        return None

    elif length == 2:
        return line_split[1]

    elif length > 2:
        return ' '.join(line_split[1:])

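
# For example, line_value("usemtl Material_001".split()) gives 'Material_001',
# while line_value("newmtl my material".split()) joins the rest into 'my material'.
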

def obj_image_load(imagepath, DIR, IMAGE_SEARCH):
    if '_' in imagepath:
        image = load_image(imagepath.replace('_', ' '), DIR)
        if image:
            return image

    image = load_image(imagepath, DIR)
    if image:
        return image

    print("failed to load '%s' doesn't exist" % imagepath)
    return None

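
# e.g. obj_image_load('my_texture.png', '/path/to/obj_dir', True) first tries
# 'my texture.png' (undoing the underscore-for-space substitution done by some
# exporters such as Max), then falls back to the literal filename.
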

# def obj_image_load(imagepath, DIR, IMAGE_SEARCH):
#     '''
#     Mainly uses comprehensiveImageLoad
#     but tries to replace '_' with ' ' for Max's exporter replaces spaces with underscores.
#     '''
#
#     if '_' in imagepath:
#         image = BPyImage.comprehensiveImageLoad(imagepath, DIR, PLACE_HOLDER=False, RECURSIVE=IMAGE_SEARCH)
#         if image: return image
#         # Did the exporter rename the image?
#         image = BPyImage.comprehensiveImageLoad(imagepath.replace('_', ' '), DIR, PLACE_HOLDER=False, RECURSIVE=IMAGE_SEARCH)
#         if image: return image
#
#     # Return an image, placeholder if it dosnt exist
#     image = BPyImage.comprehensiveImageLoad(imagepath, DIR, PLACE_HOLDER=True, RECURSIVE=IMAGE_SEARCH)
#     return image

def create_materials(filepath, material_libs, unique_materials, unique_material_images, IMAGE_SEARCH):
    '''
    Create all the used materials in this obj,
    assign colors and images to the materials from all referenced material libs
    '''
    DIR = os.path.dirname(filepath)

    #==================================================================================#
    # This function sets textures defined in .mtl file                                 #
    #==================================================================================#
    def load_material_image(blender_material, context_material_name, imagepath, type):

        texture = bpy.data.textures.new(name=type, type='IMAGE')

        # Absolute path - c:\.. etc would work here
        image = obj_image_load(imagepath, DIR, IMAGE_SEARCH)
        has_data = False

        if image:
            texture.image = image
            has_data = image.has_data

        # Adds textures for materials (rendering)
        if type == 'Kd':
            if has_data and image.depth == 32:
                # Image has alpha

                mtex = blender_material.texture_slots.add()
                mtex.texture = texture
                mtex.texture_coords = 'UV'
                mtex.use_map_color_diffuse = True
                mtex.use_map_alpha = True

                texture.mipmap = True
                texture.interpolation = True
                texture.use_alpha = True
                blender_material.use_transparency = True
                blender_material.alpha = 0.0
            else:
                mtex = blender_material.texture_slots.add()
                mtex.texture = texture
                mtex.texture_coords = 'UV'
                mtex.use_map_color_diffuse = True

            # adds textures to faces (Textured/Alt-Z mode)
            # Only apply the diffuse texture to the face if the image has not been set with the inline usemat func.
            unique_material_images[context_material_name] = image, has_data  # set the texface image

        elif type == 'Ka':
            mtex = blender_material.texture_slots.add()
            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_ambient = True

        elif type == 'Ks':
            mtex = blender_material.texture_slots.add()
            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_specular = True

        elif type == 'Bump':
            mtex = blender_material.texture_slots.add()
            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_normal = True

        elif type == 'D':
            mtex = blender_material.texture_slots.add()
            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_alpha = True
            blender_material.use_transparency = True
            blender_material.transparency_method = 'Z_TRANSPARENCY'
            blender_material.alpha = 0.0
            # Todo, unset diffuse material alpha if it has an alpha channel

        elif type == 'refl':
            mtex = blender_material.texture_slots.add()
            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use_map_reflect = True

    # Add an MTL with the same name as the obj if no MTLs are specified.
    temp_mtl = os.path.splitext(os.path.basename(filepath))[0] + '.mtl'

    if os.path.exists(os.path.join(DIR, temp_mtl)) and temp_mtl not in material_libs:
        material_libs.append(temp_mtl)
    del temp_mtl

    # Create new materials
    for name in unique_materials:  # .keys()
        if name != None:
            unique_materials[name] = bpy.data.materials.new(name)
            unique_material_images[name] = None, False  # assign None to all material images to start with, add to later.

    unique_materials[None] = None
    unique_material_images[None] = None, False

    for libname in material_libs:
        mtlpath = os.path.join(DIR, libname)
        if not os.path.exists(mtlpath):
            print("\tError Missing MTL: '%s'" % mtlpath)
        else:
            #print '\t\tloading mtl: "%s"' % mtlpath
            context_material = None
            mtl = open(mtlpath, 'rU')
            for line in mtl:  # .xreadlines():
                if line.startswith('newmtl'):
                    context_material_name = line_value(line.split())
                    if context_material_name in unique_materials:
                        context_material = unique_materials[context_material_name]
                    else:
                        context_material = None

                elif context_material:
                    # we need to make a material to assign properties to it.
                    line_split = line.split()
                    line_lower = line.lower().lstrip()
                    if line_lower.startswith('ka'):
                        context_material.mirror_color = float(line_split[1]), float(line_split[2]), float(line_split[3])
                    elif line_lower.startswith('kd'):
                        context_material.diffuse_color = float(line_split[1]), float(line_split[2]), float(line_split[3])
                    elif line_lower.startswith('ks'):
                        context_material.specular_color = float(line_split[1]), float(line_split[2]), float(line_split[3])
                    elif line_lower.startswith('ns'):
                        context_material.specular_hardness = int((float(line_split[1]) * 0.51))
                    elif line_lower.startswith('ni'):  # Refraction index
                        context_material.raytrace_transparency.ior = max(1, min(float(line_split[1]), 3))  # between 1 and 3
                    elif line_lower.startswith('d') or line_lower.startswith('tr'):
                        context_material.alpha = float(line_split[1])
                        context_material.use_transparency = True
                        context_material.transparency_method = 'Z_TRANSPARENCY'
                    elif line_lower.startswith('map_ka'):
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'Ka')
                    elif line_lower.startswith('map_ks'):
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'Ks')
                    elif line_lower.startswith('map_kd'):
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'Kd')
                    elif line_lower.startswith('map_bump'):
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'Bump')
                    elif line_lower.startswith('map_d') or line_lower.startswith('map_tr'):  # Alpha map - Dissolve
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'D')
                    elif line_lower.startswith('refl'):  # reflection map
                        img_filepath = line_value(line.split())
                        if img_filepath:
                            load_material_image(context_material, context_material_name, img_filepath, 'refl')
            mtl.close()

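
# For reference, a minimal .mtl entry exercising the parser above looks like
# this (values are illustrative only):
#
#   newmtl Material_001
#   Ka 0.2 0.2 0.2
#   Kd 0.8 0.1 0.1
#   Ks 1.0 1.0 1.0
#   Ns 96.0
#   d 1.0
#   map_Kd diffuse.png
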

def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
    '''
    Takes vert_loc and faces, and separates into multiple sets of
    (verts_loc, faces, unique_materials, dataname)
    '''
    filename = os.path.splitext(os.path.basename(filepath))[0]

    if not SPLIT_OB_OR_GROUP:
        # use the filename for the object name since we aren't chopping up the mesh.
        return [(verts_loc, faces, unique_materials, filename)]

    def key_to_name(key):
        # if the key is a tuple, join it to make a string
        if not key:
            return filename  # assume it's a string. make sure this is true if the splitting code is changed
        else:
            return key

    # Return a key that makes the faces unique.
    face_split_dict = {}

    oldkey = -1  # initialize to a value that will never match the key

    for face in faces:
        key = face[4]

        if oldkey != key:
            # Check the key has changed.
            try:
                verts_split, faces_split, unique_materials_split, vert_remap = face_split_dict[key]
            except KeyError:
                faces_split = []
                verts_split = []
                unique_materials_split = {}
                vert_remap = [-1] * len(verts_loc)

                face_split_dict[key] = (verts_split, faces_split, unique_materials_split, vert_remap)

            oldkey = key

        face_vert_loc_indicies = face[0]

        # Remap verts to new vert list and add where needed
        for enum, i in enumerate(face_vert_loc_indicies):
            if vert_remap[i] == -1:
                new_index = len(verts_split)
                vert_remap[i] = new_index  # set the new remapped index so we only add once and can reference next time.
                face_vert_loc_indicies[enum] = new_index  # remap to the local index
                verts_split.append(verts_loc[i])  # add the vert to the local verts
            else:
                face_vert_loc_indicies[enum] = vert_remap[i]  # remap to the local index

        matname = face[2]
        if matname and matname not in unique_materials_split:
            unique_materials_split[matname] = unique_materials[matname]

        faces_split.append(face)

    # remove one of the items and reorder
    return [(value[0], value[1], value[2], key_to_name(key)) for key, value in list(face_split_dict.items())]

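
# Each entry of 'faces' built by load() is a tuple of the form
#   (face_vert_loc_indicies, face_vert_tex_indicies, context_material, context_smooth_group, context_object)
# so face[2] above is the material name and face[4] is the object/group key
# that drives the splitting.
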

def create_mesh(new_objects, has_ngons, CREATE_FGONS, CREATE_EDGES, verts_loc, verts_tex, faces, unique_materials, unique_material_images, unique_smooth_groups, vertex_groups, dataname):
    '''
    Takes all the data gathered and generates a mesh, adding the new object to new_objects
    deals with fgons, sharp edges and assigning materials
    '''
    if not has_ngons:
        CREATE_FGONS = False

    if unique_smooth_groups:
        sharp_edges = {}
        smooth_group_users = {context_smooth_group: {} for context_smooth_group in list(unique_smooth_groups.keys())}
        context_smooth_group_old = -1

    # Split fgons into tri's
    fgon_edges = {}  # Used for storing fgon keys
    if CREATE_EDGES:
        edges = []

    context_object = None

    # reverse loop through face indices
    for f_idx in range(len(faces) - 1, -1, -1):

        face_vert_loc_indicies, \
        face_vert_tex_indicies, \
        context_material, \
        context_smooth_group, \
        context_object = faces[f_idx]

        len_face_vert_loc_indicies = len(face_vert_loc_indicies)

        if len_face_vert_loc_indicies == 1:
            faces.pop(f_idx)  # cant add single vert faces

        elif not face_vert_tex_indicies or len_face_vert_loc_indicies == 2:  # faces that have no texture coords are lines
            if CREATE_EDGES:
                # generators are better in python 2.4+ but can't be used in 2.3
                # edges.extend( (face_vert_loc_indicies[i], face_vert_loc_indicies[i+1]) for i in xrange(len_face_vert_loc_indicies-1) )
                edges.extend([(face_vert_loc_indicies[i], face_vert_loc_indicies[i + 1]) for i in range(len_face_vert_loc_indicies - 1)])

            faces.pop(f_idx)
        else:

            # Smooth Group
            if unique_smooth_groups and context_smooth_group:
                # Is a part of a smooth group and is a face
                if context_smooth_group_old is not context_smooth_group:
                    edge_dict = smooth_group_users[context_smooth_group]
                    context_smooth_group_old = context_smooth_group

                for i in range(len_face_vert_loc_indicies):
                    i1 = face_vert_loc_indicies[i]
                    i2 = face_vert_loc_indicies[i - 1]
                    if i1 > i2:
                        i1, i2 = i2, i1

                    try:
                        edge_dict[i1, i2] += 1
                    except KeyError:
                        edge_dict[i1, i2] = 1

            # FGons into triangles
            if has_ngons and len_face_vert_loc_indicies > 4:

                ngon_face_indices = BPyMesh_ngon(verts_loc, face_vert_loc_indicies)
                faces.extend(
                    [([face_vert_loc_indicies[ngon[0]], face_vert_loc_indicies[ngon[1]], face_vert_loc_indicies[ngon[2]]],
                      [face_vert_tex_indicies[ngon[0]], face_vert_tex_indicies[ngon[1]], face_vert_tex_indicies[ngon[2]]],
                      context_material,
                      context_smooth_group,
                      context_object)
                     for ngon in ngon_face_indices]
                )

                # edges to make fgons
                if CREATE_FGONS:
                    edge_users = {}
                    for ngon in ngon_face_indices:
                        for i in (0, 1, 2):
                            i1 = face_vert_loc_indicies[ngon[i]]
                            i2 = face_vert_loc_indicies[ngon[i - 1]]
                            if i1 > i2:
                                i1, i2 = i2, i1

                            try:
                                edge_users[i1, i2] += 1
                            except KeyError:
                                edge_users[i1, i2] = 1

                    for key, users in edge_users.items():
                        if users > 1:
                            fgon_edges[key] = None

                # remove all after 3, means we dont have to pop this one.
                faces.pop(f_idx)

    # Build sharp edges
    if unique_smooth_groups:
        for edge_dict in list(smooth_group_users.values()):
            for key, users in list(edge_dict.items()):
                if users == 1:  # This edge is on the boundary of a group
                    sharp_edges[key] = None

    # map the material names to an index
    material_mapping = {name: i for i, name in enumerate(unique_materials)}  # enumerate over unique_materials keys()

    materials = [None] * len(unique_materials)

    for name, index in list(material_mapping.items()):
        materials[index] = unique_materials[name]

    me = bpy.data.meshes.new(dataname)

    # make sure the list isnt too big
    for material in materials:
        me.materials.append(material)

    me.vertices.add(len(verts_loc))
    me.faces.add(len(faces))

    # verts_loc is a list of (x, y, z) tuples
    me.vertices.foreach_set("co", unpack_list(verts_loc))

    # faces is a list of (vert_indices, texco_indices, ...) tuples
    # XXX faces should contain either 3 or 4 verts
    # XXX no check for valid face indices
    me.faces.foreach_set("vertices_raw", unpack_face_list([f[0] for f in faces]))

    if verts_tex and me.faces:
        me.uv_textures.new()

    context_material_old = -1  # avoid a dict lookup
    mat = 0  # rare case it may be un-initialized.
    me_faces = me.faces

    for i, face in enumerate(faces):
        if len(face[0]) < 2:
            pass  # raise "bad face"
        elif len(face[0]) == 2:
            if CREATE_EDGES:
                edges.append(face[0])
        else:

            blender_face = me.faces[i]

            face_vert_loc_indicies, \
            face_vert_tex_indicies, \
            context_material, \
            context_smooth_group, \
            context_object = face

            if context_smooth_group:
                blender_face.use_smooth = True

            if context_material:
                if context_material_old is not context_material:
                    mat = material_mapping[context_material]
                    context_material_old = context_material

                blender_face.material_index = mat
#                blender_face.mat = mat

            if verts_tex:

                blender_tface = me.uv_textures[0].data[i]

                if context_material:
                    image, has_data = unique_material_images[context_material]
                    if image:  # Can be none if the material doesn't have an image.
                        blender_tface.image = image
                        blender_tface.use_image = True
                        if has_data and image.depth == 32:
                            blender_tface.blend_type = 'ALPHA'

                # BUG - Evil eekadoodle problem where faces that have vert index 0 location at 3 or 4 are shuffled.
                if len(face_vert_loc_indicies) == 4:
                    if face_vert_loc_indicies[2] == 0 or face_vert_loc_indicies[3] == 0:
                        face_vert_tex_indicies = face_vert_tex_indicies[2], face_vert_tex_indicies[3], face_vert_tex_indicies[0], face_vert_tex_indicies[1]
                else:  # length of 3
                    if face_vert_loc_indicies[2] == 0:
                        face_vert_tex_indicies = face_vert_tex_indicies[1], face_vert_tex_indicies[2], face_vert_tex_indicies[0]
                # END EEEKADOODLE FIX

                # assign material, uv's and image
                blender_tface.uv1 = verts_tex[face_vert_tex_indicies[0]]
                blender_tface.uv2 = verts_tex[face_vert_tex_indicies[1]]
                blender_tface.uv3 = verts_tex[face_vert_tex_indicies[2]]

                if len(face_vert_loc_indicies) == 4:
                    blender_tface.uv4 = verts_tex[face_vert_tex_indicies[3]]

#                for ii, uv in enumerate(blender_face.uv):
#                    uv.x, uv.y = verts_tex[face_vert_tex_indicies[ii]]
    del me_faces
#    del ALPHA

    if CREATE_EDGES and not edges:
        CREATE_EDGES = False

    if CREATE_EDGES:
        me.edges.add(len(edges))

        # edges should be a list of (a, b) tuples
        me.edges.foreach_set("vertices", unpack_list(edges))
#        me_edges.extend(edges)

#    del me_edges

    # Add edge faces.
#    me_edges = me.edges

    def edges_match(e1, e2):
        return (e1[0] == e2[0] and e1[1] == e2[1]) or (e1[0] == e2[1] and e1[1] == e2[0])

    # XXX slow
#    if CREATE_FGONS and fgon_edges:
#        for fgon_edge in fgon_edges.keys():
#            for ed in me.edges:
#                if edges_match(fgon_edge, ed.vertices):
#                    ed.is_fgon = True

#    if CREATE_FGONS and fgon_edges:
#        FGON = Mesh.EdgeFlags.FGON
#        for ed in me.findEdges(fgon_edges.keys()):
#            if ed is not None:
#                me_edges[ed].flag |= FGON
#        del FGON

    # XXX slow
#    if unique_smooth_groups and sharp_edges:
#        for sharp_edge in sharp_edges.keys():
#            for ed in me.edges:
#                if edges_match(sharp_edge, ed.vertices):
#                    ed.use_edge_sharp = True

#    if unique_smooth_groups and sharp_edges:
#        SHARP = Mesh.EdgeFlags.SHARP
#        for ed in me.findEdges(sharp_edges.keys()):
#            if ed is not None:
#                me_edges[ed].flag |= SHARP
#        del SHARP

    me.update(calc_edges=CREATE_EDGES)
#    me.calcNormals()

    ob = bpy.data.objects.new("Mesh", me)
    new_objects.append(ob)

    # Create the vertex groups. No need to have the flag passed here since we test for the
    # content of the vertex_groups. If the user selects to NOT have vertex groups saved then
    # the following test will never run
    for group_name, group_indicies in vertex_groups.items():
        group = ob.vertex_groups.new(group_name)
        ob.vertex_groups.assign(group_indicies, group, 1.0, 'REPLACE')

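
# 'vertex_groups' maps an OBJ polygroup name to the vertex indices collected for
# it while parsing, e.g. {'arm': [0, 1, 2, 5], 'leg': [3, 4]} (illustrative values);
# each key becomes one Blender vertex group, assigned with weight 1.0.
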

def create_nurbs(context_nurbs, vert_loc, new_objects):
    '''
    Add nurbs object to blender, only supports one type at the moment
    '''
    deg = context_nurbs.get('deg', (3,))
    curv_range = context_nurbs.get('curv_range')
    curv_idx = context_nurbs.get('curv_idx', [])
    parm_u = context_nurbs.get('parm_u', [])
    parm_v = context_nurbs.get('parm_v', [])
    name = context_nurbs.get('name', 'ObjNurb')
    cstype = context_nurbs.get('cstype')

    if cstype is None:
        print('\tWarning, cstype not found')
        return
    if cstype != 'bspline':
        print('\tWarning, cstype is not supported (only bspline)')
        return
    if not curv_idx:
        print('\tWarning, curv argument empty or not set')
        return
    if len(deg) > 1 or parm_v:
        print('\tWarning, surfaces not supported')
        return

    cu = bpy.data.curves.new(name, 'CURVE')
    cu.dimensions = '3D'

    nu = cu.splines.new('NURBS')
    nu.points.add(len(curv_idx) - 1)  # a point is added to start with
    nu.points.foreach_set("co", [co_axis for vt_idx in curv_idx for co_axis in (vert_loc[vt_idx] + (1.0,))])

    nu.order_u = deg[0] + 1

    # get the endpoint flag from the weighting
    if curv_range and len(parm_u) > deg[0] + 1:
        do_endpoints = True
        for i in range(deg[0] + 1):

            if abs(parm_u[i] - curv_range[0]) > 0.0001:
                do_endpoints = False
                break

            if abs(parm_u[-(i + 1)] - curv_range[1]) > 0.0001:
                do_endpoints = False
                break

    else:
        do_endpoints = False

    if do_endpoints:
        nu.use_endpoint_u = True

    # close
    '''
    do_closed = False
    if len(parm_u) > deg[0] + 1:
        for i in xrange(deg[0] + 1):
            #print curv_idx[i], curv_idx[-(i + 1)]
            if curv_idx[i] == curv_idx[-(i + 1)]:
                do_closed = True
                break

    if do_closed:
        nu.use_cyclic_u = True
    '''

    ob = bpy.data.objects.new("Nurb", cu)

    new_objects.append(ob)

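
# 'context_nurbs' is the per-curve dict accumulated by the parser in load(),
# with keys such as 'cstype', 'deg', 'curv_range', 'curv_idx', 'parm_u',
# 'parm_v' and 'name' (see the corresponding branches in load() below).
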

def strip_slash(line_split):
    if line_split[-1][-1] == '\\':
        if len(line_split[-1]) == 1:
            line_split.pop()  # remove the \ item
        else:
            line_split[-1] = line_split[-1][:-1]  # remove the \ from the end of the last number
        return True
    return False

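
# Example: for a continued OBJ face line "f 1 2 3 \", the caller's
# line[2:].split() gives ['1', '2', '3', '\\']; strip_slash() drops the trailing
# backslash and returns True, so context_multi_line stays set to 'f'.
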

def get_float_func(filepath):
    '''
    find the float function for this obj file
    - whether to replace commas or not
    '''
    file = open(filepath, 'rU')
    for line in file:  # .xreadlines():
        line = line.lstrip()
        if line.startswith('v'):  # vn vt v
            if ',' in line:
                return lambda f: float(f.replace(',', '.'))
            elif '.' in line:
                return float

    # in case all vert values were ints
    return float

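
# Some exporters write decimal commas, e.g. "v 1,0 2,5 0,0"; for such files the
# returned lambda turns '1,0' into 1.0, otherwise the plain float() builtin is used.
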

def load(operator, context, filepath,
         CLAMP_SIZE=0.0,
         CREATE_FGONS=True,
         CREATE_SMOOTH_GROUPS=True,
         CREATE_EDGES=True,
         SPLIT_OBJECTS=True,
         SPLIT_GROUPS=True,
         ROTATE_X90=True,
         IMAGE_SEARCH=True,
         POLYGROUPS=False):
    '''
    Called by the user interface or another script.
    load_obj(path) - should give acceptable results.
    This function parses the file and sends the data off
        to be split into objects and then converted into mesh objects
    '''
    print('\nimporting obj %r' % filepath)

    if SPLIT_OBJECTS or SPLIT_GROUPS:
        POLYGROUPS = False

    time_main = time.time()
#    time_main = sys.time()

    verts_loc = []
    verts_tex = []
    faces = []  # tuples of the faces
    material_libs = []  # filenames of material libs this uses
    vertex_groups = {}  # when POLYGROUPS is true

    # Get the string to float conversion func for this file - is 'float' for almost all files.
    float_func = get_float_func(filepath)

    # Context variables
    context_material = None
    context_smooth_group = None
    context_object = None
    context_vgroup = None

    # Nurbs
    context_nurbs = {}
    nurbs = []
    context_parm = ''  # used by nurbs too but could be used elsewhere

    has_ngons = False
    # has_smoothgroups = False - is explicit with len(unique_smooth_groups) being > 0

    # Until we can use sets
    unique_materials = {}
    unique_material_images = {}
    unique_smooth_groups = {}
    # unique_obects = {} - no use for this variable since the objects are stored in the face.

    # when there are faces that end with \
    # it means they are multiline-
    # since we use xreadline we cant skip to the next line
    # so we need to know whether the next line continues the current face
    context_multi_line = ''

    print("\tparsing obj file...")
    time_sub = time.time()
#    time_sub = sys.time()

    file = open(filepath, 'rU')
    for line in file:  # .xreadlines():
        line = line.lstrip()  # rare cases there is white space at the start of the line

        if line.startswith('v '):
            line_split = line.split()
            # rotate X90: (x,-z,y)
            verts_loc.append((float_func(line_split[1]), -float_func(line_split[3]), float_func(line_split[2])))

        elif line.startswith('vn '):
            pass

        elif line.startswith('vt '):
            line_split = line.split()
            verts_tex.append((float_func(line_split[1]), float_func(line_split[2])))

        # Handle face lines (as faces) and the second+ lines of a multiline face here
        # use 'f' not 'f ' because some objs (very rare) have 'fo ' for faces
        elif line.startswith('f') or context_multi_line == 'f':

            if context_multi_line:
                # use face_vert_loc_indicies and face_vert_tex_indicies previously defined and used the obj_face
                line_split = line.split()

            else:
                line_split = line[2:].split()
                face_vert_loc_indicies = []
                face_vert_tex_indicies = []

                # Instance a face
                faces.append((face_vert_loc_indicies,
                              face_vert_tex_indicies,
                              context_material,
                              context_smooth_group,
                              context_object))

            if strip_slash(line_split):
                context_multi_line = 'f'
            else:
                context_multi_line = ''

            for v in line_split:
                obj_vert = v.split('/')

                vert_loc_index = int(obj_vert[0]) - 1
                # Add the vertex to the current group
                # *warning*, this wont work for files that have groups defined around verts
                if POLYGROUPS and context_vgroup:
                    vertex_groups[context_vgroup].append(vert_loc_index)

                # Make relative negative vert indicies absolute
                if vert_loc_index < 0:
                    vert_loc_index = len(verts_loc) + vert_loc_index + 1

                face_vert_loc_indicies.append(vert_loc_index)

                if len(obj_vert) > 1 and obj_vert[1]:
                    # formatting for faces with normals and textures is
                    # loc_index/tex_index/nor_index

                    vert_tex_index = int(obj_vert[1]) - 1
                    # Make relative negative vert indicies absolute
                    if vert_tex_index < 0:
                        vert_tex_index = len(verts_tex) + vert_tex_index + 1

                    face_vert_tex_indicies.append(vert_tex_index)
                else:
                    # dummy
                    face_vert_tex_indicies.append(0)

            if len(face_vert_loc_indicies) > 4:
                has_ngons = True

        elif CREATE_EDGES and (line.startswith('l ') or context_multi_line == 'l'):
            # very similar to the face load function above with some parts removed

            if context_multi_line:
                # use face_vert_loc_indicies and face_vert_tex_indicies previously defined and used the obj_face
                line_split = line.split()

            else:
                line_split = line[2:].split()
                face_vert_loc_indicies = []
                face_vert_tex_indicies = []

                # Instance a face
                faces.append((face_vert_loc_indicies,
                              face_vert_tex_indicies,
                              context_material,
                              context_smooth_group,
                              context_object))

            if strip_slash(line_split):
                context_multi_line = 'l'
            else:
                context_multi_line = ''

            isline = line.startswith('l')

            for v in line_split:
                vert_loc_index = int(v) - 1
                # Make relative negative vert indicies absolute
                if vert_loc_index < 0:
                    vert_loc_index = len(verts_loc) + vert_loc_index + 1

                face_vert_loc_indicies.append(vert_loc_index)

        elif line.startswith('s'):
            if CREATE_SMOOTH_GROUPS:
                context_smooth_group = line_value(line.split())
                if context_smooth_group == 'off':
                    context_smooth_group = None
                elif context_smooth_group:  # is not None
                    unique_smooth_groups[context_smooth_group] = None

        elif line.startswith('o'):
            if SPLIT_OBJECTS:
                context_object = line_value(line.split())
                # unique_obects[context_object] = None

        elif line.startswith('g'):
            if SPLIT_GROUPS:
                context_object = line_value(line.split())
                # print 'context_object', context_object
                # unique_obects[context_object] = None
            elif POLYGROUPS:
                context_vgroup = line_value(line.split())
                if context_vgroup and context_vgroup != '(null)':
                    vertex_groups.setdefault(context_vgroup, [])
                else:
                    context_vgroup = None  # dont assign a vgroup

        elif line.startswith('usemtl'):
            context_material = line_value(line.split())
            unique_materials[context_material] = None

        elif line.startswith('mtllib'):  # usemap or usemat
            material_libs = list(set(material_libs) | set(line.split()[1:]))  # can have multiple mtllib filenames per line, mtllib can appear more than once, so make sure only one occurrence of each material lib exists

        # Nurbs support
        elif line.startswith('cstype '):
            context_nurbs['cstype'] = line_value(line.split())  # 'rat bspline' / 'bspline'
        elif line.startswith('curv') or context_multi_line == 'curv':
            line_split = line.split()

            curv_idx = context_nurbs['curv_idx'] = context_nurbs.get('curv_idx', [])  # in case we're multiline

            if not context_multi_line:
                context_nurbs['curv_range'] = float_func(line_split[1]), float_func(line_split[2])
                line_split[0:3] = []  # remove first 3 items

            if strip_slash(line_split):
                context_multi_line = 'curv'
            else:
                context_multi_line = ''

            for i in line_split:
                vert_loc_index = int(i) - 1

                if vert_loc_index < 0:
                    vert_loc_index = len(verts_loc) + vert_loc_index + 1

                curv_idx.append(vert_loc_index)

        elif line.startswith('parm') or context_multi_line == 'parm':
            line_split = line.split()

            if context_multi_line:
                context_multi_line = ''
            else:
                context_parm = line_split[1]
                line_split[0:2] = []  # remove first 2

            if strip_slash(line_split):
                context_multi_line = 'parm'
            else:
                context_multi_line = ''

            if context_parm.lower() == 'u':
                context_nurbs.setdefault('parm_u', []).extend([float_func(f) for f in line_split])
            elif context_parm.lower() == 'v':  # surfaces not supported yet
                context_nurbs.setdefault('parm_v', []).extend([float_func(f) for f in line_split])
            # else: # may want to support other parm's ?

        elif line.startswith('deg '):
            context_nurbs['deg'] = [int(i) for i in line.split()[1:]]
        elif line.startswith('end'):
            # Add the nurbs curve
            if context_object:
                context_nurbs['name'] = context_object
            nurbs.append(context_nurbs)
            context_nurbs = {}
            context_parm = ''

        ''' # How to use usemap? deprecated?
        elif line.startswith('usema'): # usemap or usemat
            context_image = line_value(line.split())
        '''

    file.close()
    time_new = time.time()
#    time_new = sys.time()
    print('%.4f sec' % (time_new - time_sub))
    time_sub = time_new

    print('\tloading materials and images...')
    create_materials(filepath, material_libs, unique_materials, unique_material_images, IMAGE_SEARCH)

    time_new = time.time()
#    time_new = sys.time()
    print('%.4f sec' % (time_new - time_sub))
    time_sub = time_new

    if not ROTATE_X90:
        verts_loc[:] = [(v[0], v[2], -v[1]) for v in verts_loc]

    # deselect all
    bpy.ops.object.select_all(action='DESELECT')

    scene = context.scene
#    scn.objects.selected = []
    new_objects = []  # put new objects here

    print('\tbuilding geometry...\n\tverts:%i faces:%i materials: %i smoothgroups:%i ...' % (len(verts_loc), len(faces), len(unique_materials), len(unique_smooth_groups)))
    # Split the mesh by objects/materials, may
    if SPLIT_OBJECTS or SPLIT_GROUPS:
        SPLIT_OB_OR_GROUP = True
    else:
        SPLIT_OB_OR_GROUP = False

    for verts_loc_split, faces_split, unique_materials_split, dataname in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
        # Create meshes from the data, warning 'vertex_groups' wont support splitting
        create_mesh(new_objects, has_ngons, CREATE_FGONS, CREATE_EDGES, verts_loc_split, verts_tex, faces_split, unique_materials_split, unique_material_images, unique_smooth_groups, vertex_groups, dataname)

    # nurbs support
    for context_nurbs in nurbs:
        create_nurbs(context_nurbs, verts_loc, new_objects)

    # Create new obj
    for obj in new_objects:
        base = scene.objects.link(obj)
        base.select = True

    scene.update()

    axis_min = [1000000000] * 3
    axis_max = [-1000000000] * 3

#    if CLAMP_SIZE:
#        # Get all object bounds
#        for ob in new_objects:
#            for v in ob.getBoundBox():
#                for axis, value in enumerate(v):
#                    if axis_min[axis] > value:
#                        axis_min[axis] = value
#                    if axis_max[axis] < value:
#                        axis_max[axis] = value
#
#        # Scale objects
#        max_axis = max(axis_max[0] - axis_min[0], axis_max[1] - axis_min[1], axis_max[2] - axis_min[2])
#        scale = 1.0
#
#        while CLAMP_SIZE < max_axis * scale:
#            scale = scale / 10.0
#
#        for ob in new_objects:
#            ob.setSize(scale, scale, scale)

    # Better rotate the vert locations
    #if not ROTATE_X90:
    #    for ob in new_objects:
    #        ob.RotX = -1.570796326794896558

    time_new = time.time()
#    time_new = sys.time()

    print('finished importing: %r in %.4f sec.' % (filepath, (time_new - time_main)))
    return {'FINISHED'}

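
# Illustrative call (the operator that forwards its properties to load()
# typically lives in the add-on's __init__.py, not in this file):
#
#   load(operator, context, "/path/to/model.obj",
#        CREATE_FGONS=True, CREATE_EDGES=True, SPLIT_OBJECTS=True,
#        SPLIT_GROUPS=True, ROTATE_X90=True, IMAGE_SEARCH=True)
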
# NOTES (all line numbers refer to 2.4x import_obj.py, not this file)
# check later: line 489
# can convert now: edge flags, edges: lines 508-528
# ngon (uses python module BPyMesh): 384-414
# NEXT clamp size: get bound box with RNA
# get back to l 140 (here)
# search image in bpy.config.textureDir - load_image
# replaced BPyImage.comprehensiveImageLoad with a simplified version that only checks additional directory specified, but doesn't search dirs recursively (obj_image_load)
# bitmask won't work? - 132
# uses bpy.sys.time()

if __name__ == "__main__":
    register()