#!BPY

"""
Name: 'LightWave (.lwo)...'
Blender: 239
Group: 'Import'
Tooltip: 'Import LightWave Object File Format'
"""

__author__ = ["Alessandro Pirovano, Anthony D'Agostino (Scorpius)", "Campbell Barton (ideasman42)", "ZanQdo"]
__url__ = ("www.blender.org", "blenderartist.org",
"Anthony's homepage, http://www.redrival.com/scorpius", "Alessandro's homepage, http://uaraus.altervista.org")

importername = "lwo_import 0.4.0"
# +---------------------------------------------------------+
# | Save your work before and after use.                     |
# | Please report any useful comment to:                     |
# | uaraus-dem@yahoo.it                                      |
# | Thanks                                                   |
# +---------------------------------------------------------+
# +---------------------------------------------------------+
# | Copyright (c) 2002 Anthony D'Agostino                    |
# | http://www.redrival.com/scorpius                         |
# | scorpius@netzero.com                                     |
# | April 21, 2002                                           |
# | Import Export Suite v0.5                                 |
# +---------------------------------------------------------+
# | Read and write LightWave Object File Format (*.lwo)      |
# +---------------------------------------------------------+
# +---------------------------------------------------------+
# | Alessandro Pirovano tweaked starting on March 2005       |
# | http://uaraus.altervista.org                             |
# +---------------------------------------------------------+
# +----------------------------------------------------------
# | GPL license block
# |
# | This program is free software; you can redistribute it and/or modify
# | it under the terms of the GNU General Public License as published by
# | the Free Software Foundation; either version 2 of the License, or
# | (at your option) any later version.
# |
# | This program is distributed in the hope that it will be useful,
# | but WITHOUT ANY WARRANTY; without even the implied warranty of
# | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# | GNU General Public License for more details.
# |
# | You should have received a copy of the GNU General Public License
# | along with this program; if not, write to the Free Software
# | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# +----------------------------------------------------------
# +---------------------------------------------------------+
# | Release log:                                             |
# | 0.4.0 : Updated for blender 2.44                         |
# |         ZanQdo - made the mesh import the right way up   |
# |         Ideasman42 - Updated functions for the new API   |
# |                      as well as removing the text object class
# | 0.2.2 : This code works with Blender 2.42 RC3            |
# |         Added a new PolyFill function for BPYMesh's      |
# |         ngon() to use, checked compatibility             |
# |         lightwaves ngons are imported as fgons           |
# |         Checked compatibility against 1711 lwo files     |
# | 0.2.1 : This code works with Blender 2.40 RC1            |
# |         modified material mode assignment to deal with   |
# |         Python API modification                          |
# |         Changed script license to GNU GPL                |
# | 0.2.0:  This code works with Blender 2.40a2 or up        |
# |         Major rewrite to deal with large meshes          |
# |         - 2 pass file parsing                            |
# |         - lower memory footprint                         |
# |           (as long as python gc allows)                  |
# |         2.40a2 - Removed subsurf settings patches=poly   |
# |         2.40a2 - Edge generation instead of 2vert faces  |
# | 0.1.16: fixed (try 2) texture offset calculations        |
# |         added hint on axis mapping                       |
# |         added hint on texture blending mode              |
# |         added hint on texture transparency setting       |
# |         search images in original directory first        |
# |         fixed texture order application                  |
# | 0.1.15: added release log                                |
# |         fixed texture offset calculations (non-UV)       |
# |         fixed reverting vertex order in face generation  |
# |         associate texture on game-engine settings        |
# |         vector math definitely based on mathutils        |
# |         search images in "Images" and "../Images" dir    |
# |         revised logging facility                         |
# |         fixed subsurf texture and material mappings      |
# | 0.1.14: patched missing mod_vector (not definitive)      |
# | 0.1.13: first public release                             |
# +---------------------------------------------------------+

#blender related import
import Blender
import bpy

# use for comprehensiveImageLoad
import BPyImage

# Use this ngon function
import BPyMesh

import BPyMessages

#python specific modules import
try:
	import struct, chunk, cStringIO
except:
	struct= chunk= cStringIO= None

# python 2.3 has no reversed() iterator. this will only work on lists and tuples
try:
	reversed
except:
	def reversed(l): return l[::-1]

### # Debugging disabled in release.
### # do a search replace to enable debug prints
### DEBUG = False

# ===========================================================
# === Utility Preamble ======================================
# ===========================================================

textname = None
#uncomment the following line to enable logging facility to the named text object
#textname = "lwo_log"

TXMTX = Blender.Mathutils.Matrix(\
[1, 0, 0, 0],\
[0, 0, 1, 0],\
[0, 1, 0, 0],\
[0, 0, 0, 1])
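# Editor's note (assumption): TXMTX swaps the Y and Z axes, so a LightWave
# point (x, y, z) becomes (x, z, y). LightWave objects are modelled Y-up while
# Blender is Z-up; applying this matrix once per mesh (msh.transform(TXMTX)
# further down) is cheaper than converting every vertex while parsing.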

# ===========================================================
# === Make sure it is a string ... deal with strange chars ==
# ===========================================================
def safestring(st):
	myst = ""
	for ll in xrange(len(st)):
		if st[ll] < " ":
			myst += "#"
		else:
			myst += st[ll]
	return myst
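# Example (editor's sketch): safestring("SUR\x01") returns "SUR#", while a clean
# chunk name such as "SURF" comes back unchanged. read_lwo2() later treats a "#"
# in the sanitized chunk name as a hint that the file may be corrupted.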

# ===========================================================
# === Main read functions ===================================
# ===========================================================

# =============================
# === Read LightWave Format ===
# =============================
def read(filename):
	if BPyMessages.Error_NoFile(filename):
		return

	print "This is: %s" % importername
	print "Importing file:", filename
	bpy.data.scenes.active.objects.selected = []

	start = Blender.sys.time()
	file = open(filename, "rb")

	editmode = Blender.Window.EditMode()    # are we in edit mode? If so ...
	if editmode: Blender.Window.EditMode(0) # leave edit mode before getting the mesh

	# === LWO header ===
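	# Editor's note: an .lwo file begins with a 12-byte IFF header -- the 4-byte
	# tag "FORM", a big-endian u32 length, then a 4-byte form type ("LWOB" for
	# the 5.5 format, "LWO2" for 6.0 and newer) -- which is what the ">4s1L4s"
	# unpack below expects.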
	try:
		form_id, form_size, form_type = struct.unpack(">4s1L4s", file.read(12))
	except:
		Blender.Draw.PupMenu('Error%t|This is not a lightwave file')
		return

	if (form_type == "LWOB"):
		read_lwob(file, filename)
	elif (form_type == "LWO2"):
		read_lwo2(file, filename)
	else:
		print "Can't read a file with the form_type: %s" % form_type
		return

	Blender.Window.DrawProgressBar(1.0, "") # clear progressbar
	file.close()
	end = Blender.sys.time()
	seconds = " in %.2f %s" % (end-start, "seconds")
	if form_type == "LWO2": fmt = " (v6.0 Format)"
	if form_type == "LWOB": fmt = " (v5.5 Format)"
	print "Successfully imported " + filename.split('\\')[-1].split('/')[-1] + fmt + seconds

	if editmode: Blender.Window.EditMode(1) # optional, just being nice
	Blender.Redraw()

# enddef read


# =================================
# === Read LightWave 5.5 format ===
# =================================
def read_lwob(file, filename):
	#This function is directly derived from the LWO2 import routine
	#dropping all the material analysis parts

	###if DEBUG: print "LightWave 5.5 format"

	dir_part = Blender.sys.dirname(filename)
	fname_part = Blender.sys.basename(filename)
	#ask_weird = 1

	#first initialization of data structures
	defaultname = Blender.sys.splitext(fname_part)[0]
	tag_list = []   #tag list: global for the whole file?
	surf_list = []  #surf list: global for the whole file?
	clip_list = []  #clip list: global for the whole file?
	object_index = 0
	object_list = None
	objspec_list = None

	#add default material for orphaned faces, if any
	surf_list.append({'NAME': "_Orphans", 'g_MAT': bpy.data.materials.new("_Orphans")})

	#pass 2: effectively generate objects
	###if DEBUG: print "Pass 1: dry import"
	file.seek(0)
	objspec_list = ["imported", {}, [], [], {}, {}, 0, {}, {}]
	# === LWO header ===
	form_id, form_size, form_type = struct.unpack(">4s1L4s", file.read(12))
	if (form_type != "LWOB"):
		###if DEBUG: print "??? Inconsistent file type: %s" % form_type
		return
	while 1:
		try:
			lwochunk = chunk.Chunk(file)
		except EOFError:
			break
		###if DEBUG: print ' ',
		if lwochunk.chunkname == "LAYR":
			###if DEBUG: print "---- LAYR",
			objname = read_layr(lwochunk)
			###if DEBUG: print objname
			if objspec_list != None: #create the object
				create_objects(clip_list, objspec_list, surf_list)
				update_material(clip_list, objspec_list, surf_list) #give it all the object
			objspec_list = [objname, {}, [], [], {}, {}, 0, {}, {}]
			object_index += 1
		elif lwochunk.chunkname == "PNTS": # Verts
			###if DEBUG: print "---- PNTS",
			verts = read_verts(lwochunk)
			objspec_list[2] = verts
		elif lwochunk.chunkname == "POLS": # Faces v5.5
			###if DEBUG: print "-------- POLS(5.5)"
			faces = read_faces_5(lwochunk)
			flag = 0
			#flag is 0 for regular polygon, 1 for patches (= subsurf), 2 for anything else to be ignored
			if flag<2:
				if objspec_list[3] != []:
					#create immediately the object
					create_objects(clip_list, objspec_list, surf_list)
					update_material(clip_list, objspec_list, surf_list) #give it all the object
					#update with new data
					objspec_list = [objspec_list[0], #update name
						{},               #init
						objspec_list[2],  #same vertexes
						faces,            #give it the new faces
						{},               #no need to copy - filled at runtime
						{},               #polygon tagging will follow
						flag,             #patch flag
						objspec_list[7],  #same uvcoords
						{}]               #no vmad mapping
					object_index += 1
				#end if already has a face list
				objspec_list[3] = faces
				objname = objspec_list[0]
				if objname == None:
					objname = defaultname
			#end if processing a valid poly type
		else: # Misc Chunks
			###if DEBUG: print "---- %s: skipping (definitely!)" % lwochunk.chunkname
			lwochunk.skip()
		#uncomment here to log data structure as it is built
		# ###if DEBUG: print object_list

	#last object read
	create_objects(clip_list, objspec_list, surf_list)
	update_material(clip_list, objspec_list, surf_list) #give it all the object
	objspec_list = None
	surf_list = None
	clip_list = None

	###if DEBUG: print "\nFound %d objects:" % object_index

# enddef read_lwob


# =============================
# === Read LightWave Format ===
# =============================
def read_lwo2(file, filename, typ="LWO2"):

	###if DEBUG: print "LightWave 6 (and above) format"

	dir_part = Blender.sys.dirname(filename)
	fname_part = Blender.sys.basename(filename)
	ask_weird = 1

	#first initialization of data structures
	defaultname = Blender.sys.splitext(fname_part)[0]
	tag_list = []   #tag list: global for the whole file?
	surf_list = []  #surf list: global for the whole file?
	clip_list = []  #clip list: global for the whole file?
	object_index = 0
	object_list = None
	objspec_list = None
	# init value is: object_list = [[None, {}, [], [], {}, {}, 0, {}, {}]]
	#0 - objname                #original name
	#1 - obj_dict = {TAG}       #objects created
	#2 - verts = []             #object vertexes
	#3 - faces = []             #object faces (associations poly -> vertexes)
	#4 - obj_dim_dict = {TAG}   #tuples size and pos in local object coords - used for NON-UV mappings
	#5 - polytag_dict = {TAG}   #tag to polygons mapping
	#6 - patch_flag             #0 = surf; 1 = patch (subdivision surface) - it was the image list
	#7 - uvcoords_dict = {name} #uvmap coordinates (mixed mode per vertex/per face)
	#8 - facesuv_dict = {name}  #vmad only coordinates associations poly & vertex -> uv tuples
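	# Example of one populated entry (editor's sketch, field order as above):
	#   ["Cube", {}, [(0.0, 0.0, 0.0), ...], [[2, 1, 0], ...], {}, {'Default': [0, 1]}, 0, {'UVTex': {...}}, {}]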

	#pass 1: look in advance for materials
	###if DEBUG: print "Starting Pass 1: hold on tight"
	while 1:
		try:
			lwochunk = chunk.Chunk(file)
		except EOFError:
			break
		###if DEBUG: print ' ',
		if lwochunk.chunkname == "TAGS": # Tags
			###if DEBUG: print "---- TAGS"
			tag_list.extend(read_tags(lwochunk))
		elif lwochunk.chunkname == "SURF": # surfaces
			###if DEBUG: print "---- SURF"
			surf_list.append(read_surfs(lwochunk, surf_list, tag_list))
		elif lwochunk.chunkname == "CLIP": # texture images
			###if DEBUG: print "---- CLIP"
			clip_list.append(read_clip(lwochunk, dir_part))
			###if DEBUG: print "read total %s clips up to now" % len(clip_list)
		else: # Misc Chunks
			if ask_weird:
				ckname = safestring(lwochunk.chunkname)
				if "#" in ckname:
					choice = Blender.Draw.PupMenu("WARNING: file could be corrupted.%t|Import anyway|Give up")
					if choice != 1:
						###if DEBUG: print "---- %s: Maybe file corrupted. Terminated by user" % lwochunk.chunkname
						return
				ask_weird = 0
			###if DEBUG: print "---- %s: skipping (maybe later)" % lwochunk.chunkname
			lwochunk.skip()

	#add default material for orphaned faces, if any
	surf_list.append({'NAME': "_Orphans", 'g_MAT': bpy.data.materials.new("_Orphans")})

	#pass 2: effectively generate objects
	###if DEBUG: print "Pass 2: now for the hard part"
	file.seek(0)
	# === LWO header ===
	form_id, form_size, form_type = struct.unpack(">4s1L4s", file.read(12))
	if (form_type != "LWO2"):
		###if DEBUG: print "??? Inconsistent file type: %s" % form_type
		return
	while 1:
		try:
			lwochunk = chunk.Chunk(file)
		except EOFError:
			break
		###if DEBUG: print ' ',
		if lwochunk.chunkname == "LAYR":
			###if DEBUG: print "---- LAYR"
			objname = read_layr(lwochunk)
			###if DEBUG: print objname
			if objspec_list != None: #create the object
				create_objects(clip_list, objspec_list, surf_list)
				update_material(clip_list, objspec_list, surf_list) #give it all the object
			objspec_list = [objname, {}, [], [], {}, {}, 0, {}, {}]
			object_index += 1
		elif lwochunk.chunkname == "PNTS": # Verts
			###if DEBUG: print "---- PNTS"
			verts = read_verts(lwochunk)
			objspec_list[2] = verts
		elif lwochunk.chunkname == "VMAP": # MAPS (UV)
			###if DEBUG: print "---- VMAP"
			#objspec_list[7] = read_vmap(objspec_list[7], len(objspec_list[2]), lwochunk)
			read_vmap(objspec_list[7], len(objspec_list[2]), lwochunk)
		elif lwochunk.chunkname == "VMAD": # MAPS (UV) per-face
			###if DEBUG: print "---- VMAD"
			#objspec_list[7], objspec_list[8] = read_vmad(objspec_list[7], objspec_list[8], len(objspec_list[3]), len(objspec_list[2]), lwochunk)
			read_vmad(objspec_list[7], objspec_list[8], len(objspec_list[3]), len(objspec_list[2]), lwochunk)
		elif lwochunk.chunkname == "POLS": # Faces v6.0
			###if DEBUG: print "-------- POLS(6)"
			faces, flag = read_faces_6(lwochunk)
			#flag is 0 for regular polygon, 1 for patches (= subsurf), 2 for anything else to be ignored
			if flag<2:
				if objspec_list[3] != []:
					#create immediately the object
					create_objects(clip_list, objspec_list, surf_list)
					update_material(clip_list, objspec_list, surf_list) #give it all the object
					#update with new data
					objspec_list = [objspec_list[0], #update name
						{},               #init
						objspec_list[2],  #same vertexes
						faces,            #give it the new faces
						{},               #no need to copy - filled at runtime
						{},               #polygon tagging will follow
						flag,             #patch flag
						objspec_list[7],  #same uvcoords
						{}]               #no vmad mapping
					object_index += 1
				#end if already has a face list
				objspec_list[3] = faces
				objname = objspec_list[0]
				if objname == None:
					objname = defaultname
			#end if processing a valid poly type
		elif lwochunk.chunkname == "PTAG": # PTags
			###if DEBUG: print "---- PTAG"
			polytag_dict = read_ptags(lwochunk, tag_list)
			for kk, polytag_dict_val in polytag_dict.iteritems(): objspec_list[5][kk] = polytag_dict_val
		else: # Misc Chunks
			###if DEBUG: print "---- %s: skipping (definitely!)" % lwochunk.chunkname
			lwochunk.skip()
		#uncomment here to log data structure as it is built

	#last object read
	create_objects(clip_list, objspec_list, surf_list)
	update_material(clip_list, objspec_list, surf_list) #give it all the object
	objspec_list = None
	surf_list = None
	clip_list = None

	###if DEBUG: print "\nFound %d objects:" % object_index

# enddef read_lwo2


# ===========================================================
# === File reading routines =================================
# ===========================================================
# ==================
# === Read Verts ===
# ==================
def read_verts(lwochunk):
	#data = cStringIO.StringIO(lwochunk.read())
	numverts = lwochunk.chunksize/12
	return [struct.unpack(">fff", lwochunk.read(12)) for i in xrange(numverts)]
# enddef read_verts


# =================
# === Read Name ===
# =================
# modified to deal with odd length strings
def read_name(file):
	name = ""
	while 1:
		char = file.read(1)
		if char == "\0": break
		else: name += char
	len_name = len(name) + 1 #count the trailing zero
	if len_name%2==1:
		char = file.read(1) #remove zero padding to even length
		len_name += 1
	return name, len_name
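# Example (editor's sketch): the raw bytes "Side\0\0" decode to name "Side" with
# len_name == 6 -- the string plus its terminating zero is 5 bytes (odd), so one
# extra padding byte is consumed to keep the following chunk data word-aligned.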


# ==================
# === Read Layer ===
# ==================
def read_layr(lwochunk):
	data = cStringIO.StringIO(lwochunk.read())
	idx, flags = struct.unpack(">hh", data.read(4))
	pivot = struct.unpack(">fff", data.read(12))
	layer_name, discard = read_name(data)
	if not layer_name: layer_name = "NoName"
	return layer_name
# enddef read_layr


# ======================
# === Read Faces 5.5 ===
# ======================
def read_faces_5(lwochunk):
	data = cStringIO.StringIO(lwochunk.read())
	faces = []
	i = 0
	while i < lwochunk.chunksize:
		#if not i%1000 and my_meshtools.show_progress:
		#	Blender.Window.DrawProgressBar(float(i)/lwochunk.chunksize, "Reading Faces")

		numfaceverts, = struct.unpack(">H", data.read(2))
		facev = [struct.unpack(">H", data.read(2))[0] for j in xrange(numfaceverts)]
		facev.reverse()
		faces.append(facev)
		surfaceindex, = struct.unpack(">H", data.read(2))
		if surfaceindex < 0:
			###if DEBUG: print "***Error. Referencing incorrect surface index"
			return
		i += (4+numfaceverts*2)
	return faces


# ==================================
# === Read Variable-Length Index ===
# ==================================
def read_vx(data):
	byte1, = struct.unpack(">B", data.read(1))
	if byte1 != 0xFF: # 2-byte index
		byte2, = struct.unpack(">B", data.read(1))
		index = byte1*256 + byte2
		index_size = 2
	else: # 4-byte index
		byte2, byte3, byte4 = struct.unpack(">3B", data.read(3))
		index = byte2*65536 + byte3*256 + byte4
		index_size = 4
	return index, index_size
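# Example (editor's sketch): bytes 0x01 0x30 decode to index 0x0130 (304) with
# index_size 2, while 0xFF 0x00 0x01 0x30 decodes to index 0x000130 (304) with
# index_size 4 -- the 0xFF marker in the first byte selects the 4-byte form.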


# ======================
# === Read uvmapping ===
# ======================
def read_vmap(uvcoords_dict, maxvertnum, lwochunk):

	if maxvertnum == 0:
		###if DEBUG: print "Found VMAP but no vertexes to map!"
		return uvcoords_dict
	data = cStringIO.StringIO(lwochunk.read())
	map_type = data.read(4)
	if map_type != "TXUV":
		###if DEBUG: print "Reading VMAP: No Texture UV map Were Found. Map Type: %s" % map_type
		return uvcoords_dict
	dimension, = struct.unpack(">H", data.read(2))
	name, i = read_name(data) #i initialized with string length + zeros
	###if DEBUG: print "TXUV %d %s" % (dimension, name)
	#note if there is already a VMAD it will be lost
	#it is assumed that VMAD will follow the corresponding VMAP
	Vector = Blender.Mathutils.Vector
	try: #if uvcoords_dict.has_key(name):
		my_uv_dict = uvcoords_dict[name] #update existing
	except: #else:
		my_uv_dict = {} #start a brand new: this could be made more smart
	while (i < lwochunk.chunksize - 6): #4+2 header bytes already read
		vertnum, vnum_size = read_vx(data)
		uv = struct.unpack(">ff", data.read(8))
		if vertnum >= maxvertnum:
			###if DEBUG: print "Hem: more uvmap than vertexes? ignoring uv data for vertex %d" % vertnum
			pass
		else:
			my_uv_dict[vertnum] = Vector(uv)
		i += 8 + vnum_size
	#end loop on uv pairs
	uvcoords_dict[name] = my_uv_dict
	#this is a per-vertex mapping AND the uv tuple is vertex-ordered, so faces_uv is the same as faces
	#return uvcoords_dict
	return


# ========================
# === Read uvmapping 2 ===
# ========================
def read_vmad(uvcoords_dict, facesuv_dict, maxfacenum, maxvertnum, lwochunk):
	if maxvertnum == 0 or maxfacenum == 0:
		###if DEBUG: print "Found VMAD but no vertexes to map!"
		return uvcoords_dict, facesuv_dict
	data = cStringIO.StringIO(lwochunk.read())
	map_type = data.read(4)
	if map_type != "TXUV":
		###if DEBUG: print "Reading VMAD: No Texture UV map Were Found. Map Type: %s" % map_type
		return uvcoords_dict, facesuv_dict
	dimension, = struct.unpack(">H", data.read(2))
	name, i = read_name(data) #i initialized with string length + zeros
	###if DEBUG: print "TXUV %d %s" % (dimension, name)
	try: #if uvcoords_dict.has_key(name):
		my_uv_dict = uvcoords_dict[name] #update existing
	except: #else:
		my_uv_dict = {} #start a brand new: this could be made more smart
	my_facesuv_list = []
	newindex = maxvertnum + 10 #why +10? Why not?
	#end variable initialization
	Vector = Blender.Mathutils.Vector
	while (i < lwochunk.chunksize - 6): #4+2 header bytes already read
		vertnum, vnum_size = read_vx(data)
		i += vnum_size
		polynum, vnum_size = read_vx(data)
		i += vnum_size
		uv = struct.unpack(">ff", data.read(8))
		if polynum >= maxfacenum or vertnum >= maxvertnum:
			###if DEBUG: print "Hem: more uvmap than vertexes? ignoring uv data for vertex %d" % vertnum
			pass
		else:
			my_uv_dict[newindex] = Vector(uv)
			my_facesuv_list.append([polynum, vertnum, newindex])
			newindex += 1
		i += 8
	#end loop on uv pairs
	uvcoords_dict[name] = my_uv_dict
	facesuv_dict[name] = my_facesuv_list
	###if DEBUG: print "updated %d vertexes data" % (newindex-maxvertnum-10)
	return


# =================
# === Read tags ===
# =================
def read_tags(lwochunk):
	data = cStringIO.StringIO(lwochunk.read())
	tag_list = []
	current_tag = ""
	i = 0
	while i < lwochunk.chunksize:
		char = data.read(1)
		if char == "\0":
			tag_list.append(current_tag)
			if (len(current_tag) % 2 == 0): char = data.read(1)
			current_tag = ""
		else:
			current_tag += char
		i += 1
	###if DEBUG: print "read %d tags, list follows: %s" % (len(tag_list), tag_list)
	return tag_list


# ==================
# === Read Ptags ===
# ==================
def read_ptags(lwochunk, tag_list):
	data = cStringIO.StringIO(lwochunk.read())
	polygon_type = data.read(4)
	if polygon_type != "SURF":
		###if DEBUG: print "No Surf Were Found. Polygon Type: %s" % polygon_type
		return {}
	ptag_dict = {}
	i = 0
	while(i < lwochunk.chunksize-4): #4 bytes polygon type already read
		#if not i%1000 and my_meshtools.show_progress:
		#	Blender.Window.DrawProgressBar(float(i)/lwochunk.chunksize, "Reading PTAGS")
		poln, poln_size = read_vx(data)
		i += poln_size
		tag_index, = struct.unpack(">H", data.read(2))
		if tag_index >= (len(tag_list)): #an out-of-range tag index would crash the lookup below
			###if DEBUG: print "Reading PTAG: Surf belonging to undefined TAG: %d. Skipping" % tag_index
			return {}
		i += 2
		tag_key = tag_list[tag_index]
		try:
			ptag_dict[tag_list[tag_index]].append(poln)
		except: #if not(ptag_dict.has_key(tag_key)):
			ptag_dict[tag_list[tag_index]] = [poln]

	###if DEBUG: for i, ptag_dict_val in ptag_dict.iteritems(): print "read %d polygons belonging to TAG %s" % (len(ptag_dict_val ), i)
	return ptag_dict


# ==================
# === Read Clips ===
# ==================
def read_clip(lwochunk, dir_part):
	# img, IMG, g_IMG refers to blender image objects
	# ima, IMAG, g_IMAG refers to clip dictionary 'ID' entries: refer to blok and surf
	clip_dict = {}
	data = cStringIO.StringIO(lwochunk.read())
	data_str = data.read(4)
	if len(data_str) < 4: # can be zero also??? :/
		# Should not happen but lw can import so we should too
		return

	image_index, = struct.unpack(">L", data_str)
	clip_dict['ID'] = image_index
	i = 4
	while(i < lwochunk.chunksize):
		subchunkname, = struct.unpack("4s", data.read(4))
		subchunklen, = struct.unpack(">H", data.read(2))
		if subchunkname == "STIL":
			###if DEBUG: print "-------- STIL"
			clip_name, k = read_name(data)
			#now split text independently from platform
			#depend on the system where image was saved. NOT the one where the script is run
			no_sep = "\\"
			if Blender.sys.sep == no_sep: no_sep ="/"
			if (no_sep in clip_name):
				clip_name = clip_name.replace(no_sep, Blender.sys.sep)
			short_name = Blender.sys.basename(clip_name)
			if clip_name == "" or short_name == "":
				###if DEBUG: print "Reading CLIP: Empty clip name not allowed. Skipping"
				discard = data.read(subchunklen-k)
			clip_dict['NAME'] = clip_name
			clip_dict['BASENAME'] = short_name
		elif subchunkname == "XREF": #cross reference another image
			###if DEBUG: print "-------- XREF"
			image_index, = struct.unpack(">L", data.read(4))
			clip_name, k = read_name(data)
			clip_dict['NAME'] = clip_name
			clip_dict['XREF'] = image_index
		elif subchunkname == "NEGA": #negate texture effect
			###if DEBUG: print "-------- NEGA"
			n, = struct.unpack(">H", data.read(2))
			clip_dict['NEGA'] = n
		else: # Misc Chunks
			###if DEBUG: print "-------- CLIP:%s: skipping" % subchunkname
			discard = data.read(subchunklen)
		i = i + 6 + subchunklen
	#end loop on surf chunks
	###if DEBUG: print "read image:%s" % clip_dict
	if 'XREF' in clip_dict: # has_key
		###if DEBUG: print "Cross-reference: no image pre-allocated."
		return clip_dict
	#look for images
	#img = load_image("",clip_dict['NAME'])
	NAME= BASENAME= None

	try:
		NAME= clip_dict['NAME']
		BASENAME= clip_dict['BASENAME']
	except:
		clip_dict['g_IMG'] = None
		return
	# ###if DEBUG: print 'test', NAME, BASENAME
	img = BPyImage.comprehensiveImageLoad(NAME, dir_part, PLACE_HOLDER= False, RECURSIVE=False)
	if not img:
		###if DEBUG: print "***No image %s found: trying LWO file subdir" % NAME
		img = BPyImage.comprehensiveImageLoad(BASENAME, dir_part, PLACE_HOLDER= False, RECURSIVE=False)

	###if DEBUG: if not img: print "***No image %s found: giving up" % BASENAME
	#lucky we are: we have an image
	###if DEBUG: print "Image pre-allocated."
	clip_dict['g_IMG'] = img

	return clip_dict


# ===========================
# === Read Surfaces Block ===
# ===========================
def read_surfblok(subchunkdata):
	length = len(subchunkdata)
	my_dict = {}
	my_uvname = ""
	data = cStringIO.StringIO(subchunkdata)
	##############################################################
	# blok header sub-chunk
	##############################################################
	subchunkname, = struct.unpack("4s", data.read(4))
	subchunklen, = struct.unpack(">h", data.read(2))
	accumulate_i = subchunklen + 6
	if subchunkname != 'IMAP':
		###if DEBUG: print "---------- SURF: BLOK: %s: block aborting" % subchunkname
		return {}, ""
	###if DEBUG: print "---------- IMAP"
	ordinal, i = read_name(data)
	my_dict['ORD'] = ordinal
	#my_dict['g_ORD'] = -1
	my_dict['ENAB'] = True
	while(i < subchunklen): # ---------left 6------------------------- loop on header parameters
		sub2chunkname, = struct.unpack("4s", data.read(4))
		sub2chunklen, = struct.unpack(">h", data.read(2))
		i = i + 6 + sub2chunklen
		if sub2chunkname == "CHAN":
			###if DEBUG: print "------------ CHAN"
			sub2chunkname, = struct.unpack("4s", data.read(4))
			my_dict['CHAN'] = sub2chunkname
			sub2chunklen -= 4
		elif sub2chunkname == "ENAB": #only present if is to be disabled
			###if DEBUG: print "------------ ENAB"
			ena, = struct.unpack(">h", data.read(2))
			my_dict['ENAB'] = ena
			sub2chunklen -= 2
		elif sub2chunkname == "NEGA": #only present if is to be enabled
			###if DEBUG: print "------------ NEGA"
			ena, = struct.unpack(">h", data.read(2))
			if ena == 1:
				my_dict['NEGA'] = ena
			sub2chunklen -= 2
		elif sub2chunkname == "OPAC": #only present if is to be disabled
			###if DEBUG: print "------------ OPAC"
			opa, = struct.unpack(">h", data.read(2))
			s, = struct.unpack(">f", data.read(4))
			envelope, env_size = read_vx(data)
			my_dict['OPAC'] = opa
			my_dict['OPACVAL'] = s
			sub2chunklen -= 6
		elif sub2chunkname == "AXIS":
			###if DEBUG: print "------------ AXIS"
			ena, = struct.unpack(">h", data.read(2))
			my_dict['DISPLAXIS'] = ena
			sub2chunklen -= 2
		else: # Misc Chunks
			###if DEBUG: print "------------ SURF: BLOK: IMAP: %s: skipping" % sub2chunkname
			discard = data.read(sub2chunklen)
	#end loop on blok header subchunks
	##############################################################
	# blok attributes sub-chunk
	##############################################################
	subchunkname, = struct.unpack("4s", data.read(4))
	subchunklen, = struct.unpack(">h", data.read(2))
	accumulate_i += subchunklen + 6
	if subchunkname != 'TMAP':
		###if DEBUG: print "---------- SURF: BLOK: %s: block aborting" % subchunkname
		return {}, ""
	###if DEBUG: print "---------- TMAP"
	i = 0
	while(i < subchunklen): # -----------left 6----------------------- loop on header parameters
		sub2chunkname, = struct.unpack("4s", data.read(4))
		sub2chunklen, = struct.unpack(">h", data.read(2))
		i = i + 6 + sub2chunklen
		if sub2chunkname == "CNTR":
			###if DEBUG: print "------------ CNTR"
			x, y, z = struct.unpack(">fff", data.read(12))
			envelope, env_size = read_vx(data)
			my_dict['CNTR'] = [x, y, z]
			sub2chunklen -= (12+env_size)
		elif sub2chunkname == "SIZE":
			###if DEBUG: print "------------ SIZE"
			x, y, z = struct.unpack(">fff", data.read(12))
			envelope, env_size = read_vx(data)
			my_dict['SIZE'] = [x, y, z]
			sub2chunklen -= (12+env_size)
		elif sub2chunkname == "ROTA":
			###if DEBUG: print "------------ ROTA"
			x, y, z = struct.unpack(">fff", data.read(12))
			envelope, env_size = read_vx(data)
			my_dict['ROTA'] = [x, y, z]
			sub2chunklen -= (12+env_size)
		elif sub2chunkname == "CSYS":
			###if DEBUG: print "------------ CSYS"
			ena, = struct.unpack(">h", data.read(2))
			my_dict['CSYS'] = ena
			sub2chunklen -= 2
		else: # Misc Chunks
			###if DEBUG: print "------------ SURF: BLOK: TMAP: %s: skipping" % sub2chunkname
			pass
		if sub2chunklen > 0:
			discard = data.read(sub2chunklen)
	#end loop on blok attributes subchunks
	##############################################################
	# ok, now other attributes without sub_chunks
	##############################################################
	while(accumulate_i < length): # ---------------------------------- loop on header parameters: length has already stripped the 6 bytes header
		subchunkname, = struct.unpack("4s", data.read(4))
		subchunklen, = struct.unpack(">H", data.read(2))
		accumulate_i = accumulate_i + 6 + subchunklen
		if subchunkname == "PROJ":
			###if DEBUG: print "---------- PROJ"
			p, = struct.unpack(">h", data.read(2))
			my_dict['PROJ'] = p
			subchunklen -= 2
		elif subchunkname == "AXIS":
			###if DEBUG: print "---------- AXIS"
			a, = struct.unpack(">h", data.read(2))
			my_dict['MAJAXIS'] = a
			subchunklen -= 2
		elif subchunkname == "IMAG":
			###if DEBUG: print "---------- IMAG"
			i, i_size = read_vx(data)
			my_dict['IMAG'] = i
			subchunklen -= i_size
		elif subchunkname == "WRAP":
			###if DEBUG: print "---------- WRAP"
			ww, wh = struct.unpack(">hh", data.read(4))
			#reduce width and height to just 1 parameter for both
			my_dict['WRAP'] = max([ww,wh])
			#my_dict['WRAPWIDTH'] = ww
			#my_dict['WRAPHEIGHT'] = wh
			subchunklen -= 4
		elif subchunkname == "WRPW":
			###if DEBUG: print "---------- WRPW"
			w, = struct.unpack(">f", data.read(4))
			my_dict['WRPW'] = w
			envelope, env_size = read_vx(data)
			subchunklen -= (env_size+4)
		elif subchunkname == "WRPH":
			###if DEBUG: print "---------- WRPH"
			w, = struct.unpack(">f", data.read(4))
			my_dict['WRPH'] = w
			envelope, env_size = read_vx(data)
			subchunklen -= (env_size+4)
		elif subchunkname == "VMAP":
			###if DEBUG: print "---------- VMAP"
			vmp, i = read_name(data)
			my_dict['VMAP'] = vmp
			my_uvname = vmp
			subchunklen -= i
		else: # Misc Chunks
			###if DEBUG: print "---------- SURF: BLOK: %s: skipping" % subchunkname
			pass
		if subchunklen > 0:
			discard = data.read(subchunklen)
	#end loop on blok subchunks
	return my_dict, my_uvname


# =====================
# === Read Surfaces ===
# =====================
def read_surfs(lwochunk, surf_list, tag_list):
	my_dict = {}
	data = cStringIO.StringIO(lwochunk.read())
	surf_name, i = read_name(data)
	parent_name, j = read_name(data)
	i += j
	if (surf_name == "") or not(surf_name in tag_list):
		###if DEBUG: print "Reading SURF: Actually empty surf name not allowed. Skipping"
		return {}
	if (parent_name != ""):
		parent_index = [x['NAME'] for x in surf_list].count(parent_name)
		if parent_index >0:
			my_dict = surf_list[parent_index-1]
	my_dict['NAME'] = surf_name
	###if DEBUG: print "Surface data for TAG %s" % surf_name
	while(i < lwochunk.chunksize):
		subchunkname, = struct.unpack("4s", data.read(4))
		subchunklen, = struct.unpack(">H", data.read(2))
		i = i + 6 + subchunklen #6 bytes subchunk header
		if subchunkname == "COLR": #color: mapped on color
			###if DEBUG: print "-------- COLR"
			r, g, b = struct.unpack(">fff", data.read(12))
			envelope, env_size = read_vx(data)
			my_dict['COLR'] = [r, g, b]
			subchunklen -= (12+env_size)
		elif subchunkname == "DIFF": #diffusion: mapped on reflection (diffuse shader)
			###if DEBUG: print "-------- DIFF"
			s, = struct.unpack(">f", data.read(4))
			envelope, env_size = read_vx(data)
			my_dict['DIFF'] = s
			subchunklen -= (4+env_size)
		elif subchunkname == "SPEC": #specularity: mapped to specularity (spec shader)
			###if DEBUG: print "-------- SPEC"
			s, = struct.unpack(">f", data.read(4))
			envelope, env_size = read_vx(data)
			my_dict['SPEC'] = s
			subchunklen -= (4+env_size)
		elif subchunkname == "REFL": #reflection: mapped on raymirror
			###if DEBUG: print "-------- REFL"
			s, = struct.unpack(">f", data.read(4))
			envelope, env_size = read_vx(data)
			my_dict['REFL'] = s
			subchunklen -= (4+env_size)
		elif subchunkname == "TRNL": #translucency: mapped on same param
			###if DEBUG: print "-------- TRNL"
			s, = struct.unpack(">f", data.read(4))
			envelope, env_size = read_vx(data)
			my_dict['TRNL'] = s
			subchunklen -= (4+env_size)
		elif subchunkname == "GLOS": #glossiness: mapped on specularity hardness (spec shader)
			###if DEBUG: print "-------- GLOS"
			s, = struct.unpack(">f", data.read(4))
			envelope, env_size = read_vx(data)
			my_dict['GLOS'] = s
			subchunklen -= (4+env_size)
		elif subchunkname == "TRAN": #transparency: inverted and mapped on alpha channel
			###if DEBUG: print "-------- TRAN"
			s, = struct.unpack(">f", data.read(4))
			envelope, env_size = read_vx(data)
			my_dict['TRAN'] = s
			subchunklen -= (4+env_size)
		elif subchunkname == "LUMI": #luminosity: mapped on emit channel
			###if DEBUG: print "-------- LUMI"
			s, = struct.unpack(">f", data.read(4))
			envelope, env_size = read_vx(data)
			my_dict['LUMI'] = s
			subchunklen -= (4+env_size)
		elif subchunkname == "GVAL": #glow: mapped on add channel
			###if DEBUG: print "-------- GVAL"
			s, = struct.unpack(">f", data.read(4))
			envelope, env_size = read_vx(data)
			my_dict['GVAL'] = s
			subchunklen -= (4+env_size)
		elif subchunkname == "SMAN": #smoothing angle
			###if DEBUG: print "-------- SMAN"
			s, = struct.unpack(">f", data.read(4))
			my_dict['SMAN'] = s
			subchunklen -= 4
		elif subchunkname == "SIDE": #double sided?
			###if DEBUG: print "-------- SIDE" #if 1 side do not define key
			s, = struct.unpack(">H", data.read(2))
			if s == 3:
				my_dict['SIDE'] = s
			subchunklen -= 2
		elif subchunkname == "RIND": #Refraction: mapped on IOR
			###if DEBUG: print "-------- RIND"
			s, = struct.unpack(">f", data.read(4))
			envelope, env_size = read_vx(data)
			my_dict['RIND'] = s
			subchunklen -= (4+env_size)
		elif subchunkname == "BLOK": #blocks
			###if DEBUG: print "-------- BLOK"
			rr, uvname = read_surfblok(data.read(subchunklen))
			#paranoia setting: preventing adding an empty dict
			if rr: # != {}
				try:
					my_dict['BLOK'].append(rr)
				except:
					my_dict['BLOK'] = [rr]

			if uvname: # != "":
				my_dict['UVNAME'] = uvname #theoretically there could be a number of them: only one used per surf
			# all are dictionaries - so testing keys
			if not('g_IMAG' in my_dict) and ('CHAN' in rr) and ('OPAC' in rr) and ('IMAG' in rr):
				if (rr['CHAN'] == 'COLR') and (rr['OPAC'] == 0):
					my_dict['g_IMAG'] = rr['IMAG'] #do not set anything, just save image object for later assignment
			subchunklen = 0 #force ending
		else: # Misc Chunks
			pass
			###if DEBUG: print "-------- SURF:%s: skipping" % subchunkname
		if subchunklen > 0:
			discard = data.read(subchunklen)
	#end loop on surf chunks
	try: #if my_dict.has_key('BLOK'):
		my_dict['BLOK'].reverse() #texture applied in reverse order with respect to reading from lwo
	except:
		pass

	#uncomment this if material pre-allocated by read_surf
	my_dict['g_MAT'] = bpy.data.materials.new(my_dict['NAME'])
	###if DEBUG: print "-> Material pre-allocated."
	return my_dict


# =========================
# === Recalculate Faces ===
# =========================

def get_uvface(complete_list, facenum):
	# extract from the complete list only vertexes of the desired polygon
	'''
	my_facelist = []
	for elem in complete_list:
		if elem[0] == facenum:
			my_facelist.append(elem)
	return my_facelist
	'''
	return [elem for elem in complete_list if elem[0] == facenum]

def get_newindex(polygon_list, vertnum):
	# extract from the polygon list the new index associated to a vertex
	if not polygon_list: # == []
		return -1
	for elem in polygon_list:
		if elem[1] == vertnum:
			return elem[2]
	# ###if DEBUG: print "WARNING: expected vertex %s for polygon %s. Polygon_list dump follows" % (vertnum, polygon_list[0][0])
	# ###if DEBUG: print polygon_list
	return -1

def get_surf(surf_list, cur_tag):
	for elem in surf_list: # elem can be None
		if elem and elem['NAME'] == cur_tag:
			return elem
	return {}
|
2005-10-11 02:32:58 +00:00
|
|
|
|
2006-01-29 19:17:53 +00:00
|
|
|
|
2006-06-30 11:22:45 +00:00
|
|
|
|
2005-10-11 02:32:58 +00:00
|
|
|
# ====================================
|
|
|
|
# === Modified Create Blender Mesh ===
|
|
|
|
# ====================================
|
2006-01-29 19:17:53 +00:00
|
|
|
def my_create_mesh(clip_list, surf, objspec_list, current_facelist, objname, not_used_faces):
|
2006-06-29 07:06:54 +00:00
|
|
|
#take the needed faces and update the not-used face list
|
|
|
|
complete_vertlist = objspec_list[2]
|
|
|
|
complete_facelist = objspec_list[3]
|
|
|
|
uvcoords_dict = objspec_list[7]
|
|
|
|
facesuv_dict = objspec_list[8]
|
|
|
|
vertex_map = {} #implementation as dict
|
|
|
|
cur_ptag_faces = []
|
|
|
|
cur_ptag_faces_indexes = []
|
|
|
|
maxface = len(complete_facelist)
|
|
|
|
for ff in current_facelist:
|
|
|
|
if ff >= maxface:
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "Non existent face addressed: Giving up with this object"
|
2006-06-29 07:06:54 +00:00
|
|
|
return None, not_used_faces #return the created object
|
|
|
|
cur_face = complete_facelist[ff]
|
|
|
|
cur_ptag_faces_indexes.append(ff)
|
2006-06-30 11:22:45 +00:00
|
|
|
if not_used_faces: # != []
|
|
|
|
not_used_faces[ff] = -1
|
2006-06-29 07:06:54 +00:00
|
|
|
for vv in cur_face: vertex_map[vv] = 1
|
|
|
|
#end loop on faces
|
|
|
|
store_edge = 0
|
|
|
|
|
2007-04-18 14:40:01 +00:00
|
|
|
scn= bpy.data.scenes.active
|
|
|
|
msh = bpy.data.meshes.new()
|
2007-02-08 00:27:01 +00:00
|
|
|
obj = scn.objects.new(msh)
|
2006-06-30 11:22:45 +00:00
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
mat = None
|
2006-06-30 11:22:45 +00:00
|
|
|
try:
|
2007-02-08 00:27:01 +00:00
|
|
|
msh.materials = [surf['g_MAT']]
|
2006-06-29 07:06:54 +00:00
|
|
|
except:
|
|
|
|
pass
|
2007-02-08 00:27:01 +00:00
|
|
|
|
|
|
|
msh.mode |= Blender.Mesh.Modes.AUTOSMOOTH #smooth it anyway
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'SMAN' in surf: # has_key
|
2006-06-29 07:06:54 +00:00
|
|
|
#not allowed mixed mode mesh (all the mesh is smoothed and all with the same angle)
|
|
|
|
#only one smoothing angle will be active! => take the max one
|
2007-02-08 00:27:01 +00:00
|
|
|
msh.degr = min(80, int(surf['SMAN']/3.1415926535897932384626433832795*180.0)) #lwo in radians - blender in degrees
|
2006-06-30 11:22:45 +00:00
|
|
|
|
|
|
|
try:
|
2007-02-08 00:27:01 +00:00
|
|
|
img= lookup_imag(clip_list, surf['g_IMAG'])['g_IMG']
|
2006-06-29 07:06:54 +00:00
|
|
|
except:
|
2006-06-30 11:22:45 +00:00
|
|
|
img= None
|
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
#uv_flag = ((surf.has_key('UVNAME')) and (uvcoords_dict.has_key(surf['UVNAME'])) and (img != None))
|
2007-07-30 02:24:17 +00:00
|
|
|
uv_flag = (('UVNAME' in surf) and (surf['UVNAME'] in uvcoords_dict))
|
2006-06-29 07:06:54 +00:00
|
|
|
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "\n#===================================================================#"
|
|
|
|
###if DEBUG: print "Processing Object: %s" % objname
|
|
|
|
###if DEBUG: print "#===================================================================#"
|
2007-02-08 00:27:01 +00:00
|
|
|
|
|
|
|
if uv_flag:
|
|
|
|
msh.verts.extend([(0.0,0.0,0.0),])
|
|
|
|
j = 1
|
|
|
|
else:
|
|
|
|
j = 0
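# Editor's assumption: the dummy vertex at index 0 is added for UV meshes so that
# no real face ever references vertex 0; in the 2.4x API a face whose last corners
# include vertex 0 gets its corners rotated, which would scramble the per-face UVs
# assigned below. The dummy vertex is deleted again once faces and UVs are in place
# (see the msh.verts.delete([0,]) call near the end of this function).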
|
|
|
|
|
|
|
|
def tmp_get_vert(k, i):
|
|
|
|
vertex_map[k] = i+j # j is the dummy vert
|
2007-04-04 09:11:03 +00:00
|
|
|
# ###if DEBUG: print complete_vertlist[i]
|
2007-02-08 00:27:01 +00:00
|
|
|
return complete_vertlist[k]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
msh.verts.extend([tmp_get_vert(k, i) for i, k in enumerate(vertex_map.iterkeys())])
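# Side note: tmp_get_vert is used for its side effect as much as its return value -
# while the comprehension feeds msh.verts.extend() it also fills vertex_map with
# old-index -> new-index entries (shifted by j for the dummy vertex), which the
# face loop below relies on.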
|
2007-04-04 09:11:03 +00:00
|
|
|
msh.transform(TXMTX) # faster than applying while reading.
|
2006-06-29 07:06:54 +00:00
|
|
|
#end sweep over vertexes
|
2006-06-30 11:22:45 +00:00
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
#append faces
|
2007-02-08 00:27:01 +00:00
|
|
|
FACE_TEX= Blender.Mesh.FaceModes.TEX
|
|
|
|
FACE_ALPHA= Blender.Mesh.FaceTranspModes.ALPHA
|
|
|
|
EDGE_DRAW_FLAG= Blender.Mesh.EdgeFlags.EDGEDRAW | Blender.Mesh.EdgeFlags.EDGERENDER
|
|
|
|
|
|
|
|
|
|
|
|
edges = []
|
|
|
|
face_data = [] # [(indices, material, uvs, image), ]
|
|
|
|
face_uvs = []
|
|
|
|
edges_fgon = []
|
|
|
|
|
|
|
|
if uv_flag:
|
|
|
|
uvcoords_dict_context = uvcoords_dict[surf['UVNAME']]
|
|
|
|
try: current_uvdict = facesuv_dict[surf['UVNAME']]
|
|
|
|
except: current_uvdict = None
|
2007-04-21 11:23:45 +00:00
|
|
|
|
2007-02-08 00:27:01 +00:00
|
|
|
default_uv = Blender.Mathutils.Vector(0,0)
|
|
|
|
def tmp_get_face_uvs(cur_face, i):
|
|
|
|
uvs = []
|
|
|
|
if current_uvdict:
|
|
|
|
uvface = get_uvface(current_uvdict,i)
|
|
|
|
for vi in cur_face:
|
|
|
|
ni = get_newindex(uvface, vi)
|
|
|
|
if ni == -1: ni = vi
|
|
|
|
|
|
|
|
try:
|
|
|
|
uvs.append(uvcoords_dict_context[ ni ])
|
|
|
|
except:
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print '\tWarning, Corrupt UVs'
|
2007-02-08 00:27:01 +00:00
|
|
|
uvs.append(default_uv)
|
|
|
|
else:
|
|
|
|
for vi in cur_face:
|
2007-04-21 11:23:45 +00:00
|
|
|
try:
|
|
|
|
uvs.append(uvcoords_dict_context[ vi ])
|
|
|
|
except:
|
|
|
|
###if DEBUG: print '\tWarning, Corrupt UVs'
|
|
|
|
uvs.append(default_uv)
|
2007-02-08 00:27:01 +00:00
|
|
|
|
|
|
|
return uvs
|
2007-07-30 02:24:17 +00:00
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
for i in cur_ptag_faces_indexes:
|
|
|
|
cur_face = complete_facelist[i]
|
|
|
|
numfaceverts = len(cur_face)
|
2007-02-08 00:27:01 +00:00
|
|
|
|
|
|
|
if numfaceverts == 2: edges.append((vertex_map[cur_face[0]], vertex_map[cur_face[1]]))
|
2007-07-30 02:24:17 +00:00
|
|
|
elif numfaceverts == 3 or numfaceverts == 4:
|
|
|
|
rev_face = [__i for __i in reversed(cur_face)]
|
2007-02-08 00:27:01 +00:00
|
|
|
face_data.append( [vertex_map[j] for j in rev_face] )
|
|
|
|
if uv_flag: face_uvs.append(tmp_get_face_uvs(rev_face, i))
|
2007-07-30 02:24:17 +00:00
|
|
|
elif numfaceverts > 4:
|
2007-08-01 10:19:16 +00:00
|
|
|
meta_faces= BPyMesh.ngon(complete_vertlist, cur_face, PREF_FIX_LOOPS= True)
|
2007-02-08 00:27:01 +00:00
|
|
|
edge_face_count = {}
|
2006-06-29 07:06:54 +00:00
|
|
|
for mf in meta_faces:
|
2007-07-30 02:24:17 +00:00
|
|
|
# These will always be tris since they are scanfill faces
|
|
|
|
mf = cur_face[mf[2]], cur_face[mf[1]], cur_face[mf[0]]
|
|
|
|
face_data.append( [vertex_map[j] for j in mf] )
|
2007-02-08 00:27:01 +00:00
|
|
|
|
2007-07-30 02:24:17 +00:00
|
|
|
if uv_flag: face_uvs.append(tmp_get_face_uvs(mf, i))
|
2007-02-08 00:27:01 +00:00
|
|
|
|
2007-07-30 02:24:17 +00:00
|
|
|
#if USE_FGON:
|
|
|
|
if len(meta_faces) > 1:
|
|
|
|
mf = face_data[-1] # reuse mf
|
|
|
|
for j in xrange(3):
|
|
|
|
v1= mf[j]
|
|
|
|
v2= mf[j-1]
|
|
|
|
if v1!=v2:
|
|
|
|
if v1>v2:
|
|
|
|
v2,v1= v1,v2
|
|
|
|
try:
|
|
|
|
edge_face_count[v1,v2]+= 1
|
|
|
|
except:
|
|
|
|
edge_face_count[v1,v2]= 0
|
|
|
|
|
|
|
|
|
2006-07-02 09:44:44 +00:00
|
|
|
|
2007-02-08 00:27:01 +00:00
|
|
|
if edge_face_count:
|
|
|
|
edges_fgon.extend( [vert_key for vert_key, count in edge_face_count.iteritems() if count] )
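# Rough sketch of the idea above (illustrative corners): an ngon A-B-C-D-E is
# scanfilled into triangles; an interior edge such as A-C produced by two of those
# triangles ends up with edge_face_count[A,C] >= 1, while the ngon's outer edges are
# seen only once and stay at 0 via the except branch. Only the shared interior edges
# are collected in edges_fgon and flagged as FGON below, so the ngon still displays
# as a single face.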
|
|
|
|
|
2007-07-30 02:24:17 +00:00
|
|
|
if edges:
|
|
|
|
msh.edges.extend(edges)
|
|
|
|
|
2007-02-08 00:27:01 +00:00
|
|
|
face_mapping_removed = msh.faces.extend(face_data, indexList=True)
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'TRAN' in surf or (mat and mat.alpha<1.0): # in case mat is None
|
2007-02-08 00:27:01 +00:00
|
|
|
transp_flag = True
|
|
|
|
else:
|
|
|
|
transp_flag = False
|
|
|
|
|
|
|
|
if uv_flag:
|
|
|
|
msh.faceUV = True
|
|
|
|
msh_faces= msh.faces
|
|
|
|
for i, uvs in enumerate(face_uvs):
|
|
|
|
i_mapped = face_mapping_removed[i]
|
|
|
|
if i_mapped != None:
|
|
|
|
f = msh_faces[i_mapped]
|
|
|
|
f.uv = uvs
|
|
|
|
if img:
|
|
|
|
f.image = img
|
|
|
|
|
|
|
|
if transp_flag: f.transp |= FACE_ALPHA
|
|
|
|
|
|
|
|
if edges_fgon:
|
|
|
|
msh_edges = msh.edges
|
|
|
|
FGON= Blender.Mesh.EdgeFlags.FGON
|
|
|
|
edges_fgon = msh.findEdges( edges_fgon )
|
|
|
|
if type(edges_fgon) != list: edges_fgon = [edges_fgon]
|
|
|
|
for ed in edges_fgon:
|
|
|
|
if ed!=None:
|
|
|
|
msh_edges[ed].flag |= FGON
|
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
if not(uv_flag): #clear any UV data
|
2007-02-08 00:27:01 +00:00
|
|
|
msh.faceUV = False
|
|
|
|
|
|
|
|
if uv_flag:
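#drop the dummy vertex that was added at index 0 for the UV workaround above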
|
|
|
|
msh.verts.delete([0,])
|
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
return obj, not_used_faces #return the created object
|
2005-10-11 02:32:58 +00:00
|
|
|
|
|
|
|
|
|
|
|
# ============================================
|
|
|
|
# === Set Subsurf attributes on given mesh ===
|
|
|
|
# ============================================
|
|
|
|
def set_subsurf(obj):
|
2006-06-30 11:22:45 +00:00
|
|
|
mods = obj.modifiers # get the object's modifiers
|
|
|
|
mod = mods.append(Blender.Modifier.Type.SUBSURF) # add a new subsurf modifier
|
|
|
|
mod[Blender.Modifier.Settings.LEVELS] = 2 # set subsurf subdivision levels to 2
|
|
|
|
mod[Blender.Modifier.Settings.RENDLEVELS] = 2 # set subsurf rendertime subdivision levels to 2
|
2006-06-29 07:06:54 +00:00
|
|
|
obj.makeDisplayList()
|
2005-10-11 02:32:58 +00:00
|
|
|
|
|
|
|
|
|
|
|
# =================================
|
|
|
|
# === object size and dimension ===
|
|
|
|
# =================================
|
|
|
|
def obj_size_pos(obj):
|
2006-06-29 07:06:54 +00:00
|
|
|
bbox = obj.getBoundBox()
|
|
|
|
bbox_min = map(lambda *row: min(row), *bbox) #transpose & get min
|
|
|
|
bbox_max = map(lambda *row: max(row), *bbox) #transpose & get max
|
|
|
|
obj_size = (bbox_max[0]-bbox_min[0], bbox_max[1]-bbox_min[1], bbox_max[2]-bbox_min[2])
|
|
|
|
obj_pos = ( (bbox_max[0]+bbox_min[0]) / 2, (bbox_max[1]+bbox_min[1]) / 2, (bbox_max[2]+bbox_min[2]) / 2)
|
|
|
|
return (obj_size, obj_pos)
|
2005-10-11 02:32:58 +00:00
|
|
|
|
|
|
|
|
|
|
|
# =========================
|
|
|
|
# === Create the object ===
|
|
|
|
# =========================
|
2006-01-29 19:17:53 +00:00
|
|
|
def create_objects(clip_list, objspec_list, surf_list):
|
2006-06-29 07:06:54 +00:00
|
|
|
nf = len(objspec_list[3])
|
|
|
|
not_used_faces = range(nf)
|
|
|
|
ptag_dict = objspec_list[5]
|
|
|
|
obj_dict = {} #links tag names to object, used for material assignments
|
|
|
|
obj_dim_dict = {}
|
|
|
|
obj_list = [] #have it handy for parent association
|
|
|
|
middlechar = "+"
|
2006-06-30 11:22:45 +00:00
|
|
|
endchar = ""
|
2006-06-29 07:06:54 +00:00
|
|
|
if (objspec_list[6] == 1):
|
|
|
|
middlechar = endchar = "#"
|
2007-02-08 00:27:01 +00:00
|
|
|
for cur_tag, ptag_dict_val in ptag_dict.iteritems():
|
|
|
|
if ptag_dict_val != []:
|
2006-06-29 07:06:54 +00:00
|
|
|
cur_surf = get_surf(surf_list, cur_tag)
|
2007-02-08 00:27:01 +00:00
|
|
|
cur_obj, not_used_faces= my_create_mesh(clip_list, cur_surf, objspec_list, ptag_dict_val, objspec_list[0][:9]+middlechar+cur_tag[:9], not_used_faces)
|
2006-06-30 11:22:45 +00:00
|
|
|
# Works now with new modifiers
|
|
|
|
if objspec_list[6] == 1:
|
|
|
|
set_subsurf(cur_obj)
|
|
|
|
if cur_obj: # != None
|
2006-06-29 07:06:54 +00:00
|
|
|
obj_dict[cur_tag] = cur_obj
|
|
|
|
obj_dim_dict[cur_tag] = obj_size_pos(cur_obj)
|
|
|
|
obj_list.append(cur_obj)
|
|
|
|
#end loop on current group
|
|
|
|
#faces not used in any named PTAG are collected as orphans and get their own object below
|
|
|
|
orphans = []
|
|
|
|
for tt in not_used_faces:
|
|
|
|
if tt > -1: orphans.append(tt)
|
|
|
|
#end sweep on unused face list
|
|
|
|
not_used_faces = None
|
2006-06-30 11:22:45 +00:00
|
|
|
if orphans: # != []
|
2006-06-29 07:06:54 +00:00
|
|
|
cur_surf = get_surf(surf_list, "_Orphans")
|
|
|
|
cur_obj, not_used_faces = my_create_mesh(clip_list, cur_surf, objspec_list, orphans, objspec_list[0][:9]+middlechar+"Orphans", [])
|
2006-06-30 11:22:45 +00:00
|
|
|
if cur_obj: # != None
|
2006-06-29 07:06:54 +00:00
|
|
|
if objspec_list[6] == 1:
|
|
|
|
set_subsurf(cur_obj)
|
|
|
|
obj_dict["_Orphans"] = cur_obj
|
|
|
|
obj_dim_dict["_Orphans"] = obj_size_pos(cur_obj)
|
|
|
|
obj_list.append(cur_obj)
|
2006-06-30 11:22:45 +00:00
|
|
|
objspec_list[1]= obj_dict
|
|
|
|
objspec_list[4]= obj_dim_dict
|
2007-04-04 09:11:03 +00:00
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
return
|
|
|
|
|
|
|
|
|
2005-10-11 02:32:58 +00:00
|
|
|
|
|
|
|
# ===========================================
|
|
|
|
# === Lookup for image index in clip_list ===
|
|
|
|
# ===========================================
|
2006-06-30 11:22:45 +00:00
|
|
|
def lookup_imag(clip_list, ima_id):
|
2006-06-29 07:06:54 +00:00
|
|
|
for ii in clip_list:
|
2006-06-30 11:22:45 +00:00
|
|
|
if ii and ii['ID'] == ima_id:
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'XREF' in ii: # has_key
|
2006-06-29 07:06:54 +00:00
|
|
|
#cross reference - recursively look for images
|
|
|
|
return lookup_imag(clip_list, ii['XREF'])
|
|
|
|
else:
|
|
|
|
return ii
|
|
|
|
return None
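# Example of the XREF chain above (hypothetical IDs): if clip 3 carries
# {'ID': 3, 'XREF': 1}, lookup_imag(clip_list, 3) recurses into
# lookup_imag(clip_list, 1) and returns clip 1's record, so cross-referenced
# clips resolve to the image that was actually loaded.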
|
2005-10-11 02:32:58 +00:00
|
|
|
|
|
|
|
|
|
|
|
# ===================================================
|
|
|
|
# === Create and assign image mapping to material ===
|
|
|
|
# ===================================================
|
2006-01-29 19:17:53 +00:00
|
|
|
def create_blok(surf, mat, clip_list, obj_size, obj_pos):
|
2005-10-11 02:32:58 +00:00
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
def output_size_ofs(size, pos, blok):
|
|
|
|
#just automates a repetitive task
|
2006-06-30 11:22:45 +00:00
|
|
|
# 0 == X, 1 == Y, 2 == Z
|
|
|
|
size_default = [1.0] * 3
|
|
|
|
size2 = [1.0] * 3
|
|
|
|
ofs_default = [0.0] * 3
|
|
|
|
offset = [1.0] * 3
|
|
|
|
axis_default = [Blender.Texture.Proj.X, Blender.Texture.Proj.Y, Blender.Texture.Proj.Z]
|
|
|
|
axis = [1.0] * 3
|
2006-06-29 07:06:54 +00:00
|
|
|
c_map_txt = [" X--", " -Y-", " --Z"]
|
2006-06-30 11:22:45 +00:00
|
|
|
c_map = [0,1,2] # standard, good for Z axis projection
|
2006-06-29 07:06:54 +00:00
|
|
|
if blok['MAJAXIS'] == 0:
|
2006-06-30 11:22:45 +00:00
|
|
|
c_map = [1,2,0] # X axis projection
|
2006-06-29 07:06:54 +00:00
|
|
|
if blok['MAJAXIS'] == 2:
|
2006-06-30 11:22:45 +00:00
|
|
|
c_map = [0,2,1] # Y axis projection
|
|
|
|
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!axis mapping:"
|
2006-06-30 11:22:45 +00:00
|
|
|
#this is the smart way
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: for mp in c_map: print c_map_txt[mp]
|
2006-06-30 11:22:45 +00:00
|
|
|
|
|
|
|
if blok['SIZE'][0] != 0.0: #paranoia controls
|
|
|
|
size_default[0] = (size[0]/blok['SIZE'][0])
|
|
|
|
ofs_default[0] = ((blok['CNTR'][0]-pos[0])/blok['SIZE'][0])
|
|
|
|
if blok['SIZE'][1] != 0.0:
|
|
|
|
size_default[2] = (size[2]/blok['SIZE'][1])
|
|
|
|
ofs_default[2] = ((blok['CNTR'][1]-pos[2])/blok['SIZE'][1])
|
|
|
|
if blok['SIZE'][2] != 0.0:
|
|
|
|
size_default[1] = (size[1]/blok['SIZE'][2])
|
|
|
|
ofs_default[1] = ((blok['CNTR'][2]-pos[1])/blok['SIZE'][2])
|
|
|
|
|
|
|
|
for mp in xrange(3):
|
|
|
|
axis[mp] = axis_default[c_map[mp]]
|
|
|
|
size2[mp] = size_default[c_map[mp]]
|
|
|
|
offset[mp] = ofs_default[c_map[mp]]
|
|
|
|
if offset[mp]>10.0: offset[mp] -= 10.0
|
|
|
|
if offset[mp]<-10.0: offset[mp] += 10.0
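# Worked example of the remapping above (illustrative values only): for a Z-axis
# projection c_map stays [0,1,2], so axis/size/offset are used as computed; for an
# X-axis projection (MAJAXIS == 0) c_map is [1,2,0], so Blender's X/Y/Z projection
# slots are filled from the defaults computed for Y, Z and X respectively. The
# +/- 10.0 wrap keeps the offsets inside the range Blender's MTex.ofs accepts.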
|
|
|
|
# size = [size_default[mp] for mp in c_map]
|
|
|
|
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!texture size and offsets:"
|
|
|
|
###if DEBUG: print " sizeX = %.5f; sizeY = %.5f; sizeZ = %.5f" % (size[0],size[1],size[2])
|
|
|
|
###if DEBUG: print " ofsX = %.5f; ofsY = %.5f; ofsZ = %.5f" % (offset[0],offset[1],offset[2])
|
2006-06-30 11:22:45 +00:00
|
|
|
return axis, size2, offset
|
2006-06-29 07:06:54 +00:00
|
|
|
|
|
|
|
ti = 0
|
2006-06-30 11:22:45 +00:00
|
|
|
alphaflag = 0 #switched to 1 if some tex in this block is using alpha
|
|
|
|
lastimag = 0 #experimental ....
|
2006-06-29 07:06:54 +00:00
|
|
|
for blok in surf['BLOK']:
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "#...................................................................#"
|
|
|
|
###if DEBUG: print "# Processing texture block no.%s for surf %s" % (ti,surf['NAME'])
|
|
|
|
###if DEBUG: print "#...................................................................#"
|
|
|
|
# tobj.pdict (blok)
|
2006-06-29 07:06:54 +00:00
|
|
|
if ti > 9: break #only 10 channels (0..9) are available for texture mapping
|
2006-06-30 11:22:45 +00:00
|
|
|
#if not blok['ENAB']:
|
2007-04-04 09:11:03 +00:00
|
|
|
# ###if DEBUG: print "***Image is not ENABled! Quitting this block"
|
2006-06-30 11:22:45 +00:00
|
|
|
# break
|
2007-07-30 02:24:17 +00:00
|
|
|
if not('IMAG' in blok): # has_key
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "***No IMAGE for this block? Quitting"
|
2006-06-29 07:06:54 +00:00
|
|
|
break #extract out the image index within the clip_list
|
2006-06-30 11:22:45 +00:00
|
|
|
if blok['IMAG'] == 0: blok['IMAG'] = lastimag #experimental ....
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "looking for image number %d" % blok['IMAG']
|
2006-06-29 07:06:54 +00:00
|
|
|
ima = lookup_imag(clip_list, blok['IMAG'])
|
|
|
|
if ima == None:
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "***Block index image not within CLIP list? Quitting Block"
|
2006-06-29 07:06:54 +00:00
|
|
|
break #safety check (paranoia setting)
|
|
|
|
img = ima['g_IMG']
|
2006-06-30 11:22:45 +00:00
|
|
|
lastimag = blok['IMAG'] #experimental ....
|
2006-06-29 07:06:54 +00:00
|
|
|
if img == None:
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "***Failed to pre-allocate image %s found: giving up" % ima['BASENAME']
|
2006-06-29 07:06:54 +00:00
|
|
|
break
|
|
|
|
tname = str(ima['ID'])
|
2006-06-30 11:22:45 +00:00
|
|
|
if blok['ENAB']:
|
|
|
|
tname += "+"
|
|
|
|
else:
|
|
|
|
tname += "x" #let's signal when should not be enabled
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'CHAN' in blok: # has_key
|
2006-06-30 11:22:45 +00:00
|
|
|
tname += blok['CHAN']
|
2007-04-18 14:40:01 +00:00
|
|
|
newtex = bpy.data.textures.new(tname)
|
2006-06-30 11:22:45 +00:00
|
|
|
newtex.setType('Image') # make it an image texture
|
2006-06-29 07:06:54 +00:00
|
|
|
newtex.image = img
|
|
|
|
#how it extends beyond its borders
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'WRAP' in blok: # has_key
|
2006-06-29 07:06:54 +00:00
|
|
|
if (blok['WRAP'] == 3) or (blok['WRAP'] == 2):
|
|
|
|
newtex.setExtend('Extend')
|
|
|
|
elif (blok['WRAP'] == 1):
|
|
|
|
newtex.setExtend('Repeat')
|
|
|
|
elif (blok['WRAP'] == 0):
|
|
|
|
newtex.setExtend('Clip')
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "generated texture %s" % tname
|
2006-06-29 07:06:54 +00:00
|
|
|
|
|
|
|
#MapTo is determined by CHAN parameter
|
2006-06-30 11:22:45 +00:00
|
|
|
#assign some defaults
|
|
|
|
colfac = 1.0
|
|
|
|
dvar = 1.0
|
|
|
|
norfac = 0.5
|
|
|
|
nega = False
|
2006-06-29 07:06:54 +00:00
|
|
|
mapflag = Blender.Texture.MapTo.COL #default to color
|
2006-06-30 11:22:45 +00:00
|
|
|
maptype = Blender.Texture.Mappings.FLAT
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'CHAN' in blok: # has_key
|
|
|
|
if blok['CHAN'] == 'COLR' and 'OPACVAL' in blok: # has_key
|
2006-06-30 11:22:45 +00:00
|
|
|
colfac = blok['OPACVAL']
|
|
|
|
# Blender needs this to be clamped
|
|
|
|
colfac = max(0.0, min(1.0, colfac))
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Set Texture -> MapTo -> Col = %.3f" % colfac
|
2006-06-29 07:06:54 +00:00
|
|
|
if blok['CHAN'] == 'BUMP':
|
|
|
|
mapflag = Blender.Texture.MapTo.NOR
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'OPACVAL' in blok: norfac = blok['OPACVAL'] # has_key
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Set Texture -> MapTo -> Nor = %.3f" % norfac
|
2006-06-29 07:06:54 +00:00
|
|
|
if blok['CHAN'] == 'LUMI':
|
|
|
|
mapflag = Blender.Texture.MapTo.EMIT
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'OPACVAL' in blok: dvar = blok['OPACVAL'] # has_key
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Set Texture -> MapTo -> DVar = %.3f" % dvar
|
2006-06-29 07:06:54 +00:00
|
|
|
if blok['CHAN'] == 'DIFF':
|
|
|
|
mapflag = Blender.Texture.MapTo.REF
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'OPACVAL' in blok: dvar = blok['OPACVAL'] # has_key
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Set Texture -> MapTo -> DVar = %.3f" % dvar
|
2006-06-29 07:06:54 +00:00
|
|
|
if blok['CHAN'] == 'SPEC':
|
|
|
|
mapflag = Blender.Texture.MapTo.SPEC
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'OPACVAL' in blok: dvar = blok['OPACVAL'] # has_key
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Set Texture -> MapTo -> DVar = %.3f" % dvar
|
2006-06-29 07:06:54 +00:00
|
|
|
if blok['CHAN'] == 'TRAN':
|
|
|
|
mapflag = Blender.Texture.MapTo.ALPHA
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'OPACVAL' in blok: dvar = blok['OPACVAL'] # has_key
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Set Texture -> MapTo -> DVar = %.3f" % dvar
|
2006-06-30 11:22:45 +00:00
|
|
|
alphaflag = 1
|
|
|
|
nega = True
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'NEGA' in blok: # has_key
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Watch-out: effect of this texture channel must be INVERTED!"
|
2006-06-30 11:22:45 +00:00
|
|
|
nega = not nega
|
|
|
|
|
|
|
|
blendmode_list = ['Mix',
|
|
|
|
'Subtractive',
|
|
|
|
'Difference',
|
|
|
|
'Multiply',
|
|
|
|
'Divide',
|
|
|
|
'Mix with calculated alpha layer and stencil flag',
|
|
|
|
'Texture Displacement',
|
|
|
|
'Additive']
|
|
|
|
set_blendmode = 7 #default additive
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'OPAC' in blok: # has_key
|
2006-06-30 11:22:45 +00:00
|
|
|
set_blendmode = blok['OPAC']
|
|
|
|
if set_blendmode == 5: #transparency
|
|
|
|
newtex.imageFlags |= Blender.Texture.ImageFlags.CALCALPHA
|
|
|
|
if nega: newtex.flags |= Blender.Texture.Flags.NEGALPHA
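# Editor's note: blok['OPAC'] value 5 ("Mix with calculated alpha layer and stencil
# flag" in blendmode_list) is approximated here by letting the texture compute its
# own alpha (CALCALPHA), inverting it when the channel is negated (NEGALPHA), and,
# when not negated, enabling the stencil option on the MTex further down.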
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Set Texture -> MapTo -> Blending Mode = %s" % blendmode_list[set_blendmode]
|
2006-06-29 07:06:54 +00:00
|
|
|
|
|
|
|
#the TexCo flag is determined by PROJ parameter
|
2006-06-30 11:22:45 +00:00
|
|
|
axis = [Blender.Texture.Proj.X, Blender.Texture.Proj.Y, Blender.Texture.Proj.Z]
|
|
|
|
size = [1.0] * 3
|
|
|
|
ofs = [0.0] * 3
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'PROJ' in blok: # has_key
|
2006-06-29 07:06:54 +00:00
|
|
|
if blok['PROJ'] == 0: #0 - Planar
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Flat projection"
|
2006-06-30 11:22:45 +00:00
|
|
|
coordflag = Blender.Texture.TexCo.ORCO
|
|
|
|
maptype = Blender.Texture.Mappings.FLAT
|
2006-06-29 07:06:54 +00:00
|
|
|
elif blok['PROJ'] == 1: #1 - Cylindrical
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Cylindrical projection"
|
2006-06-30 11:22:45 +00:00
|
|
|
coordflag = Blender.Texture.TexCo.ORCO
|
|
|
|
maptype = Blender.Texture.Mappings.TUBE
|
2006-06-29 07:06:54 +00:00
|
|
|
elif blok['PROJ'] == 2: #2 - Spherical
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Spherical projection"
|
2006-06-30 11:22:45 +00:00
|
|
|
coordflag = Blender.Texture.TexCo.ORCO
|
|
|
|
maptype = Blender.Texture.Mappings.SPHERE
|
2006-06-29 07:06:54 +00:00
|
|
|
elif blok['PROJ'] == 3: #3 - Cubic
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Cubic projection"
|
2006-06-30 11:22:45 +00:00
|
|
|
coordflag = Blender.Texture.TexCo.ORCO
|
|
|
|
maptype = Blender.Texture.Mappings.CUBE
|
2006-06-29 07:06:54 +00:00
|
|
|
elif blok['PROJ'] == 4: #4 - Front Projection
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "!!!Front projection"
|
2006-06-30 11:22:45 +00:00
|
|
|
coordflag = Blender.Texture.TexCo.ORCO
|
|
|
|
maptype = Blender.Texture.Mappings.FLAT # ??? could it be a FLAT with some other TexCo type?
|
2006-06-29 07:06:54 +00:00
|
|
|
elif blok['PROJ'] == 5: #5 - UV
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "UVMapped"
|
2006-06-30 11:22:45 +00:00
|
|
|
coordflag = Blender.Texture.TexCo.UV
|
|
|
|
maptype = Blender.Texture.Mappings.FLAT #in case of UV default to FLAT mapping => effectively not used
|
|
|
|
if blok['PROJ'] != 5: #This holds for any projection map except UV
|
|
|
|
axis, size, ofs = output_size_ofs(obj_size, obj_pos, blok)
|
|
|
|
|
|
|
|
# Clamp ofs and size else blender will raise an error
|
|
|
|
for ii in xrange(3):
|
|
|
|
ofs[ii]= min(10.0, max(-10, ofs[ii]))
|
|
|
|
size[ii]= min(100, max(-100, size[ii]))
|
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
mat.setTexture(ti, newtex, coordflag, mapflag)
|
2006-06-30 11:22:45 +00:00
|
|
|
current_mtex = mat.getTextures()[ti]
|
|
|
|
current_mtex.mapping = maptype
|
|
|
|
current_mtex.colfac = colfac
|
|
|
|
current_mtex.dvar = dvar
|
|
|
|
current_mtex.norfac = norfac
|
|
|
|
current_mtex.neg = nega
|
|
|
|
current_mtex.xproj = axis[0]
|
|
|
|
current_mtex.yproj = axis[1]
|
|
|
|
current_mtex.zproj = axis[2]
|
|
|
|
current_mtex.size = tuple(size)
|
|
|
|
current_mtex.ofs = tuple(ofs)
|
|
|
|
if (set_blendmode == 5): #transparency
|
|
|
|
current_mtex.stencil = not (nega)
|
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
ti += 1
|
|
|
|
#end loop over bloks
|
2006-06-30 11:22:45 +00:00
|
|
|
return alphaflag
|
2005-10-11 02:32:58 +00:00
|
|
|
|
|
|
|
|
|
|
|
# ========================================
|
|
|
|
# === Create and assign a new material ===
|
|
|
|
# ========================================
|
2006-01-29 19:17:53 +00:00
|
|
|
#def update_material(surf_list, ptag_dict, obj, clip_list, uv_dict, dir_part):
|
|
|
|
def update_material(clip_list, objspec, surf_list):
|
2006-06-29 07:06:54 +00:00
|
|
|
if (surf_list == []) or (objspec[5] == {}) or (objspec[1] == {}):
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "something getting wrong in update_material: dump follows ..."
|
|
|
|
###if DEBUG: print surf_list
|
|
|
|
###if DEBUG: print objspec[5]
|
|
|
|
###if DEBUG: print objspec[1]
|
2006-06-29 07:06:54 +00:00
|
|
|
return
|
|
|
|
obj_dict = objspec[1]
|
|
|
|
all_faces = objspec[3]
|
|
|
|
obj_dim_dict = objspec[4]
|
|
|
|
ptag_dict = objspec[5]
|
|
|
|
uvcoords_dict = objspec[7]
|
|
|
|
facesuv_dict = objspec[8]
|
|
|
|
for surf in surf_list:
|
2007-02-08 00:27:01 +00:00
|
|
|
if surf and surf['NAME'] in ptag_dict: # in ptag_dict.keys()
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "#-------------------------------------------------------------------#"
|
|
|
|
###if DEBUG: print "Processing surface (material): %s" % surf['NAME']
|
|
|
|
###if DEBUG: print "#-------------------------------------------------------------------#"
|
2006-06-29 07:06:54 +00:00
|
|
|
#material set up
|
|
|
|
facelist = ptag_dict[surf['NAME']]
|
|
|
|
#bounding box and position
|
|
|
|
cur_obj = obj_dict[surf['NAME']]
|
|
|
|
obj_size = obj_dim_dict[surf['NAME']][0]
|
|
|
|
obj_pos = obj_dim_dict[surf['NAME']][1]
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print surf
|
2006-06-29 07:06:54 +00:00
|
|
|
#material is pre-allocated by read_surf
|
|
|
|
mat = surf['g_MAT']
|
|
|
|
if mat == None:
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "Sorry, no pre-allocated material to update. Giving up for %s." % surf['NAME']
|
2006-06-29 07:06:54 +00:00
|
|
|
break
|
|
|
|
#mat = Blender.Material.New(surf['NAME'])
|
|
|
|
#surf['g_MAT'] = mat
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'COLR' in surf: # has_key
|
2006-06-29 07:06:54 +00:00
|
|
|
mat.rgbCol = surf['COLR']
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'LUMI' in surf:
|
2006-06-29 07:06:54 +00:00
|
|
|
mat.setEmit(surf['LUMI'])
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'GVAL' in surf: # has_key
|
2006-06-29 07:06:54 +00:00
|
|
|
mat.setAdd(surf['GVAL'])
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'SPEC' in surf: # has_key
|
|
|
|
mat.setSpec(surf['SPEC']) #arguably should be * 2, but that looks a bit too strong; lwo [0.0, 1.0] - blender [0.0, 2.0]
|
|
|
|
if 'DIFF' in surf: # has_key
|
|
|
|
mat.setRef(surf['DIFF']) #lwo [0.0, 1.0] - blender [0.0, 1.0]
|
|
|
|
if 'GLOS' in surf: # has_key #lwo [0.0, 1.0] - blender [0, 255]
|
|
|
|
glo = int(371.67 * surf['GLOS'] - 42.334) #linear mapping - seems to work better than exp mapping
|
|
|
|
if glo <32: glo = 32 #clamped to 32-255
|
2006-06-29 07:06:54 +00:00
|
|
|
if glo >255: glo = 255
|
|
|
|
mat.setHardness(glo)
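# Quick check of the linear mapping above (illustrative values): GLOS 0.2 gives
# int(371.67*0.2 - 42.334) == 32 (the lower clamp), GLOS 0.4 gives 106, and
# GLOS 1.0 gives 329, which the upper clamp brings back to 255.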
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'TRNL' in surf: # has_key
|
2006-06-30 11:22:45 +00:00
|
|
|
mat.setTranslucency(surf['TRNL']) #NOT SURE ABOUT THIS lwo [0.0, 1.0] - blender [0.0, 1.0]
|
|
|
|
|
2007-07-30 02:24:17 +00:00
|
|
|
mm = mat.mode
|
2006-06-30 11:22:45 +00:00
|
|
|
mm |= Blender.Material.Modes.TRANSPSHADOW
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'REFL' in surf: # has_key
|
2006-06-30 11:22:45 +00:00
|
|
|
mat.setRayMirr(surf['REFL']) #lwo [0.0, 1.0] - blender [0.0, 1.0]
|
|
|
|
mm |= Blender.Material.Modes.RAYMIRROR
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'TRAN' in surf: # has_key
|
2006-06-29 07:06:54 +00:00
|
|
|
mat.setAlpha(1.0-surf['TRAN']) #lwo [0.0, 1.0] - blender [1.0, 0.0]
|
|
|
|
mm |= Blender.Material.Modes.RAYTRANSP
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'RIND' in surf: # has_key
|
2006-06-29 07:06:54 +00:00
|
|
|
s = surf['RIND']
|
|
|
|
if s < 1.0: s = 1.0
|
|
|
|
if s > 3.0: s = 3.0
|
|
|
|
mat.setIOR(s) #clipped to blender [1.0, 3.0]
|
|
|
|
mm |= Blender.Material.Modes.RAYTRANSP
|
2007-07-30 02:24:17 +00:00
|
|
|
if 'BLOK' in surf and surf['BLOK'] != []:
|
2006-06-29 07:06:54 +00:00
|
|
|
#update the material according to texture.
|
2006-06-30 11:22:45 +00:00
|
|
|
alphaflag = create_blok(surf, mat, clip_list, obj_size, obj_pos)
|
|
|
|
if alphaflag:
|
|
|
|
mm |= Blender.Material.Modes.RAYTRANSP
|
2007-07-30 02:24:17 +00:00
|
|
|
mat.mode = mm
|
2006-06-29 07:06:54 +00:00
|
|
|
#finished setting up the material
|
|
|
|
#end if exist SURF
|
|
|
|
#end loop on materials (SURFs)
|
|
|
|
return
|
2005-10-11 02:32:58 +00:00
|
|
|
|
2004-06-07 01:34:15 +00:00
|
|
|
|
|
|
|
# ======================
|
|
|
|
# === Read Faces 6.0 ===
|
|
|
|
# ======================
|
|
|
|
def read_faces_6(lwochunk):
|
2006-06-29 07:06:54 +00:00
|
|
|
data = cStringIO.StringIO(lwochunk.read())
|
|
|
|
faces = []
|
|
|
|
polygon_type = data.read(4)
|
|
|
|
subsurf = 0
|
|
|
|
if polygon_type != "FACE" and polygon_type != "PTCH":
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "No FACE/PATCH Were Found. Polygon Type: %s" % polygon_type
|
2006-06-30 11:22:45 +00:00
|
|
|
return "", 2
|
2006-06-29 07:06:54 +00:00
|
|
|
if polygon_type == 'PTCH': subsurf = 1
|
|
|
|
i = 0
|
|
|
|
while(i < lwochunk.chunksize-4):
|
2006-06-30 11:22:45 +00:00
|
|
|
#if not i%1000 and my_meshtools.show_progress:
|
|
|
|
# Blender.Window.DrawProgressBar(float(i)/lwochunk.chunksize, "Reading Faces")
|
2006-06-29 07:06:54 +00:00
|
|
|
facev = []
|
|
|
|
numfaceverts, = struct.unpack(">H", data.read(2))
|
|
|
|
i += 2
|
|
|
|
|
|
|
|
for j in xrange(numfaceverts):
|
|
|
|
index, index_size = read_vx(data)
|
|
|
|
i += index_size
|
|
|
|
facev.append(index)
|
|
|
|
faces.append(facev)
|
2007-04-04 09:11:03 +00:00
|
|
|
###if DEBUG: print "read %s faces; type of block %d (0=FACE; 1=PATCH)" % (len(faces), subsurf)
|
2006-06-29 07:06:54 +00:00
|
|
|
return faces, subsurf
|
2005-10-11 02:32:58 +00:00
|
|
|
|
2006-07-03 20:17:40 +00:00
|
|
|
def main():
|
|
|
|
if not struct:
|
|
|
|
Blender.Draw.PupMenu('This importer requires a full python install')
|
|
|
|
return
|
|
|
|
|
|
|
|
Blender.Window.FileSelector(read, "Import LWO", '*.lwo')
|
2005-10-11 02:32:58 +00:00
|
|
|
|
2007-05-09 07:03:05 +00:00
|
|
|
if __name__=='__main__':
|
|
|
|
main()
|
2007-04-21 11:23:45 +00:00
|
|
|
|
2008-01-24 20:31:11 +00:00
|
|
|
|
2007-07-30 02:24:17 +00:00
|
|
|
# Cam's debugging lwo loader
|
|
|
|
"""
|
2006-06-29 07:06:54 +00:00
|
|
|
TIME= Blender.sys.time()
|
|
|
|
import os
|
|
|
|
print 'Searching for files'
|
2007-04-21 11:23:45 +00:00
|
|
|
os.system('find /fe/lwo/Objects/ -follow -iname "*.lwo" > /tmp/templwo_list')
|
2006-06-29 07:06:54 +00:00
|
|
|
# os.system('find /storage/ -iname "*.lwo" > /tmp/templwo_list')
|
|
|
|
print '...Done'
|
|
|
|
file= open('/tmp/templwo_list', 'r')
|
|
|
|
lines= file.readlines()
|
2007-08-01 10:19:16 +00:00
|
|
|
|
|
|
|
# sort by filesize for faster testing
|
|
|
|
lines_size = [(os.path.getsize(f[:-1]), f[:-1]) for f in lines]
|
|
|
|
lines_size.sort()
|
|
|
|
lines = [f[1] for f in lines_size]
|
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
file.close()
|
|
|
|
|
|
|
|
def between(v,a,b):
|
|
|
|
if v <= max(a,b) and v >= min(a,b):
|
|
|
|
return True
|
2006-06-30 11:22:45 +00:00
|
|
|
|
2006-06-29 07:06:54 +00:00
|
|
|
return False
|
2006-07-02 09:44:44 +00:00
|
|
|
size= 0.0
|
2006-06-29 07:06:54 +00:00
|
|
|
for i, _lwo in enumerate(lines):
|
2007-02-08 00:27:01 +00:00
|
|
|
#if i==425: # SCANFILL
|
|
|
|
#if 1:
|
|
|
|
#if i==520: # SCANFILL CRASH
|
|
|
|
#if i==47: # SCANFILL CRASH
|
|
|
|
#if between(i, 525, 550):
|
|
|
|
#if i > 1635:
|
|
|
|
#if i != 1519: # 730
|
2007-08-01 10:19:16 +00:00
|
|
|
if i>141:
|
|
|
|
#if 1:
|
|
|
|
# _lwo= _lwo[:-1]
|
2006-06-29 07:06:54 +00:00
|
|
|
print 'Importing', _lwo, '\nNUMBER', i, 'of', len(lines)
|
|
|
|
_lwo_file= _lwo.split('/')[-1].split('\\')[-1]
|
2007-04-18 14:40:01 +00:00
|
|
|
newScn= bpy.data.scenes.new(_lwo_file)
|
|
|
|
bpy.data.scenes.active = newScn
|
2006-07-02 09:44:44 +00:00
|
|
|
size += ((os.path.getsize(_lwo)/1024.0))/ 1024.0
|
2006-06-29 07:06:54 +00:00
|
|
|
read(_lwo)
|
2006-07-02 09:44:44 +00:00
|
|
|
# Remove objects to save memory?
|
|
|
|
'''
|
2006-12-25 09:17:23 +00:00
|
|
|
for ob in newScn.objects:
|
2007-04-04 09:11:03 +00:00
|
|
|
if ob.type=='Mesh':
|
2006-07-02 09:44:44 +00:00
|
|
|
me= ob.getData(mesh=1)
|
|
|
|
me.verts= None
|
|
|
|
newScn.unlink(ob)
|
|
|
|
'''
|
|
|
|
print 'mb size so far', size
|
2006-06-29 07:06:54 +00:00
|
|
|
|
|
|
|
print 'TOTAL TIME: %.6f' % (Blender.sys.time() - TIME)
|
2007-05-09 07:03:05 +00:00
|
|
|
"""
|