Merged changes in the trunk up to revision 35828.

Conflicts resolved:
source/blender/makesrna/RNA_enum_types.h

In line with the changes in revision 35667, the Freestyle-related code
in release/scripts/ui/ was moved to release/scripts/startup/bl_ui/.
Tamito Kajiyama 2011-03-27 23:11:22 +00:00
commit 040f0a1096
395 changed files with 7190 additions and 17515 deletions

@ -84,6 +84,10 @@ else()
option(WITH_OPENMP "Enable OpenMP (has to be supported by the compiler)" OFF)
endif()
if(UNIX AND NOT APPLE)
option(WITH_X11_XINPUT "Enable X11 Xinput (tablet support)" ON)
endif()
# Modifiers
option(WITH_MOD_FLUID "Enable Elbeem Modifier (Fluid Simulation)" ON)
option(WITH_MOD_DECIMATE "Enable Decimate Modifier" ON)
@ -107,11 +111,7 @@ endif()
# 3D format support
# disable opencollada on non-apple unix because opencollada has no package for debian
if(UNIX AND NOT APPLE)
option(WITH_OPENCOLLADA "Enable OpenCollada Support (http://www.opencollada.org)" OFF)
else()
option(WITH_OPENCOLLADA "Enable OpenCollada Support (http://www.opencollada.org)" OFF)
endif()
option(WITH_OPENCOLLADA "Enable OpenCollada Support (http://www.opencollada.org)" OFF)
# Sound output
option(WITH_SDL "Enable SDL for sound and joystick support" ON)
@ -384,7 +384,11 @@ if(UNIX AND NOT APPLE)
mark_as_advanced(X11_XF86keysym_INCLUDE_PATH)
# OpenSuse needs lutil, ArchLinux not, for now keep, can avoid by using --as-needed
set(LLIBS "-lutil -lc -lm -lpthread -lstdc++ ${X11_X11_LIB} ${X11_Xinput_LIB}")
set(LLIBS "-lutil -lc -lm -lpthread -lstdc++ ${X11_X11_LIB}")
if(WITH_X11_XINPUT)
list(APPEND LLIBS ${X11_Xinput_LIB})
endif()
if(CMAKE_SYSTEM_NAME MATCHES "Linux")
if(NOT WITH_PYTHON_MODULE)
@ -1016,6 +1020,13 @@ if(WITH_OPENMP)
mark_as_advanced(OpenMP_CXX_FLAGS)
endif()
#-----------------------------------------------------------------------------
# Configure Python.
if(WITH_PYTHON_MODULE)
add_definitions(-DPy_ENABLE_SHARED)
endif()
#-----------------------------------------------------------------------------
# Extra compile flags
@ -1065,6 +1076,13 @@ elseif(CMAKE_C_COMPILER_ID MATCHES "Intel")
endif()
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
if(WITH_IK_ITASC OR WITH_MOD_FLUID)
message(WARNING "Using Clang as CXX compiler: disabling WITH_IK_ITASC and WITH_MOD_FLUID, these features will be missing.")
set(WITH_IK_ITASC OFF)
set(WITH_MOD_FLUID OFF)
endif()
endif()
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${PLATFORM_CFLAGS} ${C_WARNINGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${PLATFORM_CFLAGS} ${CXX_WARNINGS}")

@ -90,6 +90,10 @@ package_debian:
package_pacman:
cd build_files/package_spec/pacman ; MAKEFLAGS="-j$(NPROCS)" makepkg --asroot
package_archive:
cd $(BUILD_DIR) ; make -s package_archive
@echo archive in "$(BUILD_DIR)/release"
# forward build targets
test:
cd $(BUILD_DIR) ; ctest . --output-on-failure

@ -669,7 +669,14 @@ if 'blenderlite' in B.targets:
Depends(nsiscmd, allinstall)
buildslave_action = env.Action(btools.buildslave, btools.buildslave_print)
buildslave_cmd = env.Command('buildslave_exec', None, buildslave_action)
buildslave_alias = env.Alias('buildslave', buildslave_cmd)
Depends(buildslave_cmd, allinstall)
Default(B.program_list)
if not env['WITHOUT_BF_INSTALL']:
Default(installtarget)

@ -0,0 +1,176 @@
# -*- python -*-
# ex: set syntax=python:
# <pep8 compliant>
# Dictionary that the buildmaster pays attention to.
c = BuildmasterConfig = {}
# BUILD SLAVES
#
# We load the slaves and their passwords from a separate file, so we can have
# this one in SVN.
from buildbot.buildslave import BuildSlave
import master_private
c['slaves'] = []
for slave in master_private.slaves:
c['slaves'].append(BuildSlave(slave['name'], slave['password']))
# TCP port through which slaves connect
c['slavePortnum'] = 9989
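# For reference, a minimal sketch of what the master_private module is assumed
# to provide (slave names, passwords and builder lists below are placeholders):
#
#   slaves = [
#       {'name': 'linux_x86_64_slave', 'password': 'changeme',
#        'builders': ['linux_x86_64_cmake', 'linux_x86_64_scons']},
#   ]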
# CHANGE SOURCES
from buildbot.changes.svnpoller import SVNPoller
c['change_source'] = SVNPoller(
'https://svn.blender.org/svnroot/bf-blender/trunk/',
pollinterval=1200)
# BUILDERS
#
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.
from buildbot.process.factory import BuildFactory
from buildbot.steps.source import SVN
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import Compile
from buildbot.steps.shell import Test
from buildbot.steps.transfer import FileUpload
# from buildbot.steps.transfer import FileDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.config import BuilderConfig
# add builder utility
c['builders'] = []
buildernames = []
def add_builder(c, name, libdir, factory):
slavenames = []
for slave in master_private.slaves:
if name in slave['builders']:
slavenames.append(slave['name'])
if len(slavenames) > 0:
f = factory(name, libdir)
c['builders'].append(BuilderConfig(name=name, slavenames=slavenames, factory=f, category='blender'))
buildernames.append(name)
# common steps
def svn_step():
return SVN(baseURL='https://svn.blender.org/svnroot/bf-blender/%%BRANCH%%/blender', mode='update', defaultBranch='trunk', workdir='blender')
def lib_svn_step(dir):
return SVN(name='lib svn', baseURL='https://svn.blender.org/svnroot/bf-blender/%%BRANCH%%/lib/' + dir, mode='update', defaultBranch='trunk', workdir='lib/' + dir)
# generic builder
def generic_builder(id, libdir=""):
filename = 'buildbot_upload_' + id + '.zip'
compile_script = '../blender/build_files/buildbot/slave_compile.py'
test_script = '../blender/build_files/buildbot/slave_test.py'
pack_script = '../blender/build_files/buildbot/slave_pack.py'
unpack_script = 'master_unpack.py'
f = BuildFactory()
f.addStep(svn_step())
if libdir != '':
f.addStep(lib_svn_step(libdir))
f.addStep(Compile(command=['python', compile_script, id]))
f.addStep(Test(command=['python', test_script, id]))
f.addStep(ShellCommand(name='package', command=['python', pack_script, id], description='packaging', descriptionDone='packaged'))
if id.find('cmake') != -1:
f.addStep(FileUpload(name='upload', slavesrc='buildbot_upload.zip', masterdest=filename, maxsize=100 * 1024 * 1024))
else:
f.addStep(FileUpload(name='upload', slavesrc='buildbot_upload.zip', masterdest=filename, maxsize=100 * 1024 * 1024, workdir='install'))
f.addStep(MasterShellCommand(name='unpack', command=['python', unpack_script, filename], description='unpacking', descriptionDone='unpacked'))
return f
# builders
add_builder(c, 'mac_x86_64_cmake', 'darwin-9.x.universal', generic_builder)
add_builder(c, 'mac_i386_cmake', 'darwin-9.x.universal', generic_builder)
add_builder(c, 'mac_ppc_cmake', 'darwin-9.x.universal', generic_builder)
add_builder(c, 'linux_x86_64_cmake', '', generic_builder)
add_builder(c, 'linux_x86_64_scons', '', generic_builder)
add_builder(c, 'win32_scons', 'windows', generic_builder)
# SCHEDULERS
#
# Decide how to react to incoming changes.
# from buildbot.scheduler import Scheduler
from buildbot.schedulers import timed
c['schedulers'] = []
#c['schedulers'].append(Scheduler(name="all", branch=None,
# treeStableTimer=None,
# builderNames=[]))
#c['schedulers'].append(timed.Periodic(name="nightly",
# builderNames=buildernames,
# periodicBuildTimer=24*60*60))
c['schedulers'].append(timed.Nightly(name='nightly',
builderNames=buildernames,
hour=3,
minute=0))
# STATUS TARGETS
#
# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
c['status'] = []
from buildbot.status import html
from buildbot.status.web import authz
authz_cfg = authz.Authz(
# change any of these to True to enable; see the manual for more
# options
gracefulShutdown=False,
forceBuild=True, # use this to test your slave once it is set up
forceAllBuilds=False,
pingBuilder=False,
stopBuild=False,
stopAllBuilds=False,
cancelPendingBuild=False,
)
c['status'].append(html.WebStatus(http_port=8010, authz=authz_cfg))
# PROJECT IDENTITY
c['projectName'] = "Blender"
c['projectURL'] = "http://www.blender.org"
# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.
c['buildbotURL'] = "http://builder.blender.org/"
# DB URL
#
# This specifies what database buildbot uses to store change and scheduler
# state. You can leave this at its default for all but the largest
# installations.
c['db_url'] = "sqlite:///state.sqlite"

@ -0,0 +1,121 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Runs on the Buildbot master, to unpack the incoming upload zip into the latest
# builds directory and remove older builds.
# <pep8 compliant>
import os
import shutil
import sys
import zipfile
# extension stripping
def strip_extension(filename):
extensions = '.zip', '.tar', '.bz2', '.gz', '.tgz', '.tbz', '.exe'
filename_noext, ext = os.path.splitext(filename)
if ext in extensions:
return strip_extension(filename_noext) # may have .tar.bz2
else:
return filename
# extract platform from package name
def get_platform(filename):
# The name is blender-version-platform.extension. We want to get the
# platform out, but there may be some variations, so we fiddle a
# bit to handle current and, hopefully, future names.
filename = strip_extension(filename)
tokens = filename.split("-")
platforms = 'osx', 'mac', 'bsd', 'win', 'linux', 'source', 'irix', 'solaris'
platform_tokens = []
found = False
for i, token in enumerate(tokens):
if not found:
for platform in platforms:
if platform in token.lower():
found = True
break
if found:
platform_tokens += [token]
return '-'.join(platform_tokens)
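# Illustrative behaviour with hypothetical package names (examples only, not
# taken from an actual build):
#   get_platform('blender-2.56-r35828-linux-glibc27-x86_64.tar.bz2')
#       -> 'linux-glibc27-x86_64'
#   get_platform('blender-2.56-r35828-win64.zip') -> 'win64'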
# get filename
if len(sys.argv) < 2:
sys.stderr.write("Not enough arguments, expecting file to unpack\n")
sys.exit(1)
filename = sys.argv[1]
# open zip file
if not os.path.exists(filename):
sys.stderr.write("File %r not found.\n" % filename)
sys.exit(1)
try:
z = zipfile.ZipFile(filename, "r")
except Exception, ex:
sys.stderr.write('Failed to open zip file: %s\n' % str(ex))
sys.exit(1)
if len(z.namelist()) != 1:
sys.stderr.write("Expected one file in %r." % filename)
sys.exit(1)
package = z.namelist()[0]
packagename = os.path.basename(package)
# detect platform
platform = get_platform(packagename)
if platform == '':
sys.stderr.write('Failed to detect platform from package: %r\n' % packagename)
sys.exit(1)
# extract
dir = 'public_html/download'
try:
zf = z.open(package)
f = file(os.path.join(dir, packagename), "wb")
shutil.copyfileobj(zf, f)
zf.close()
z.close()
os.remove(filename)
except Exception, ex:
sys.stderr.write('Failed to unzip package: %s\n' % str(ex))
sys.exit(1)
# remove other files from the same platform
try:
for f in os.listdir(dir):
if platform.lower() in f.lower():
if f != packagename:
os.remove(os.path.join(dir, f))
except Exception, ex:
sys.stderr.write('Failed to remove old packages: %s\n' % str(ex))
sys.exit(1)

@ -0,0 +1,60 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import os
import subprocess
import sys
# get builder name
if len(sys.argv) < 2:
sys.stderr.write("Not enough arguments, expecting builder name\n")
sys.exit(1)
builder = sys.argv[1]
# we run from build/ directory
blender_dir = '../blender'
if builder.find('cmake') != -1:
# cmake
# set build options
cmake_options = ['-DCMAKE_BUILD_TYPE:STRING=Release']
if builder == 'mac_x86_64_cmake':
cmake_options.append('-DCMAKE_OSX_ARCHITECTURES:STRING=x86_64')
elif builder == 'mac_i386_cmake':
cmake_options.append('-DCMAKE_OSX_ARCHITECTURES:STRING=i386')
elif builder == 'mac_ppc_cmake':
cmake_options.append('-DCMAKE_OSX_ARCHITECTURES:STRING=ppc')
# configure and make
retcode = subprocess.call(['cmake', blender_dir] + cmake_options)
if retcode != 0:
sys.exit(retcode)
retcode = subprocess.call(['make', '-s', '-j4', 'install'])
sys.exit(retcode)
else:
# scons
os.chdir(blender_dir)
scons_options = []
retcode = subprocess.call(['python', 'scons/scons.py'] + scons_options)
sys.exit(retcode)

@ -0,0 +1,87 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Runs on buildbot slave, creating a release package using the build
# system and zipping it into buildbot_upload.zip. This is then uploaded
# to the master in the next buildbot step.
import os
import subprocess
import sys
import zipfile
# get builder name
if len(sys.argv) < 2:
sys.stderr.write("Not enough arguments, expecting builder name\n")
sys.exit(1)
builder = sys.argv[1]
# scons does its own packaging
if builder.find('scons') != -1:
os.chdir('../blender')
retcode = subprocess.call(['python', 'scons/scons.py', 'BF_QUICK=slnt', 'buildslave'])
sys.exit(retcode)
# clean release directory if it already exists
dir = 'release'
if os.path.exists(dir):
for f in os.listdir(dir):
if os.path.isfile(os.path.join(dir, f)):
os.remove(os.path.join(dir, f))
# create release package
try:
subprocess.call(['make', 'package_archive'])
except Exception, ex:
sys.stderr.write('Make package_archive failed: ' + str(ex) + '\n')
sys.exit(1)
# find release directory, must exist this time
if not os.path.exists(dir):
sys.stderr.write("Failed to find release directory.\n")
sys.exit(1)
# find release package
file = None
filepath = None
for f in os.listdir(dir):
rf = os.path.join(dir, f)
if os.path.isfile(rf) and f.startswith('blender'):
file = f
filepath = rf
if not file:
sys.stderr.write("Failed to find release package.\n")
sys.exit(1)
# create zip file
try:
upload_zip = "buildbot_upload.zip"
if os.path.exists(upload_zip):
os.remove(upload_zip)
z = zipfile.ZipFile(upload_zip, "w", compression=zipfile.ZIP_STORED)
z.write(filepath, arcname=file)
z.close()
except Exception, ex:
sys.stderr.write('Create buildbot_upload.zip failed: ' + str(ex) + '\n')
sys.exit(1)

@ -0,0 +1,40 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import subprocess
import sys
# get builder name
if len(sys.argv) < 2:
sys.stderr.write("Not enough arguments, expecting builder name\n")
sys.exit(1)
builder = sys.argv[1]
# we run from build/ directory
blender_dir = '../blender'
if builder.find('cmake') != -1:
# cmake
retcode = subprocess.call(['ctest', '.', '--output-on-failure'])
sys.exit(retcode)
else:
# scons
pass

@ -17,17 +17,21 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Campbell Barton
# Contributor(s): Campbell Barton, M.G. Kishalmi
#
# ***** END GPL LICENSE BLOCK *****
# <pep8 compliant>
"""
Exampel Win32 usage:
Example Win32 usage:
c:\Python32\python.exe c:\blender_dev\blender\build_files\cmake\cmake_qtcreator_project.py c:\blender_dev\cmake_build
Example Linux usage:
python ~/blenderSVN/blender/build_files/cmake/cmake_qtcreator_project.py ~/blenderSVN/cmake
"""
import sys
import os
from os.path import join, dirname, normpath, abspath, splitext, relpath, exists
@ -37,6 +41,20 @@ base = abspath(base)
SIMPLE_PROJECTFILE = False
# get cmake path
CMAKE_DIR = sys.argv[-1]
if not os.path.exists(os.path.join(CMAKE_DIR, "CMakeCache.txt")):
CMAKE_DIR = os.getcwd()
if not os.path.exists(os.path.join(CMAKE_DIR, "CMakeCache.txt")):
print("CMakeCache.txt not found in %r or %r\n Pass CMake build dir as an argument, or run from that dir, aborting" % (CMAKE_DIR, os.getcwd()))
sys.exit(1)
# could be either.
# PROJECT_DIR = base
PROJECT_DIR = CMAKE_DIR
def source_list(path, filename_check=None):
for dirpath, dirnames, filenames in os.walk(path):
@ -62,6 +80,16 @@ def is_c_header(filename):
return (ext in (".h", ".hpp", ".hxx"))
def is_py(filename):
ext = splitext(filename)[1]
return (ext == ".py")
def is_glsl(filename):
ext = splitext(filename)[1]
return (ext == ".glsl")
def is_c(filename):
ext = splitext(filename)[1]
return (ext in (".c", ".cpp", ".cxx", ".m", ".mm", ".rc"))
@ -78,19 +106,19 @@ def is_svn_file(filename):
def is_project_file(filename):
return (is_c_any(filename) or is_cmake(filename)) # and is_svn_file(filename)
return (is_c_any(filename) or is_cmake(filename) or is_glsl(filename)) # and is_svn_file(filename)
def cmake_advanced_info():
""" Extracr includes and defines from cmake.
"""
def create_eclipse_project(cmake_dir):
def create_eclipse_project(CMAKE_DIR):
import sys
if sys.platform == "win32":
cmd = 'cmake %r -G"Eclipse CDT4 - MinGW Makefiles"' % cmake_dir
cmd = 'cmake %r -G"Eclipse CDT4 - MinGW Makefiles"' % CMAKE_DIR
else:
cmd = 'cmake %r -G"Eclipse CDT4 - Unix Makefiles"' % cmake_dir
cmd = 'cmake %r -G"Eclipse CDT4 - Unix Makefiles"' % CMAKE_DIR
os.system(cmd)
@ -98,20 +126,11 @@ def cmake_advanced_info():
defines = []
import os
import sys
cmake_dir = sys.argv[-1]
if not os.path.exists(os.path.join(cmake_dir, "CMakeCache.txt")):
cmake_dir = os.getcwd()
if not os.path.exists(os.path.join(cmake_dir, "CMakeCache.txt")):
print("CMakeCache.txt not found in %r or %r\n Pass CMake build dir as an argument, or run from that dir, abording" % (cmake_dir, os.getcwd()))
sys.exit(1)
create_eclipse_project(cmake_dir)
create_eclipse_project(CMAKE_DIR)
from xml.dom.minidom import parse
tree = parse(os.path.join(cmake_dir, ".cproject"))
tree = parse(os.path.join(CMAKE_DIR, ".cproject"))
'''
f = open(".cproject_pretty", 'w')
f.write(tree.toprettyxml(indent=" ", newl=""))
@ -155,26 +174,60 @@ def cmake_advanced_info():
return includes, defines
def main():
def cmake_cache_var(var):
cache_file = open(os.path.join(CMAKE_DIR, "CMakeCache.txt"))
lines = [l_strip for l in cache_file for l_strip in (l.strip(),) if l_strip if not l_strip.startswith("//") if not l_strip.startswith("#")]
cache_file.close()
for l in lines:
if l.split(":")[0] == var:
return l.split("=", 1)[-1]
return None
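# CMakeCache.txt entries have the form NAME:TYPE=value, so for an (illustrative)
# cache line "CMAKE_C_COMPILER:FILEPATH=/usr/bin/gcc" this returns "/usr/bin/gcc"
# when called as cmake_cache_var("CMAKE_C_COMPILER").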
def cmake_compiler_defines():
compiler = cmake_cache_var("CMAKE_C_COMPILER") # could do CXX too
if compiler is None:
print("Couldn't find the compiler, os defines will be omitted...")
return
import tempfile
temp_c = tempfile.mkstemp(suffix=".c")[1]
temp_def = tempfile.mkstemp(suffix=".def")[1]
os.system("%s -dM -E %s > %s" % (compiler, temp_c, temp_def))
temp_def_file = open(temp_def)
lines = [l.strip() for l in temp_def_file if l.strip()]
temp_def_file.close()
os.remove(temp_c)
os.remove(temp_def)
return lines
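# With gcc/clang, "-dM -E" on an (empty) file dumps the predefined macros, so
# the returned lines look like "#define __linux__ 1" (illustrative); they are
# appended verbatim to the generated .config file further below.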
def create_qtc_project_main():
files = list(source_list(base, filename_check=is_project_file))
files_rel = [relpath(f, start=base) for f in files]
files_rel = [relpath(f, start=PROJECT_DIR) for f in files]
files_rel.sort()
# --- qtcreator spesific, simple format
# --- qtcreator specific, simple format
if SIMPLE_PROJECTFILE:
# --- qtcreator spesific, simple format
# --- qtcreator specific, simple format
PROJECT_NAME = "Blender"
f = open(join(base, "%s.files" % PROJECT_NAME), 'w')
f = open(join(PROJECT_DIR, "%s.files" % PROJECT_NAME), 'w')
f.write("\n".join(files_rel))
f = open(join(base, "%s.includes" % PROJECT_NAME), 'w')
f = open(join(PROJECT_DIR, "%s.includes" % PROJECT_NAME), 'w')
f.write("\n".join(sorted(list(set(dirname(f) for f in files_rel if is_c_header(f))))))
qtc_prj = join(base, "%s.creator" % PROJECT_NAME)
qtc_prj = join(PROJECT_DIR, "%s.creator" % PROJECT_NAME)
f = open(qtc_prj, 'w')
f.write("[General]\n")
qtc_cfg = join(base, "%s.config" % PROJECT_NAME)
qtc_cfg = join(PROJECT_DIR, "%s.config" % PROJECT_NAME)
if not exists(qtc_cfg):
f = open(qtc_cfg, 'w')
f.write("// ADD PREDEFINED MACROS HERE!\n")
@ -186,23 +239,55 @@ def main():
includes.sort()
PROJECT_NAME = "Blender"
f = open(join(base, "%s.files" % PROJECT_NAME), 'w')
FILE_NAME = PROJECT_NAME.lower()
f = open(join(PROJECT_DIR, "%s.files" % FILE_NAME), 'w')
f.write("\n".join(files_rel))
f = open(join(base, "%s.includes" % PROJECT_NAME), 'w')
f = open(join(PROJECT_DIR, "%s.includes" % FILE_NAME), 'w')
f.write("\n".join(sorted(includes)))
qtc_prj = join(base, "%s.creator" % PROJECT_NAME)
qtc_prj = join(PROJECT_DIR, "%s.creator" % FILE_NAME)
f = open(qtc_prj, 'w')
f.write("[General]\n")
qtc_cfg = join(base, "%s.config" % PROJECT_NAME)
qtc_cfg = join(PROJECT_DIR, "%s.config" % FILE_NAME)
f = open(qtc_cfg, 'w')
f.write("// ADD PREDEFINED MACROS HERE!\n")
f.write("\n".join([("#define %s %s" % item) for item in defines]))
defines_final = [("#define %s %s" % item) for item in defines]
defines_final += cmake_compiler_defines() # defines from the compiler
f.write("\n".join(defines_final))
print("Project file written to: %s" % qtc_prj)
print("Blender project file written to: %s" % qtc_prj)
# --- end
def create_qtc_project_python():
files = list(source_list(base, filename_check=is_py))
files_rel = [relpath(f, start=PROJECT_DIR) for f in files]
files_rel.sort()
# --- qtcreator specific, simple format
PROJECT_NAME = "Blender_Python"
FILE_NAME = PROJECT_NAME.lower()
f = open(join(PROJECT_DIR, "%s.files" % FILE_NAME), 'w')
f.write("\n".join(files_rel))
qtc_prj = join(PROJECT_DIR, "%s.creator" % FILE_NAME)
f = open(qtc_prj, 'w')
f.write("[General]\n")
qtc_cfg = join(PROJECT_DIR, "%s.config" % FILE_NAME)
if not exists(qtc_cfg):
f = open(qtc_cfg, 'w')
f.write("// ADD PREDEFINED MACROS HERE!\n")
print("Python project file written to: %s" % qtc_prj)
def main():
create_qtc_project_main()
create_qtc_project_python()
if __name__ == "__main__":
main()

@ -66,7 +66,7 @@ macro(SETUP_LIBDIRS)
link_directories(${JPEG_LIBPATH} ${PNG_LIBPATH} ${ZLIB_LIBPATH} ${FREETYPE_LIBPATH})
if(WITH_PYTHON AND NOT WITH_PYTHON_MODULE)
if(WITH_PYTHON) # AND NOT WITH_PYTHON_MODULE # WIN32 needs
link_directories(${PYTHON_LIBPATH})
endif()
if(WITH_INTERNATIONAL)
@ -127,7 +127,7 @@ macro(setup_liblinks
target_link_libraries(${target} ${OPENGL_gl_LIBRARY} ${OPENGL_glu_LIBRARY} ${JPEG_LIBRARIES} ${PNG_LIBRARIES} ${ZLIB_LIBRARIES} ${LLIBS})
# since we are using the local libs for python when compiling msvc projects, we need to add _d when compiling debug versions
if(WITH_PYTHON AND NOT WITH_PYTHON_MODULE)
if(WITH_PYTHON) # AND NOT WITH_PYTHON_MODULE # WIN32 needs
target_link_libraries(${target} ${PYTHON_LINKFLAGS})
if(WIN32 AND NOT UNIX)

@ -51,7 +51,35 @@ if(APPLE)
# Libraries are bundled directly
set(CPACK_COMPONENT_LIBRARIES_HIDDEN TRUE)
endif(APPLE)
endif()
set(CPACK_PACKAGE_EXECUTABLES "blender")
include(CPack)
# Target for build_archive.py script, to automatically pass along
# version, revision, platform, build directory
macro(add_package_archive packagename extension)
set(build_archive python ${CMAKE_SOURCE_DIR}/build_files/package_spec/build_archive.py)
set(package_output ${CMAKE_BINARY_DIR}/release/${packagename}.${extension})
add_custom_target(package_archive DEPENDS ${package_output})
add_custom_command(
OUTPUT ${package_output}
COMMAND ${build_archive} ${packagename} ${extension} bin release
WORKING_DIRECTORY ${CMAKE_BINARY_DIR})
endmacro()
if(APPLE)
add_package_archive(
"blender-${BLENDER_VERSION}-r${BUILD_REV}-OSX-${CMAKE_OSX_ARCHITECTURES}"
"zip")
elseif(UNIX)
# platform name could be tweaked, to include glibc, and ensure processor is correct (i386 vs i686)
string(TOLOWER ${CMAKE_SYSTEM_NAME} PACKAGE_SYSTEM_NAME)
add_package_archive(
"blender-${BLENDER_VERSION}-r${BUILD_REV}-${PACKAGE_SYSTEM_NAME}-${CMAKE_SYSTEM_PROCESSOR}"
"tar.bz2")
endif()

@ -0,0 +1,70 @@
#!/usr/bin/python
# <pep8 compliant>
import os
import shutil
import subprocess
import sys
# todo:
# strip executables
# get parameters
if len(sys.argv) < 5:
sys.stderr.write('Expected arguments: ./build_archive.py name extension install_dir output_dir\n')
sys.exit(1)
package_name = sys.argv[1]
extension = sys.argv[2]
install_dir = sys.argv[3]
output_dir = sys.argv[4]
package_archive = os.path.join(output_dir, package_name + '.' + extension)
package_dir = package_name
# remove existing package with the same name
try:
if os.path.exists(package_archive):
os.remove(package_archive)
if os.path.exists(package_dir):
shutil.rmtree(package_dir)
except Exception, ex:
sys.stderr.write('Failed to clean up old package files: ' + str(ex) + '\n')
sys.exit(1)
# create temporary package dir
try:
shutil.copytree(install_dir, package_dir)
for f in os.listdir(package_dir):
if f.startswith('makes'):
os.remove(os.path.join(package_dir, f))
except Exception, ex:
sys.stderr.write('Failed to copy install directory: ' + str(ex) + '\n')
sys.exit(1)
# create archive
try:
if not os.path.exists(output_dir):
os.mkdir(output_dir)
if extension == 'zip':
archive_cmd = ['zip', '-9', '-r', package_archive, package_dir]
elif extension == 'tar.bz2':
archive_cmd = ['tar', 'cjf', package_archive, package_dir]
else:
sys.stderr.write('Unknown archive extension: ' + extension + '\n')
sys.exit(-1)
subprocess.call(archive_cmd)
except Exception, ex:
sys.stderr.write('Failed to create package archive: ' + str(ex) + '\n')
sys.exit(1)
# empty temporary package dir
try:
shutil.rmtree(package_dir)
except Exception, ex:
sys.stderr.write('Failed to clean up package directory: ' + str(ex) + '\n')
sys.exit(1)

@ -56,10 +56,16 @@ def get_version():
raise Exception("%s: missing version string" % fname)
def get_revision():
build_rev = os.popen('svnversion').read()[:-1] # remove \n
if build_rev == '' or build_rev == None:
build_rev = 'UNKNOWN'
return 'r' + build_rev
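# Note: svnversion typically prints something like "35828", or "35827:35828M"
# for a mixed/modified working copy; whatever it prints is used verbatim here.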
# This is used in creating the local config directories
VERSION, VERSION_DISPLAY = get_version()
REVISION = get_revision()
def print_arguments(args, bc):
if len(args):
@ -175,7 +181,7 @@ def print_targets(targs, bc):
def validate_targets(targs, bc):
valid_list = ['.', 'blender', 'blenderstatic', 'blenderplayer', 'webplugin',
'blendernogame', 'blenderstaticnogame', 'blenderlite', 'release',
'everything', 'clean', 'install-bin', 'install', 'nsis']
'everything', 'clean', 'install-bin', 'install', 'nsis', 'buildslave']
oklist = []
for t in targs:
if t in valid_list:
@ -496,6 +502,67 @@ def read_opts(env, cfg, args):
return localopts
def buildbot_zip(src, dest, package_name, extension):
import zipfile
ln = len(src)+1 # one extra to remove leading os.sep when cleaning root for package_root
flist = list()
# create list of tuples containing file and archive name
for root, dirs, files in os.walk(src):
package_root = os.path.join(package_name, root[ln:])
flist.extend([(os.path.join(root, file), os.path.join(package_root, file)) for file in files])
if extension == '.zip':
package = zipfile.ZipFile(dest, 'w', zipfile.ZIP_DEFLATED)
package.comment = package_name + ' is a zip-file containing the Blender software. Visit http://www.blender.org for more information.'
for entry in flist:
package.write(entry[0], entry[1])
package.close()
else:
import tarfile
package = tarfile.open(dest, 'w:bz2')
for entry in flist:
package.add(entry[0], entry[1], recursive=False)
package.close()
bb_zip_name = os.path.normpath(src + os.sep + '..' + os.sep + 'buildbot_upload.zip')
print("creating %s" % (bb_zip_name))
bb_zip = zipfile.ZipFile(bb_zip_name, 'w', zipfile.ZIP_DEFLATED)
print("writing %s to %s" % (dest, bb_zip_name))
bb_zip.write(dest, os.path.split(dest)[1])
bb_zip.close()
print("done.")
def buildslave_print(target, source, env):
return "Running buildslave target"
def buildslave(target=None, source=None, env=None):
"""
Builder for buildbot integration. Used by buildslaves of http://builder.blender.org only.
"""
if env['OURPLATFORM'] in ('win32-vc', 'win64-vc', 'win32-mingw'):
extension = '.zip'
else:
extension = '.tar.bz2'
outdir = os.path.abspath(env['BF_INSTALLDIR'])
package_name = 'blender-' + VERSION + '-' + REVISION + '-' + env['OURPLATFORM'].split('-')[0]
package_dir = os.path.normpath(outdir + os.sep + '..' + os.sep + package_name)
package_archive = os.path.normpath(outdir + os.sep + '..' + os.sep + package_name + extension)
try:
if os.path.exists(package_archive):
os.remove(package_archive)
if os.path.exists(package_dir):
shutil.rmtree(package_dir)
except Exception, ex:
sys.stderr.write('Failed to clean up old package files: ' + str(ex) + '\n')
return 1
buildbot_zip(outdir, package_archive, package_name, extension)
return 0
def NSIS_print(target, source, env):
return "Creating NSIS installer for Blender"

@ -14,7 +14,7 @@ print(bpy.data.scenes.keys())
if "Cube" in bpy.data.meshes:
mesh = bpy.data.meshes["Cube"]
print("removing mesh", mesh)
bpy.data.meshes.unlink(mesh)
bpy.data.meshes.remove(mesh)
# write images into a file next to the blend
@ -22,6 +22,6 @@ import os
file = open(os.path.splitext(bpy.data.filepath)[0] + ".txt", 'w')
for image in bpy.data.images:
file.write("%s %dx%d\n" % (image.filepath, image.size[0], image.size[1]))
file.write("%s %d x %d\n" % (image.filepath, image.size[0], image.size[1]))
file.close()

@ -36,7 +36,7 @@ For HTML generation
sphinx-build doc/python_api/sphinx-in doc/python_api/sphinx-out
assuming that you have sphinx 0.6.7 installed
assuming that you have sphinx 1.0.7 installed
For PDF generation
------------------
@ -61,7 +61,7 @@ else:
"bpy.app",
"bpy.path",
"bpy.data",
#"bpy.props",
"bpy.props",
"bpy.utils",
"bpy.context",
# "bpy.types", # supports filtering
@ -104,8 +104,14 @@ EXAMPLE_SET = set()
EXAMPLE_SET_USED = set()
_BPY_STRUCT_FAKE = "bpy_struct"
_BPY_PROP_COLLECTION_FAKE = "bpy_prop_collection"
_BPY_FULL_REBUILD = False
if _BPY_PROP_COLLECTION_FAKE:
_BPY_PROP_COLLECTION_ID = ":class:`%s`" % _BPY_PROP_COLLECTION_FAKE
else:
_BPY_PROP_COLLECTION_ID = "collection"
def undocumented_message(module_name, type_name, identifier):
if str(type_name).startswith('<module'):
@ -157,6 +163,10 @@ def example_extract_docstring(filepath):
return "\n".join(text), line_no
def write_title(fw, text, heading_char):
fw("%s\n%s\n\n" % (text, len(text) * heading_char))
def write_example_ref(ident, fw, example_id, ext="py"):
if example_id in EXAMPLE_SET:
@ -312,8 +322,7 @@ def pymodule2sphinx(BASEPATH, module_name, module, title):
fw = file.write
fw(title + "\n")
fw(("=" * len(title)) + "\n\n")
write_title(fw, title, "=")
fw(".. module:: %s\n\n" % module_name)
@ -543,14 +552,18 @@ def pyrna2sphinx(BASEPATH):
if is_return:
id_name = "return"
id_type = "rtype"
kwargs = {"as_ret": True, "class_fmt": ":class:`%s`"}
kwargs = {"as_ret": True}
identifier = ""
else:
id_name = "arg"
id_type = "type"
kwargs = {"as_arg": True, "class_fmt": ":class:`%s`"}
kwargs = {"as_arg": True}
identifier = " %s" % prop.identifier
kwargs["class_fmt"] = ":class:`%s`"
kwargs["collection_id"] = _BPY_PROP_COLLECTION_ID
type_descr = prop.get_type_description(**kwargs)
if prop.name or prop.description:
fw(ident + ":%s%s: %s\n" % (id_name, identifier, ", ".join(val for val in (prop.name, prop.description) if val)))
@ -578,7 +591,7 @@ def pyrna2sphinx(BASEPATH):
else:
title = struct.identifier
fw("%s\n%s\n\n" % (title, "=" * len(title)))
write_title(fw, title, "=")
fw(".. module:: bpy.types\n\n")
@ -623,7 +636,7 @@ def pyrna2sphinx(BASEPATH):
sorted_struct_properties.sort(key=lambda prop: prop.identifier)
for prop in sorted_struct_properties:
type_descr = prop.get_type_description(class_fmt=":class:`%s`")
type_descr = prop.get_type_description(class_fmt=":class:`%s`", collection_id=_BPY_PROP_COLLECTION_ID)
# readonly properties use "data" directive, variables properties use "attribute" directive
if 'readonly' in type_descr:
fw(" .. data:: %s\n\n" % prop.identifier)
@ -654,7 +667,7 @@ def pyrna2sphinx(BASEPATH):
elif func.return_values: # multiple return values
fw(" :return (%s):\n" % ", ".join(prop.identifier for prop in func.return_values))
for prop in func.return_values:
type_descr = prop.get_type_description(as_ret=True, class_fmt=":class:`%s`")
type_descr = prop.get_type_description(as_ret=True, class_fmt=":class:`%s`", collection_id=_BPY_PROP_COLLECTION_ID)
descr = prop.description
if not descr:
descr = prop.name
@ -763,38 +776,46 @@ def pyrna2sphinx(BASEPATH):
continue
write_struct(struct)
# special case, bpy_struct
if _BPY_STRUCT_FAKE:
filepath = os.path.join(BASEPATH, "bpy.types.%s.rst" % _BPY_STRUCT_FAKE)
def fake_bpy_type(class_value, class_name, descr_str, use_subclasses=True):
filepath = os.path.join(BASEPATH, "bpy.types.%s.rst" % class_name)
file = open(filepath, "w")
fw = file.write
fw("%s\n" % _BPY_STRUCT_FAKE)
fw("=" * len(_BPY_STRUCT_FAKE) + "\n")
fw("\n")
write_title(fw, class_name, "=")
fw(".. module:: bpy.types\n")
fw("\n")
subclass_ids = [s.identifier for s in structs.values() if s.base is None if not rna_info.rna_id_ignore(s.identifier)]
if subclass_ids:
fw("subclasses --- \n" + ", ".join((":class:`%s`" % s) for s in sorted(subclass_ids)) + "\n\n")
if use_subclasses:
subclass_ids = [s.identifier for s in structs.values() if s.base is None if not rna_info.rna_id_ignore(s.identifier)]
if subclass_ids:
fw("subclasses --- \n" + ", ".join((":class:`%s`" % s) for s in sorted(subclass_ids)) + "\n\n")
fw(".. class:: %s\n\n" % _BPY_STRUCT_FAKE)
fw(" built-in base class for all classes in bpy.types.\n\n")
fw(".. class:: %s\n\n" % class_name)
fw(" %s\n\n" % descr_str)
fw(" .. note::\n\n")
fw(" Note that bpy.types.%s is not actually available from within blender, it only exists for the purpose of documentation.\n\n" % _BPY_STRUCT_FAKE)
fw(" Note that bpy.types.%s is not actually available from within blender, it only exists for the purpose of documentation.\n\n" % class_name)
descr_items = [(key, descr) for key, descr in sorted(bpy.types.Struct.__bases__[0].__dict__.items()) if not key.startswith("__")]
descr_items = [(key, descr) for key, descr in sorted(class_value.__dict__.items()) if not key.startswith("__")]
for key, descr in descr_items:
if type(descr) == MethodDescriptorType: # GetSetDescriptorType, GetSetDescriptorType's are not documented yet
py_descr2sphinx(" ", fw, descr, "bpy.types", _BPY_STRUCT_FAKE, key)
py_descr2sphinx(" ", fw, descr, "bpy.types", class_name, key)
for key, descr in descr_items:
if type(descr) == GetSetDescriptorType:
py_descr2sphinx(" ", fw, descr, "bpy.types", _BPY_STRUCT_FAKE, key)
py_descr2sphinx(" ", fw, descr, "bpy.types", class_name, key)
file.close()
# write fake classes
if _BPY_STRUCT_FAKE:
class_value = bpy.types.Struct.__bases__[0]
fake_bpy_type(class_value, _BPY_STRUCT_FAKE, "built-in base class for all classes in bpy.types.", use_subclasses=True)
if _BPY_PROP_COLLECTION_FAKE:
class_value = bpy.data.objects.__class__
fake_bpy_type(class_value, _BPY_PROP_COLLECTION_FAKE, "built-in class used for all collections.", use_subclasses=False)
# operators
def write_ops():
API_BASEURL = "https://svn.blender.org/svnroot/bf-blender/trunk/blender/release/scripts"
@ -810,7 +831,8 @@ def pyrna2sphinx(BASEPATH):
fw = file.write
title = "%s Operators" % op_module_name.replace("_", " ").title()
fw("%s\n%s\n\n" % (title, "=" * len(title)))
write_title(fw, title, "=")
fw(".. module:: bpy.ops.%s\n\n" % op_module_name)
@ -1084,7 +1106,9 @@ def rna2sphinx(BASEPATH):
fw("\n")
title = ":mod:`bpy` --- Blender Python Module"
fw("%s\n%s\n\n" % (title, "=" * len(title)))
write_title(fw, title, "=")
fw(".. module:: bpy.types\n\n")
file.close()

@ -205,14 +205,12 @@ public:
btScalar m_kAST; // Area/Angular stiffness coefficient [0,1]
btScalar m_kVST; // Volume stiffness coefficient [0,1]
int m_flags; // Flags
Material() : Element() {}
};
/* Feature */
struct Feature : Element
{
Material* m_material; // Material
Feature() : Element() {}
};
/* Node */
struct Node : Feature
@ -226,7 +224,6 @@ public:
btScalar m_area; // Area
btDbvtNode* m_leaf; // Leaf data
int m_battach:1; // Attached
Node() : Feature() {}
};
/* Link */
struct Link : Feature
@ -238,7 +235,6 @@ public:
btScalar m_c1; // rl^2
btScalar m_c2; // |gradient|^2/c0
btVector3 m_c3; // gradient
Link() : Feature() {}
};
/* Face */
struct Face : Feature
@ -247,7 +243,6 @@ public:
btVector3 m_normal; // Normal
btScalar m_ra; // Rest area
btDbvtNode* m_leaf; // Leaf data
Face() : Feature() {}
};
/* Tetra */
struct Tetra : Feature
@ -258,7 +253,6 @@ public:
btVector3 m_c0[4]; // gradients
btScalar m_c1; // (4*kVST)/(im0+im1+im2+im3)
btScalar m_c2; // m_c1/sum(|g0..3|^2)
Tetra() : Feature() {}
};
/* RContact */
struct RContact
@ -300,7 +294,6 @@ public:
int m_rank; // Rank
Node* m_nodes[4]; // Nodes
btScalar m_coords[4]; // Coordinates
Note() : Element() {}
};
/* Pose */
struct Pose

@ -118,6 +118,11 @@ if(APPLE)
endif()
elseif(UNIX)
if(WITH_X11_XINPUT)
add_definitions(-DWITH_X11_XINPUT)
endif()
list(APPEND INC ${X11_X11_INCLUDE_PATH})
list(APPEND SRC
@ -132,7 +137,9 @@ elseif(UNIX)
intern/GHOST_WindowX11.h
)
add_definitions(-DPREFIX="${CMAKE_INSTALL_PREFIX}")
if(NOT WITH_INSTALL_PORTABLE)
add_definitions(-DPREFIX="${CMAKE_INSTALL_PREFIX}")
endif()
if(X11_XF86keysym_INCLUDE_PATH)
add_definitions(-DWITH_XF86KEYSYM)

@ -262,7 +262,16 @@ extern int GHOST_DispatchEvents(GHOST_SystemHandle systemhandle);
*/
extern GHOST_TSuccess GHOST_AddEventConsumer(GHOST_SystemHandle systemhandle,
GHOST_EventConsumerHandle consumerhandle);
/**
* Remove the given event consumer from our list.
* @param systemhandle The handle to the system
* @param consumerhandle The event consumer to remove.
* @return Indication of success.
*/
extern GHOST_TSuccess GHOST_RemoveEventConsumer(GHOST_SystemHandle systemhandle,
GHOST_EventConsumerHandle consumerhandle);
/***************************************************************************************
** Progress bar functionality
***************************************************************************************/

@ -290,7 +290,14 @@ public:
* @return Indication of success.
*/
virtual GHOST_TSuccess addEventConsumer(GHOST_IEventConsumer* consumer) = 0;
/**
* Removes the given event consumer from our list.
* @param consumer The event consumer to remove.
* @return Indication of success.
*/
virtual GHOST_TSuccess removeEventConsumer(GHOST_IEventConsumer* consumer) = 0;
/***************************************************************************************
** N-degree of freedom device management functionality
***************************************************************************************/

@ -21,7 +21,8 @@ if window_system in ('linux2', 'openbsd3', 'sunos5', 'freebsd7', 'freebsd8', 'fr
sources.remove('intern' + os.sep + f + 'Carbon.cpp')
except ValueError:
pass
defs += ['PREFIX=\\"/usr/local/\\"']
defs += ['PREFIX=\\"/usr/local/\\"'] # XXX, make an option
defs += ['WITH_X11_XINPUT'] # XXX, make an option
elif window_system in ('win32-vc', 'win32-mingw', 'cygwin', 'linuxcross', 'win64-vc'):
for f in pf:

@ -253,6 +253,13 @@ GHOST_TSuccess GHOST_AddEventConsumer(GHOST_SystemHandle systemhandle, GHOST_Eve
return system->addEventConsumer((GHOST_CallbackEventConsumer*)consumerhandle);
}
GHOST_TSuccess GHOST_RemoveEventConsumer(GHOST_SystemHandle systemhandle, GHOST_EventConsumerHandle consumerhandle)
{
GHOST_ISystem* system = (GHOST_ISystem*) systemhandle;
return system->removeEventConsumer((GHOST_CallbackEventConsumer*)consumerhandle);
}
GHOST_TSuccess GHOST_SetProgressBar(GHOST_WindowHandle windowhandle,float progress)
{
GHOST_IWindow* window = (GHOST_IWindow*) windowhandle;

@ -226,6 +226,17 @@ GHOST_TSuccess GHOST_System::addEventConsumer(GHOST_IEventConsumer* consumer)
return success;
}
GHOST_TSuccess GHOST_System::removeEventConsumer(GHOST_IEventConsumer* consumer)
{
GHOST_TSuccess success;
if (m_eventManager) {
success = m_eventManager->removeConsumer(consumer);
}
else {
success = GHOST_kFailure;
}
return success;
}
GHOST_TSuccess GHOST_System::pushEvent(GHOST_IEvent* event)
{

@ -183,7 +183,12 @@ public:
*/
virtual GHOST_TSuccess addEventConsumer(GHOST_IEventConsumer* consumer);
/**
* Remove the given event consumer from our list.
* @param consumer The event consumer to remove.
* @return Indication of success.
*/
virtual GHOST_TSuccess removeEventConsumer(GHOST_IEventConsumer* consumer);
/***************************************************************************************
** N-degree of freedom device management functionality

@ -43,11 +43,6 @@
#include <stdio.h> // for fprintf only
#include <cstdlib> // for exit
#ifndef PREFIX
# define PREFIX "/usr/local"
#endif
using namespace std;
GHOST_SystemPathsX11::GHOST_SystemPathsX11()
@ -60,7 +55,12 @@ GHOST_SystemPathsX11::~GHOST_SystemPathsX11()
const GHOST_TUns8* GHOST_SystemPathsX11::getSystemDir() const
{
/* no prefix assumes a portable build which only uses bundled scripts */
#ifdef PREFIX
return (GHOST_TUns8*) PREFIX "/share";
#else
return NULL;
#endif
}
const GHOST_TUns8* GHOST_SystemPathsX11::getUserDir() const

@ -76,10 +76,6 @@
#include <stdio.h> // for fprintf only
#include <cstdlib> // for exit
#ifndef PREFIX
#error "PREFIX not defined"
#endif
typedef struct NDOFPlatformInfo {
Display *display;
Window window;

@ -439,7 +439,9 @@ GHOST_WindowX11(
setTitle(title);
#ifdef WITH_X11_XINPUT
initXInputDevices();
#endif
// now set up the rendering context.
if (installDrawingContext(type) == GHOST_kSuccess) {
@ -453,6 +455,7 @@ GHOST_WindowX11(
XFlush(m_display);
}
#ifdef WITH_X11_XINPUT
/*
Dummy function to get around IO Handler exiting if device invalid
Basically it will not crash blender now if you have a X device that
@ -493,6 +496,7 @@ static bool match_token(const char *haystack, const char *needle)
return FALSE;
}
/* Determining if an X device is a Tablet style device is an imperfect science.
** We rely on common conventions around device names as well as the type reported
** by Wacom tablets. This code will likely need to be expanded for alternate tablet types
@ -652,8 +656,9 @@ void GHOST_WindowX11::initXInputDevices()
}
XFree(version);
}
}
}
#endif /* WITH_X11_XINPUT */
Window
GHOST_WindowX11::
@ -1275,14 +1280,16 @@ GHOST_WindowX11::
if (m_custom_cursor) {
XFreeCursor(m_display, m_custom_cursor);
}
#ifdef WITH_X11_XINPUT
/* close tablet devices */
if(m_xtablet.StylusDevice)
XCloseDevice(m_display, m_xtablet.StylusDevice);
if(m_xtablet.EraserDevice)
XCloseDevice(m_display, m_xtablet.EraserDevice);
#endif /* WITH_X11_XINPUT */
if (m_context != s_firstContext) {
glXDestroyContext(m_display, m_context);
}

@ -292,7 +292,7 @@ bool Scene::update(double timestamp, double timestep, unsigned int numsubstep, b
return true;
}
double maxqdot;
// double maxqdot; // UNUSED
e_scalar nlcoef;
SceneLock lockCallback(this);
Frame external_pose;
@ -454,7 +454,7 @@ bool Scene::update(double timestamp, double timestep, unsigned int numsubstep, b
// We will pass the joint velocity to each object and they will recommend a maximum timestep
timesubstep = timeleft;
// get armature max joint velocity to estimate the maximum duration of integration
maxqdot = m_qdot.cwise().abs().maxCoeff();
// maxqdot = m_qdot.cwise().abs().maxCoeff(); // UNUSED
double maxsubstep = nlcoef*m_maxstep;
if (maxsubstep < m_minstep)
maxsubstep = m_minstep;

@ -282,7 +282,6 @@ tbool genTangSpace(const SMikkTSpaceContext * pContext, const float fAngularThre
// Mark all degenerate triangles
iTotTris = iNrTrianglesIn;
iNrTrianglesIn = 0;
iDegenTriangles = 0;
for(t=0; t<iTotTris; t++)
{

@ -1,73 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# This directory is a Python package.
# To support reload properly, try to access a package var, if it's there, reload everything
if "init_data" in locals():
import imp
imp.reload(model)
imp.reload(operators)
imp.reload(client)
imp.reload(slave)
imp.reload(master)
imp.reload(master_html)
imp.reload(utils)
imp.reload(balancing)
imp.reload(ui)
imp.reload(repath)
imp.reload(versioning)
else:
from . import model
from . import operators
from . import client
from . import slave
from . import master
from . import master_html
from . import utils
from . import balancing
from . import ui
from . import repath
from . import versioning
jobs = []
slaves = []
blacklist = []
init_file = ""
valid_address = False
init_data = True
def register():
ui.addProperties()
import bpy
scene = bpy.context.scene
if scene:
netsettings = scene.network_render
ui.init_data(netsettings)
bpy.utils.register_module(__name__)
def unregister():
import bpy
del bpy.types.Scene.network_render
bpy.utils.unregister_module(__name__)

@ -1,195 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import time
from netrender.utils import *
import netrender.model
class RatingRule:
def __init__(self):
self.enabled = True
def id(self):
return str(id(self))
def rate(self, job):
return 0
class ExclusionRule:
def __init__(self):
self.enabled = True
def id(self):
return str(id(self))
def test(self, job):
return False
class PriorityRule:
def __init__(self):
self.enabled = True
def id(self):
return str(id(self))
def test(self, job):
return False
class Balancer:
def __init__(self):
self.rules = []
self.priorities = []
self.exceptions = []
def ruleByID(self, rule_id):
for rule in self.rules:
if rule.id() == rule_id:
return rule
for rule in self.priorities:
if rule.id() == rule_id:
return rule
for rule in self.exceptions:
if rule.id() == rule_id:
return rule
return None
def addRule(self, rule):
self.rules.append(rule)
def addPriority(self, priority):
self.priorities.append(priority)
def addException(self, exception):
self.exceptions.append(exception)
def applyRules(self, job):
return sum((rule.rate(job) for rule in self.rules if rule.enabled))
def applyPriorities(self, job):
for priority in self.priorities:
if priority.enabled and priority.test(job):
return True # priorities are first
return False
def applyExceptions(self, job):
for exception in self.exceptions:
if exception.enabled and exception.test(job):
return True # exceptions are last
return False
def sortKey(self, job):
return (1 if self.applyExceptions(job) else 0, # exceptions after
0 if self.applyPriorities(job) else 1, # priorities first
self.applyRules(job))
def balance(self, jobs):
if jobs:
# use inline copy to make sure the list is still accessible while sorting
jobs[:] = sorted(jobs, key=self.sortKey)
return jobs[0]
else:
return None
# ==========================
class RatingUsage(RatingRule):
def __str__(self):
return "Usage per job"
def rate(self, job):
# less usage is better
return job.usage / job.priority
class RatingUsageByCategory(RatingRule):
def __init__(self, get_jobs):
super().__init__()
self.getJobs = get_jobs
def __str__(self):
return "Usage per category"
def rate(self, job):
total_category_usage = sum([j.usage for j in self.getJobs() if j.category == job.category])
maximum_priority = max([j.priority for j in self.getJobs() if j.category == job.category])
# less usage is better
return total_category_usage / maximum_priority
class NewJobPriority(PriorityRule):
def __init__(self, limit = 1):
super().__init__()
self.limit = limit
def setLimit(self, value):
self.limit = int(value)
def str_limit(self):
return "less than %i frame%s done" % (self.limit, "s" if self.limit > 1 else "")
def __str__(self):
return "Priority to new jobs"
def test(self, job):
return job.countFrames(status = DONE) < self.limit
class MinimumTimeBetweenDispatchPriority(PriorityRule):
def __init__(self, limit = 10):
super().__init__()
self.limit = limit
def setLimit(self, value):
self.limit = int(value)
def str_limit(self):
return "more than %i minute%s since last" % (self.limit, "s" if self.limit > 1 else "")
def __str__(self):
return "Priority to jobs that haven't been dispatched recently"
def test(self, job):
return job.countFrames(status = DISPATCHED) == 0 and (time.time() - job.last_dispatched) / 60 > self.limit
class ExcludeQueuedEmptyJob(ExclusionRule):
def __str__(self):
return "Exclude non queued or empty jobs"
def test(self, job):
return job.status != JOB_QUEUED or job.countFrames(status = QUEUED) == 0
class ExcludeSlavesLimit(ExclusionRule):
def __init__(self, count_jobs, count_slaves, limit = 0.75):
super().__init__()
self.count_jobs = count_jobs
self.count_slaves = count_slaves
self.limit = limit
def setLimit(self, value):
self.limit = float(value)
def str_limit(self):
return "more than %.0f%% of all slaves" % (self.limit * 100)
def __str__(self):
return "Exclude jobs that would use too many slaves"
def test(self, job):
return not ( self.count_jobs() == 1 or self.count_slaves() <= 1 or float(job.countSlaves() + 1) / self.count_slaves() <= self.limit )

@ -1,372 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import sys, os, re
import http, http.client, http.server, urllib
import subprocess, shutil, time, hashlib
import json
import netrender
import netrender.model
import netrender.slave as slave
import netrender.master as master
from netrender.utils import *
def addFluidFiles(job, path):
if os.path.exists(path):
pattern = re.compile("fluidsurface_(final|preview)_([0-9]+)\.(bobj|bvel)\.gz")
for fluid_file in sorted(os.listdir(path)):
match = pattern.match(fluid_file)
if match:
# fluid frames starts at 0, which explains the +1
# This is stupid
current_frame = int(match.groups()[1]) + 1
job.addFile(path + fluid_file, current_frame, current_frame)
def addPointCache(job, ob, point_cache, default_path):
if not point_cache.use_disk_cache:
return
name = point_cache.name
if name == "":
name = "".join(["%02X" % ord(c) for c in ob.name])
cache_path = bpy.path.abspath(point_cache.filepath) if point_cache.use_external else default_path
index = "%02i" % point_cache.index
if os.path.exists(cache_path):
pattern = re.compile(name + "_([0-9]+)_" + index + "\.bphys")
cache_files = []
for cache_file in sorted(os.listdir(cache_path)):
match = pattern.match(cache_file)
if match:
cache_frame = int(match.groups()[0])
cache_files.append((cache_frame, cache_file))
cache_files.sort()
if len(cache_files) == 1:
cache_frame, cache_file = cache_files[0]
job.addFile(cache_path + cache_file, cache_frame, cache_frame)
else:
for i in range(len(cache_files)):
current_item = cache_files[i]
next_item = cache_files[i+1] if i + 1 < len(cache_files) else None
previous_item = cache_files[i - 1] if i > 0 else None
current_frame, current_file = current_item
if not next_item and not previous_item:
job.addFile(cache_path + current_file, current_frame, current_frame)
elif next_item and not previous_item:
next_frame = next_item[0]
job.addFile(cache_path + current_file, current_frame, next_frame - 1)
elif not next_item and previous_item:
previous_frame = previous_item[0]
job.addFile(cache_path + current_file, previous_frame + 1, current_frame)
else:
next_frame = next_item[0]
previous_frame = previous_item[0]
job.addFile(cache_path + current_file, previous_frame + 1, next_frame - 1)
def fillCommonJobSettings(job, job_name, netsettings):
job.name = job_name
job.category = netsettings.job_category
for slave in netrender.blacklist:
job.blacklist.append(slave.id)
job.chunks = netsettings.chunks
job.priority = netsettings.priority
if netsettings.job_type == "JOB_BLENDER":
job.type = netrender.model.JOB_BLENDER
elif netsettings.job_type == "JOB_PROCESS":
job.type = netrender.model.JOB_PROCESS
elif netsettings.job_type == "JOB_VCS":
job.type = netrender.model.JOB_VCS
def clientSendJob(conn, scene, anim = False):
netsettings = scene.network_render
if netsettings.job_type == "JOB_BLENDER":
return clientSendJobBlender(conn, scene, anim)
elif netsettings.job_type == "JOB_VCS":
return clientSendJobVCS(conn, scene, anim)
def clientSendJobVCS(conn, scene, anim = False):
netsettings = scene.network_render
job = netrender.model.RenderJob()
if anim:
for f in range(scene.frame_start, scene.frame_end + 1):
job.addFrame(f)
else:
job.addFrame(scene.frame_current)
filename = bpy.data.filepath
if not filename.startswith(netsettings.vcs_wpath):
# this is an error, need better way to handle this
return
filename = filename[len(netsettings.vcs_wpath):]
if filename[0] in (os.sep, os.altsep):
filename = filename[1:]
print("CREATING VCS JOB", filename)
job.addFile(filename, signed=False)
job_name = netsettings.job_name
path, name = os.path.split(filename)
if job_name == "[default]":
job_name = name
fillCommonJobSettings(job, job_name, netsettings)
# VCS Specific code
job.version_info = netrender.model.VersioningInfo()
job.version_info.system = netsettings.vcs_system
job.version_info.wpath = netsettings.vcs_wpath
job.version_info.rpath = netsettings.vcs_rpath
job.version_info.revision = netsettings.vcs_revision
# try to send path first
conn.request("POST", "/job", json.dumps(job.serialize()))
response = conn.getresponse()
response.read()
job_id = response.getheader("job-id")
# a VCS job is always good right now, need error handling
return job_id
def clientSendJobBlender(conn, scene, anim = False):
netsettings = scene.network_render
job = netrender.model.RenderJob()
if anim:
for f in range(scene.frame_start, scene.frame_end + 1):
job.addFrame(f)
else:
job.addFrame(scene.frame_current)
filename = bpy.data.filepath
job.addFile(filename)
job_name = netsettings.job_name
path, name = os.path.split(filename)
if job_name == "[default]":
job_name = name
###########################
# LIBRARIES
###########################
for lib in bpy.data.libraries:
file_path = bpy.path.abspath(lib.filepath)
if os.path.exists(file_path):
job.addFile(file_path)
###########################
# IMAGES
###########################
for image in bpy.data.images:
if image.source == "FILE" and not image.packed_file:
file_path = bpy.path.abspath(image.filepath)
if os.path.exists(file_path):
job.addFile(file_path)
tex_path = os.path.splitext(file_path)[0] + ".tex"
if os.path.exists(tex_path):
job.addFile(tex_path)
###########################
# FLUID + POINT CACHE
###########################
root, ext = os.path.splitext(name)
default_path = path + os.sep + "blendcache_" + root + os.sep # need an API call for that
for object in bpy.data.objects:
for modifier in object.modifiers:
if modifier.type == 'FLUID_SIMULATION' and modifier.settings.type == "DOMAIN":
addFluidFiles(job, bpy.path.abspath(modifier.settings.filepath))
elif modifier.type == "CLOTH":
addPointCache(job, object, modifier.point_cache, default_path)
elif modifier.type == "SOFT_BODY":
addPointCache(job, object, modifier.point_cache, default_path)
elif modifier.type == "SMOKE" and modifier.smoke_type == "TYPE_DOMAIN":
addPointCache(job, object, modifier.domain_settings.point_cache, default_path)
elif modifier.type == "MULTIRES" and modifier.is_external:
file_path = bpy.path.abspath(modifier.filepath)
job.addFile(file_path)
# particle systems don't expose their point cache through a modifier,
# so go through the object's particle_systems collection instead
for psys in object.particle_systems:
addPointCache(job, object, psys.point_cache, default_path)
#print(job.files)
fillCommonJobSettings(job, job_name, netsettings)
# send the job description (file paths only) first; actual files are uploaded below only if the master asks for them
conn.request("POST", "/job", json.dumps(job.serialize()))
response = conn.getresponse()
response.read()
job_id = response.getheader("job-id")
# if the job was only ACCEPTED (not processed), the master is missing files, so upload them
if response.status == http.client.ACCEPTED:
for rfile in job.files:
f = open(rfile.filepath, "rb")
conn.request("PUT", fileURL(job_id, rfile.index), f)
f.close()
response = conn.getresponse()
response.read()
# server will reply with ACCEPTED until all files are found
return job_id
def requestResult(conn, job_id, frame):
conn.request("GET", renderURL(job_id, frame))
class NetworkRenderEngine(bpy.types.RenderEngine):
bl_idname = 'NET_RENDER'
bl_label = "Network Render"
bl_use_postprocess = False
def render(self, scene):
if scene.network_render.mode == "RENDER_CLIENT":
self.render_client(scene)
elif scene.network_render.mode == "RENDER_SLAVE":
self.render_slave(scene)
elif scene.network_render.mode == "RENDER_MASTER":
self.render_master(scene)
else:
print("UNKNOWN OPERATION MODE")
def render_master(self, scene):
netsettings = scene.network_render
address = "" if netsettings.server_address == "[default]" else netsettings.server_address
master.runMaster((address, netsettings.server_port), netsettings.use_master_broadcast, netsettings.use_master_clear, bpy.path.abspath(netsettings.path), self.update_stats, self.test_break)
def render_slave(self, scene):
slave.render_slave(self, scene.network_render, scene.render.threads)
def render_client(self, scene):
netsettings = scene.network_render
self.update_stats("", "Network render client initiation")
conn = clientConnection(netsettings.server_address, netsettings.server_port)
if conn:
# Sending file
self.update_stats("", "Network render exporting")
new_job = False
job_id = netsettings.job_id
# reading back result
self.update_stats("", "Network render waiting for results")
requestResult(conn, job_id, scene.frame_current)
response = conn.getresponse()
buf = response.read()
if response.status == http.client.NO_CONTENT:
new_job = True
netsettings.job_id = clientSendJob(conn, scene)
job_id = netsettings.job_id
requestResult(conn, job_id, scene.frame_current)
response = conn.getresponse()
buf = response.read()
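# poll the master about once a second until the frame is rendered or the user cancels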
while response.status == http.client.ACCEPTED and not self.test_break():
time.sleep(1)
requestResult(conn, job_id, scene.frame_current)
response = conn.getresponse()
buf = response.read()
# cancel new jobs (animate on network) on break
if self.test_break() and new_job:
conn.request("POST", cancelURL(job_id))
response = conn.getresponse()
response.read()
print( response.status, response.reason )
netsettings.job_id = 0
if response.status != http.client.OK:
conn.close()
return
r = scene.render
x = int(r.resolution_x * r.resolution_percentage * 0.01)
y = int(r.resolution_y * r.resolution_percentage * 0.01)
result_path = os.path.join(bpy.path.abspath(netsettings.path), "output.exr")
folder = os.path.split(result_path)[0]
if not os.path.exists(folder):
os.mkdir(folder)
f = open(result_path, "wb")
f.write(buf)
f.close()
result = self.begin_result(0, 0, x, y)
result.load_from_file(result_path)
self.end_result(result)
conn.close()
def compatible(module):
module = __import__(module)
for subclass in module.__dict__.values():
try:
subclass.COMPAT_ENGINES.add('NET_RENDER')
except AttributeError:
pass
del module
compatible("properties_world")
compatible("properties_material")
compatible("properties_data_mesh")
compatible("properties_data_camera")
compatible("properties_texture")

File diff suppressed because it is too large

@ -1,315 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import os
import re
import shutil
from netrender.utils import *
import netrender.model
src_folder = os.path.split(__file__)[0]
def get(handler):
def output(text):
handler.wfile.write(bytes(text, encoding='utf8'))
def head(title, refresh = False):
output("<html><head>")
if refresh:
output("<meta http-equiv='refresh' content=5>")
output("<script src='/html/netrender.js' type='text/javascript'></script>")
# output("<script src='/html/json2.js' type='text/javascript'></script>")
output("<title>")
output(title)
output("</title></head><body>")
output("<link rel='stylesheet' href='/html/netrender.css' type='text/css'>")
def link(text, url, script=""):
return "<a href='%s' %s>%s</a>" % (url, script, text)
def tag(name, text, attr=""):
return "<%s %s>%s</%s>" % (name, attr, text, name)
def startTable(border=1, class_style = None, caption = None):
output("<table border='%i'" % border)
if class_style:
output(" class='%s'" % class_style)
output(">")
if caption:
output("<caption>%s</caption>" % caption)
def headerTable(*headers):
output("<thead><tr>")
for c in headers:
output("<td>" + c + "</td>")
output("</tr></thead>")
def rowTable(*data, id = None, class_style = None, extra = None):
output("<tr")
if id:
output(" id='%s'" % id)
if class_style:
output(" class='%s'" % class_style)
if extra:
output(" %s" % extra)
output(">")
for c in data:
output("<td>" + str(c) + "</td>")
output("</tr>")
def endTable():
output("</table>")
def checkbox(title, value, script=""):
return """<input type="checkbox" title="%s" %s %s>""" % (title, "checked" if value else "", ("onclick=\"%s\"" % script) if script else "")
if handler.path == "/html/netrender.js":
f = open(os.path.join(src_folder, "netrender.js"), 'rb')
handler.send_head(content = "text/javascript")
shutil.copyfileobj(f, handler.wfile)
f.close()
elif handler.path == "/html/netrender.css":
f = open(os.path.join(src_folder, "netrender.css"), 'rb')
handler.send_head(content = "text/css")
shutil.copyfileobj(f, handler.wfile)
f.close()
elif handler.path == "/html" or handler.path == "/":
handler.send_head(content = "text/html")
head("NetRender", refresh = True)
output("<h2>Jobs</h2>")
startTable()
headerTable(
"&nbsp;",
"id",
"name",
"category",
"type",
"chunks",
"priority",
"usage",
"wait",
"status",
"length",
"done",
"dispatched",
"error",
"priority",
"exception"
)
handler.server.balance()
for job in handler.server.jobs:
results = job.framesStatus()
rowTable(
"""<button title="cancel job" onclick="cancel_job('%s');">X</button>""" % job.id +
"""<button title="pause job" onclick="request('/pause_%s', null);">P</button>""" % job.id +
"""<button title="reset all frames" onclick="request('/resetall_%s_0', null);">R</button>""" % job.id,
job.id,
link(job.name, "/html/job" + job.id),
job.category if job.category else "<i>None</i>",
netrender.model.JOB_TYPES[job.type],
str(job.chunks) +
"""<button title="increase chunks size" onclick="request('/edit_%s', &quot;{'chunks': %i}&quot;);">+</button>""" % (job.id, job.chunks + 1) +
"""<button title="decrease chunks size" onclick="request('/edit_%s', &quot;{'chunks': %i}&quot;);" %s>-</button>""" % (job.id, job.chunks - 1, "disabled=True" if job.chunks == 1 else ""),
str(job.priority) +
"""<button title="increase priority" onclick="request('/edit_%s', &quot;{'priority': %i}&quot;);">+</button>""" % (job.id, job.priority + 1) +
"""<button title="decrease priority" onclick="request('/edit_%s', &quot;{'priority': %i}&quot;);" %s>-</button>""" % (job.id, job.priority - 1, "disabled=True" if job.priority == 1 else ""),
"%0.1f%%" % (job.usage * 100),
"%is" % int(time.time() - job.last_dispatched),
job.statusText(),
len(job),
results[DONE],
results[DISPATCHED],
str(results[ERROR]) +
"""<button title="reset error frames" onclick="request('/reset_%s_0', null);" %s>R</button>""" % (job.id, "disabled=True" if not results[ERROR] else ""),
"yes" if handler.server.balancer.applyPriorities(job) else "no",
"yes" if handler.server.balancer.applyExceptions(job) else "no"
)
endTable()
output("<h2>Slaves</h2>")
startTable()
headerTable("name", "address", "last seen", "stats", "job")
for slave in handler.server.slaves:
rowTable(slave.name, slave.address[0], time.ctime(slave.last_seen), slave.stats, link(slave.job.name, "/html/job" + slave.job.id) if slave.job else "None")
endTable()
output("<h2>Configuration</h2>")
output("""<button title="remove all jobs" onclick="clear_jobs();">CLEAR JOB LIST</button>""")
startTable(caption = "Rules", class_style = "rules")
headerTable("type", "enabled", "description", "limit")
for rule in handler.server.balancer.rules:
rowTable(
"rating",
checkbox("", rule.enabled, "balance_enable('%s', '%s')" % (rule.id(), str(not rule.enabled).lower())),
rule,
rule.str_limit() +
"""<button title="edit limit" onclick="balance_edit('%s', '%s');">edit</button>""" % (rule.id(), str(rule.limit)) if hasattr(rule, "limit") else "&nbsp;"
)
for rule in handler.server.balancer.priorities:
rowTable(
"priority",
checkbox("", rule.enabled, "balance_enable('%s', '%s')" % (rule.id(), str(not rule.enabled).lower())),
rule,
rule.str_limit() +
"""<button title="edit limit" onclick="balance_edit('%s', '%s');">edit</button>""" % (rule.id(), str(rule.limit)) if hasattr(rule, "limit") else "&nbsp;"
)
for rule in handler.server.balancer.exceptions:
rowTable(
"exception",
checkbox("", rule.enabled, "balance_enable('%s', '%s')" % (rule.id(), str(not rule.enabled).lower())),
rule,
rule.str_limit() +
"""<button title="edit limit" onclick="balance_edit('%s', '%s');">edit</button>""" % (rule.id(), str(rule.limit)) if hasattr(rule, "limit") else "&nbsp;"
)
endTable()
output("</body></html>")
elif handler.path.startswith("/html/job"):
handler.send_head(content = "text/html")
job_id = handler.path[9:]
head("NetRender")
job = handler.server.getJobID(job_id)
if job:
output("<h2>Render Information</h2>")
job.initInfo()
startTable()
rowTable("resolution", "%ix%i at %i%%" % job.resolution)
endTable()
if job.type == netrender.model.JOB_BLENDER:
output("<h2>Files</h2>")
startTable()
headerTable("path")
tot_cache = 0
tot_fluid = 0
rowTable(job.files[0].filepath)
rowTable("Other Files", class_style = "toggle", extra = "onclick='toggleDisplay(&quot;.other&quot;, &quot;none&quot;, &quot;table-row&quot;)'")
for file in job.files:
if file.filepath.endswith(".bphys"):
tot_cache += 1
elif file.filepath.endswith(".bobj.gz") or file.filepath.endswith(".bvel.gz"):
tot_fluid += 1
else:
if file != job.files[0]:
rowTable(file.filepath, class_style = "other")
if tot_cache > 0:
rowTable("%i physic cache files" % tot_cache, class_style = "toggle", extra = "onclick='toggleDisplay(&quot;.cache&quot;, &quot;none&quot;, &quot;table-row&quot;)'")
for file in job.files:
if file.filepath.endswith(".bphys"):
rowTable(os.path.split(file.filepath)[1], class_style = "cache")
if tot_fluid > 0:
rowTable("%i fluid bake files" % tot_fluid, class_style = "toggle", extra = "onclick='toggleDisplay(&quot;.fluid&quot;, &quot;none&quot;, &quot;table-row&quot;)'")
for file in job.files:
if file.filepath.endswith(".bobj.gz") or file.filepath.endswith(".bvel.gz"):
rowTable(os.path.split(file.filepath)[1], class_style = "fluid")
endTable()
elif job.type == netrender.model.JOB_VCS:
output("<h2>Versioning</h2>")
startTable()
rowTable("System", job.version_info.system.name)
rowTable("Remote Path", job.version_info.rpath)
rowTable("Working Path", job.version_info.wpath)
rowTable("Revision", job.version_info.revision)
rowTable("Render File", job.files[0].filepath)
endTable()
if job.blacklist:
output("<h2>Blacklist</h2>")
startTable()
headerTable("name", "address")
for slave_id in job.blacklist:
slave = handler.server.slaves_map[slave_id]
rowTable(slave.name, slave.address[0])
endTable()
output("<h2>Frames</h2>")
startTable()
headerTable("no", "status", "render time", "slave", "log", "result", "")
for frame in job.frames:
rowTable(
frame.number,
frame.statusText(),
"%.1fs" % frame.time,
frame.slave.name if frame.slave else "&nbsp;",
link("view log", logURL(job_id, frame.number)) if frame.log_path else "&nbsp;",
link("view result", renderURL(job_id, frame.number)) + " [" +
tag("span", "show", attr="class='thumb' onclick='showThumb(%s, %i)'" % (job.id, frame.number)) + "]" if frame.status == DONE else "&nbsp;",
"<img name='thumb%i' title='hide thumbnails' src='' class='thumb' onclick='showThumb(%s, %i)'>" % (frame.number, job.id, frame.number)
)
endTable()
else:
output("no such job")
output("</body></html>")

@ -1,360 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import sys, os
import http, http.client, http.server, urllib
import subprocess, shutil, time, hashlib
import netrender.versioning as versioning
from netrender.utils import *
class LogFile:
def __init__(self, job_id = 0, slave_id = 0, frames = None):
self.job_id = job_id
self.slave_id = slave_id
# avoid the shared mutable default argument pitfall
self.frames = frames if frames is not None else []
def serialize(self):
return {
"job_id": self.job_id,
"slave_id": self.slave_id,
"frames": self.frames
}
@staticmethod
def materialize(data):
if not data:
return None
logfile = LogFile()
logfile.job_id = data["job_id"]
logfile.slave_id = data["slave_id"]
logfile.frames = data["frames"]
return logfile
class RenderSlave:
_slave_map = {}
def __init__(self):
self.id = ""
self.name = ""
self.address = ("",0)
self.stats = ""
self.total_done = 0
self.total_error = 0
self.last_seen = 0.0
def serialize(self):
return {
"id": self.id,
"name": self.name,
"address": self.address,
"stats": self.stats,
"total_done": self.total_done,
"total_error": self.total_error,
"last_seen": self.last_seen
}
@staticmethod
def materialize(data, cache = True):
if not data:
return None
slave_id = data["id"]
if cache and slave_id in RenderSlave._slave_map:
return RenderSlave._slave_map[slave_id]
slave = RenderSlave()
slave.id = slave_id
slave.name = data["name"]
slave.address = data["address"]
slave.stats = data["stats"]
slave.total_done = data["total_done"]
slave.total_error = data["total_error"]
slave.last_seen = data["last_seen"]
if cache:
RenderSlave._slave_map[slave_id] = slave
return slave
JOB_BLENDER = 1
JOB_PROCESS = 2
JOB_VCS = 3
JOB_TYPES = {
JOB_BLENDER: "Blender",
JOB_PROCESS: "Process",
JOB_VCS: "Versioned",
}
class VersioningInfo:
def __init__(self, info = None):
self._system = None
self.wpath = ""
self.rpath = ""
self.revision = ""
@property
def system(self):
return self._system
@system.setter
def system(self, value):
self._system = versioning.SYSTEMS[value]
def update(self):
self.system.update(self)
def serialize(self):
return {
"wpath": self.wpath,
"rpath": self.rpath,
"revision": self.revision,
"system": self.system.name
}
@staticmethod
def generate(system, path):
vs = VersioningInfo()
vs.wpath = path
vs.system = system
vs.rpath = vs.system.path(path)
vs.revision = vs.system.revision(path)
return vs
@staticmethod
def materialize(data):
if not data:
return None
vs = VersioningInfo()
vs.wpath = data["wpath"]
vs.rpath = data["rpath"]
vs.revision = data["revision"]
vs.system = data["system"]
return vs
class RenderFile:
def __init__(self, filepath = "", index = 0, start = -1, end = -1, signature=0):
self.filepath = filepath
self.original_path = filepath
self.signature = signature
self.index = index
self.start = start
self.end = end
def serialize(self):
return {
"filepath": self.filepath,
"original_path": self.original_path,
"index": self.index,
"start": self.start,
"end": self.end,
"signature": self.signature
}
@staticmethod
def materialize(data):
if not data:
return None
rfile = RenderFile(data["filepath"], data["index"], data["start"], data["end"], data["signature"])
rfile.original_path = data["original_path"]
return rfile
class RenderJob:
def __init__(self, job_info = None):
self.id = ""
self.type = JOB_BLENDER
self.name = ""
self.category = "None"
self.status = JOB_WAITING
self.files = []
self.chunks = 0
self.priority = 0
self.blacklist = []
self.version_info = None
self.resolution = None
self.usage = 0.0
self.last_dispatched = 0.0
self.frames = []
if job_info:
self.type = job_info.type
self.name = job_info.name
self.category = job_info.category
self.status = job_info.status
self.files = job_info.files
self.chunks = job_info.chunks
self.priority = job_info.priority
self.blacklist = job_info.blacklist
self.version_info = job_info.version_info
def hasRenderResult(self):
return self.type in (JOB_BLENDER, JOB_VCS)
def rendersWithBlender(self):
return self.type in (JOB_BLENDER, JOB_VCS)
def addFile(self, file_path, start=-1, end=-1, signed=True):
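# signed files carry a hash of their content so slaves can verify local copies;
# start/end of -1 means the file is needed for every frame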
if signed:
signature = hashFile(file_path)
else:
signature = None
self.files.append(RenderFile(file_path, len(self.files), start, end, signature))
def addFrame(self, frame_number, command = ""):
frame = RenderFrame(frame_number, command)
self.frames.append(frame)
return frame
def __len__(self):
return len(self.frames)
def countFrames(self, status=QUEUED):
total = 0
for f in self.frames:
if f.status == status:
total += 1
return total
def countSlaves(self):
return len(set((frame.slave for frame in self.frames if frame.status == DISPATCHED)))
def statusText(self):
return JOB_STATUS_TEXT[self.status]
def framesStatus(self):
results = {
QUEUED: 0,
DISPATCHED: 0,
DONE: 0,
ERROR: 0
}
for frame in self.frames:
results[frame.status] += 1
return results
def __contains__(self, frame_number):
for f in self.frames:
if f.number == frame_number:
return True
else:
return False
def __getitem__(self, frame_number):
for f in self.frames:
if f.number == frame_number:
return f
else:
return None
def serialize(self, frames = None):
min_frame = min((f.number for f in frames)) if frames else -1
max_frame = max((f.number for f in frames)) if frames else -1
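# when serializing for a specific set of frames, only include files whose
# frame range overlaps it; files with start == -1 apply to every frame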
return {
"id": self.id,
"type": self.type,
"name": self.name,
"category": self.category,
"status": self.status,
"files": [f.serialize() for f in self.files if f.start == -1 or not frames or (f.start <= max_frame and f.end >= min_frame)],
"frames": [f.serialize() for f in self.frames if not frames or f in frames],
"chunks": self.chunks,
"priority": self.priority,
"usage": self.usage,
"blacklist": self.blacklist,
"last_dispatched": self.last_dispatched,
"version_info": self.version_info.serialize() if self.version_info else None,
"resolution": self.resolution
}
@staticmethod
def materialize(data):
if not data:
return None
job = RenderJob()
job.id = data["id"]
job.type = data["type"]
job.name = data["name"]
job.category = data["category"]
job.status = data["status"]
job.files = [RenderFile.materialize(f) for f in data["files"]]
job.frames = [RenderFrame.materialize(f) for f in data["frames"]]
job.chunks = data["chunks"]
job.priority = data["priority"]
job.usage = data["usage"]
job.blacklist = data["blacklist"]
job.last_dispatched = data["last_dispatched"]
job.resolution = data["resolution"]
version_info = data.get("version_info", None)
if version_info:
job.version_info = VersioningInfo.materialize(version_info)
return job
class RenderFrame:
def __init__(self, number = 0, command = ""):
self.number = number
self.time = 0
self.status = QUEUED
self.slave = None
self.command = command
def statusText(self):
return FRAME_STATUS_TEXT[self.status]
def serialize(self):
return {
"number": self.number,
"time": self.time,
"status": self.status,
"slave": None if not self.slave else self.slave.serialize(),
"command": self.command
}
@staticmethod
def materialize(data):
if not data:
return None
frame = RenderFrame()
frame.number = data["number"]
frame.time = data["time"]
frame.status = data["status"]
frame.slave = RenderSlave.materialize(data["slave"])
frame.command = data["command"]
return frame

@ -1,88 +0,0 @@
body {
background-color:#eee;
font-size:12px;
font-family: "Lucida Sans","Lucida Sans Unicode","Lucida Grande",Lucida,sans-serif;
}
a {
/*text-decoration:none;*/
color:#666;
}
a:hover {
color:#000;
}
h2 {
background-color:#ddd;
font-size:110%;
padding:5px;
}
table {
text-align:center;
border:0;
background-color:#ddd;
padding: 0px;
margin: 0px;
}
thead{
font-size:90%;
color:#555;
background-color:#ccc;
}
td {
border:0;
padding:2px;
padding-left:10px;
padding-right:10px;
margin-left:20px;
background-color:#ddd;
}
td:hover {
background-color:#ccc;
}
tr {
border:0;
}
button {
color: #111;
width: auto;
height: auto;
}
.toggle {
text-decoration: underline;
cursor: pointer;
}
.cache {
display: none;
}
.fluid {
display: none;
}
.other {
display: none;
}
.rules {
width: 60em;
text-align: left;
}
img.thumb {
display: none;
cursor: pointer;
}
span.thumb {
text-decoration: underline;
cursor: pointer;
}

@ -1,146 +0,0 @@
var lastFrame = -1;
var maxFrame = -1;
var minFrame = -1;
function request(url, data)
{
xmlhttp = new XMLHttpRequest();
xmlhttp.open("POST", url, false);
xmlhttp.send(data);
window.location.reload()
}
function edit(id, info)
{
request("/edit_" + id, info)
}
function clear_jobs()
{
var r=confirm("Also delete files on master?");
if (r==true) {
request('/clear', '{"clear":true}');
} else {
request('/clear', '{"clear":false}');
}
}
function cancel_job(id)
{
var r=confirm("Also delete files on master?");
if (r==true) {
request('/cancel_' + id, '{"clear":true}');
} else {
request('/cancel_' + id, '{"clear":false}');
}
}
function balance_edit(id, old_value)
{
var new_value = prompt("New limit", old_value);
if (new_value != null && new_value != "") {
request("/balance_limit", '{"' + id + '":"' + new_value + '"}');
}
}
function balance_enable(id, value)
{
request("/balance_enable", '{"' + id + '":' + value + "}");
}
function showThumb(job, frame)
{
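// toggle thumbnails; clicking a second frame toggles the whole range
// between the two clicked frames (shift-select style)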
if (lastFrame != -1) {
if (maxFrame != -1 && minFrame != -1) {
if (frame >= minFrame && frame <= maxFrame) {
for(i = minFrame; i <= maxFrame; i=i+1) {
toggleThumb(job, i);
}
minFrame = -1;
maxFrame = -1;
lastFrame = -1;
} else if (frame > maxFrame) {
for(i = maxFrame+1; i <= frame; i=i+1) {
toggleThumb(job, i);
}
maxFrame = frame;
lastFrame = frame;
} else {
for(i = frame; i <= minFrame-1; i=i+1) {
toggleThumb(job, i);
}
minFrame = frame;
lastFrame = frame;
}
} else if (frame == lastFrame) {
toggleThumb(job, frame);
} else if (frame < lastFrame) {
minFrame = frame;
maxFrame = lastFrame;
for(i = minFrame; i <= maxFrame-1; i=i+1) {
toggleThumb(job, i);
}
lastFrame = frame;
} else {
minFrame = lastFrame;
maxFrame = frame;
for(i = minFrame+1; i <= maxFrame; i=i+1) {
toggleThumb(job, i);
}
lastFrame = frame;
}
} else {
toggleThumb(job, frame);
}
}
function toggleThumb(job, frame)
{
img = document.images["thumb" + frame];
url = "/thumb_" + job + "_" + frame + ".jpg"
if (img.style.display == "block") {
img.style.display = "none";
img.src = "";
lastFrame = -1;
} else {
img.src = url;
img.style.display = "block";
lastFrame = frame;
}
}
function returnObjById( id )
{
if (document.getElementById)
var returnVar = document.getElementById(id);
else if (document.all)
var returnVar = document.all[id];
else if (document.layers)
var returnVar = document.layers[id];
return returnVar;
}
function toggleDisplay( className, value1, value2 )
{
style = getStyle(className)
if (style.style["display"] == value1) {
style.style["display"] = value2;
} else {
style.style["display"] = value1;
}
}
function getStyle(className) {
var classes = document.styleSheets[0].rules || document.styleSheets[0].cssRules
for(var x=0;x<classes.length;x++) {
if(classes[x].selectorText==className) {
return classes[x];
}
}
}

@ -1,564 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import sys, os
import http, http.client, http.server, urllib, socket
import webbrowser
import json
import netrender
from netrender.utils import *
import netrender.client as client
import netrender.model
import netrender.versioning as versioning
class RENDER_OT_netslave_bake(bpy.types.Operator):
'''Bake all fluid simulations and point caches in the file to disk so they can be sent with a network render job'''
bl_idname = "render.netslavebake"
bl_label = "Bake all in file"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
scene = context.scene
netsettings = scene.network_render
filename = bpy.data.filepath
path, name = os.path.split(filename)
root, ext = os.path.splitext(name)
default_path = path + os.sep + "blendcache_" + root + os.sep # need an API call for that
relative_path = os.sep + os.sep + "blendcache_" + root + os.sep
# Force all point cache next to the blend file
for object in bpy.data.objects:
for modifier in object.modifiers:
if modifier.type == 'FLUID_SIMULATION' and modifier.settings.type == "DOMAIN":
modifier.settings.path = relative_path
bpy.ops.fluid.bake({"active_object": object, "scene": scene})
elif modifier.type == "CLOTH":
modifier.point_cache.frame_step = 1
modifier.point_cache.use_disk_cache = True
modifier.point_cache.use_external = False
elif modifier.type == "SOFT_BODY":
modifier.point_cache.frame_step = 1
modifier.point_cache.use_disk_cache = True
modifier.point_cache.use_external = False
elif modifier.type == "SMOKE" and modifier.smoke_type == "TYPE_DOMAIN":
modifier.domain_settings.point_cache.frame_step = 1
modifier.domain_settings.point_cache.use_disk_cache = True
modifier.domain_settings.point_cache.use_external = False
# particles modifier are stupid and don't contain data
# we have to go through the object property
for psys in object.particle_systems:
psys.point_cache.frame_step = 1
psys.point_cache.use_disk_cache = True
psys.point_cache.use_external = False
psys.point_cache.filepath = relative_path
bpy.ops.ptcache.bake_all()
#bpy.ops.wm.save_mainfile(filepath = path + os.sep + root + "_baked.blend")
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class RENDER_OT_netclientanim(bpy.types.Operator):
'''Start rendering an animation on network'''
bl_idname = "render.netclientanim"
bl_label = "Animation on network"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
scene = context.scene
netsettings = scene.network_render
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
if conn:
# Sending file
scene.network_render.job_id = client.clientSendJob(conn, scene, True)
conn.close()
bpy.ops.render.render('INVOKE_AREA', animation=True)
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class RENDER_OT_netclientrun(bpy.types.Operator):
'''Start network rendering service'''
bl_idname = "render.netclientstart"
bl_label = "Start Service"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
bpy.ops.render.render('INVOKE_AREA', animation=True)
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class RENDER_OT_netclientsend(bpy.types.Operator):
'''Send Render Job to the Network'''
bl_idname = "render.netclientsend"
bl_label = "Send job"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
scene = context.scene
netsettings = scene.network_render
try:
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
if conn:
# Sending file
scene.network_render.job_id = client.clientSendJob(conn, scene, True)
conn.close()
self.report('INFO', "Job sent to master")
except Exception as err:
self.report('ERROR', str(err))
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class RENDER_OT_netclientsendframe(bpy.types.Operator):
'''Send Render Job with current frame to the Network'''
bl_idname = "render.netclientsendframe"
bl_label = "Send current frame job"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
scene = context.scene
netsettings = scene.network_render
try:
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
if conn:
# Sending file
scene.network_render.job_id = client.clientSendJob(conn, scene, False)
conn.close()
self.report('INFO', "Job sent to master")
except Exception as err:
self.report('ERROR', str(err))
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class RENDER_OT_netclientstatus(bpy.types.Operator):
'''Refresh the status of the current jobs'''
bl_idname = "render.netclientstatus"
bl_label = "Client Status"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
netsettings = context.scene.network_render
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
if conn:
conn.request("GET", "/status")
response = conn.getresponse()
content = response.read()
print( response.status, response.reason )
jobs = (netrender.model.RenderJob.materialize(j) for j in json.loads(str(content, encoding='utf8')))
while(len(netsettings.jobs) > 0):
netsettings.jobs.remove(0)
netrender.jobs = []
for j in jobs:
netrender.jobs.append(j)
netsettings.jobs.add()
job = netsettings.jobs[-1]
j.results = j.framesStatus() # cache frame status
job.name = j.name
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class RENDER_OT_netclientblacklistslave(bpy.types.Operator):
'''Move the selected render slave to the blacklist so no more jobs are dispatched to it'''
bl_idname = "render.netclientblacklistslave"
bl_label = "Client Blacklist Slave"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
netsettings = context.scene.network_render
if netsettings.active_slave_index >= 0:
# deal with data
slave = netrender.slaves.pop(netsettings.active_slave_index)
netrender.blacklist.append(slave)
# deal with rna
netsettings.slaves_blacklist.add()
netsettings.slaves_blacklist[-1].name = slave.name
netsettings.slaves.remove(netsettings.active_slave_index)
netsettings.active_slave_index = -1
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class RENDER_OT_netclientwhitelistslave(bpy.types.Operator):
'''Remove the selected render slave from the blacklist'''
bl_idname = "render.netclientwhitelistslave"
bl_label = "Client Whitelist Slave"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
netsettings = context.scene.network_render
if netsettings.active_blacklisted_slave_index >= 0:
# deal with data
slave = netrender.blacklist.pop(netsettings.active_blacklisted_slave_index)
netrender.slaves.append(slave)
# deal with rna
netsettings.slaves.add()
netsettings.slaves[-1].name = slave.name
netsettings.slaves_blacklist.remove(netsettings.active_blacklisted_slave_index)
netsettings.active_blacklisted_slave_index = -1
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class RENDER_OT_netclientslaves(bpy.types.Operator):
'''Refresh status about available Render slaves'''
bl_idname = "render.netclientslaves"
bl_label = "Client Slaves"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
netsettings = context.scene.network_render
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
if conn:
conn.request("GET", "/slaves")
response = conn.getresponse()
content = response.read()
print( response.status, response.reason )
slaves = (netrender.model.RenderSlave.materialize(s) for s in json.loads(str(content, encoding='utf8')))
while(len(netsettings.slaves) > 0):
netsettings.slaves.remove(0)
netrender.slaves = []
for s in slaves:
for i in range(len(netrender.blacklist)):
slave = netrender.blacklist[i]
if slave.id == s.id:
netrender.blacklist[i] = s
netsettings.slaves_blacklist[i].name = s.name
break
else:
netrender.slaves.append(s)
netsettings.slaves.add()
slave = netsettings.slaves[-1]
slave.name = s.name
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class RENDER_OT_netclientcancel(bpy.types.Operator):
'''Cancel the selected network rendering job.'''
bl_idname = "render.netclientcancel"
bl_label = "Client Cancel"
@classmethod
def poll(cls, context):
netsettings = context.scene.network_render
return netsettings.active_job_index >= 0 and len(netsettings.jobs) > 0
def execute(self, context):
netsettings = context.scene.network_render
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
if conn:
job = netrender.jobs[netsettings.active_job_index]
conn.request("POST", cancelURL(job.id), json.dumps({'clear':False}))
response = conn.getresponse()
response.read()
print( response.status, response.reason )
netsettings.jobs.remove(netsettings.active_job_index)
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class RENDER_OT_netclientcancelall(bpy.types.Operator):
'''Cancel all running network rendering jobs.'''
bl_idname = "render.netclientcancelall"
bl_label = "Client Cancel All"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
netsettings = context.scene.network_render
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
if conn:
conn.request("POST", "/clear", json.dumps({'clear':False}))
response = conn.getresponse()
response.read()
print( response.status, response.reason )
while(len(netsettings.jobs) > 0):
netsettings.jobs.remove(0)
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class netclientdownload(bpy.types.Operator):
'''Download render results from the network'''
bl_idname = "render.netclientdownload"
bl_label = "Client Download"
@classmethod
def poll(cls, context):
netsettings = context.scene.network_render
return netsettings.active_job_index >= 0 and len(netsettings.jobs) > netsettings.active_job_index
def execute(self, context):
netsettings = context.scene.network_render
rd = context.scene.render
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
if conn:
job_id = netrender.jobs[netsettings.active_job_index].id
conn.request("GET", "/status", headers={"job-id":job_id})
response = conn.getresponse()
if response.status != http.client.OK:
self.report('ERROR', "Job ID %i not defined on master" % job_id)
return {'ERROR'}
content = response.read()
job = netrender.model.RenderJob.materialize(json.loads(str(content, encoding='utf8')))
conn.close()
finished_frames = []
nb_error = 0
nb_missing = 0
for frame in job.frames:
if frame.status == DONE:
finished_frames.append(frame.number)
elif frame.status == ERROR:
nb_error += 1
else:
nb_missing += 1
if not finished_frames:
return {'CANCELLED'}
frame_ranges = []
first = None
last = None
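# collapse the finished frame numbers into contiguous (first, last) ranges
# so only those results are requested from the master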
for i in range(len(finished_frames)):
current = finished_frames[i]
if not first:
first = current
last = current
elif last + 1 == current:
last = current
if last + 1 < current or i + 1 == len(finished_frames):
if first < last:
frame_ranges.append((first, last))
else:
frame_ranges.append((first,))
first = current
last = current
getResults(netsettings.server_address, netsettings.server_port, job_id, job.resolution[0], job.resolution[1], job.resolution[2], frame_ranges)
if nb_error and nb_missing:
self.report('ERROR', "Results downloaded but skipped %i frames with errors and %i unfinished frames" % (nb_error, nb_missing))
elif nb_error:
self.report('ERROR', "Results downloaded but skipped %i frames with errors" % nb_error)
elif nb_missing:
self.report('WARNING', "Results downloaded but skipped %i unfinished frames" % nb_missing)
else:
self.report('INFO', "All results downloaded")
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class netclientscan(bpy.types.Operator):
'''Listen on network for master server broadcasting its address and port.'''
bl_idname = "render.netclientscan"
bl_label = "Client Scan"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
address, port = clientScan(self.report)
if address:
scene = context.scene
netsettings = scene.network_render
netsettings.server_address = address
netsettings.server_port = port
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class netclientvcsguess(bpy.types.Operator):
'''Guess VCS setting for the current file'''
bl_idname = "render.netclientvcsguess"
bl_label = "VCS Guess"
@classmethod
def poll(cls, context):
return True
def execute(self, context):
netsettings = context.scene.network_render
system = versioning.SYSTEMS.get(netsettings.vcs_system, None)
if system:
wpath, name = os.path.split(os.path.abspath(bpy.data.filepath))
rpath = system.path(wpath)
revision = system.revision(wpath)
netsettings.vcs_wpath = wpath
netsettings.vcs_rpath = rpath
netsettings.vcs_revision = revision
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)
class netclientweb(bpy.types.Operator):
'''Open new window with information about running rendering jobs'''
bl_idname = "render.netclientweb"
bl_label = "Open Master Monitor"
@classmethod
def poll(cls, context):
netsettings = context.scene.network_render
return netsettings.server_address != "[default]"
def execute(self, context):
netsettings = context.scene.network_render
# open connection to make sure server exists
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
if conn:
conn.close()
webbrowser.open("http://%s:%i" % (netsettings.server_address, netsettings.server_port))
return {'FINISHED'}
def invoke(self, context, event):
return self.execute(context)

@ -1,150 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import sys, os
import subprocess
import bpy
from netrender.utils import *
import netrender.model
BLENDER_PATH = sys.argv[0]
def reset(job):
main_file = job.files[0]
job_full_path = main_file.filepath
if os.path.exists(job_full_path + ".bak"):
os.remove(job_full_path) # repathed file
os.renames(job_full_path + ".bak", job_full_path)
def update(job):
paths = []
main_file = job.files[0]
job_full_path = main_file.filepath
path, ext = os.path.splitext(job_full_path)
new_path = path + ".remap" + ext
# Disable for now. Partial repath should work anyway
#all = main_file.filepath != main_file.original_path
all = False
for rfile in job.files[1:]:
if all or rfile.original_path != rfile.filepath:
paths.append(rfile.original_path)
paths.append(rfile.filepath)
# Only update if needed
if paths:
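# run a headless Blender on the job file to rewrite the remapped paths,
# then swap the remapped copy into place, keeping the original as .bak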
process = subprocess.Popen([BLENDER_PATH, "-b", "-noaudio", job_full_path, "-P", __file__, "--", new_path] + paths, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
process.wait()
os.renames(job_full_path, job_full_path + ".bak")
os.renames(new_path, job_full_path)
def process(paths):
def processPointCache(point_cache):
point_cache.use_external = False
def processFluid(fluid):
new_path = path_map.get(fluid.filepath, None)
if new_path:
fluid.filepath = new_path
path_map = {}
for i in range(0, len(paths), 2):
# special case for point cache
if paths[i].endswith(".bphys"):
pass # Don't need them in the map, they all use the default external path
# NOTE: this is probably not correct all the time, needs to be fixed.
# special case for fluids
elif paths[i].endswith(".bobj.gz"):
path_map[os.path.split(paths[i])[0]] = os.path.split(paths[i+1])[0]
else:
path_map[os.path.split(paths[i])[1]] = paths[i+1]
# TODO: the original paths aren't really the original paths (they are normalized),
# so we repath using the filenames only.
###########################
# LIBRARIES
###########################
for lib in bpy.data.libraries:
file_path = bpy.path.abspath(lib.filepath)
new_path = path_map.get(os.path.split(file_path)[1], None)
if new_path:
lib.filepath = new_path
###########################
# IMAGES
###########################
for image in bpy.data.images:
if image.source == "FILE" and not image.packed_file:
file_path = bpy.path.abspath(image.filepath)
new_path = path_map.get(os.path.split(file_path)[1], None)
if new_path:
image.filepath = new_path
###########################
# FLUID + POINT CACHE
###########################
for object in bpy.data.objects:
for modifier in object.modifiers:
if modifier.type == 'FLUID_SIMULATION' and modifier.settings.type == "DOMAIN":
processFluid(modifier.settings)
elif modifier.type == "CLOTH":
processPointCache(modifier.point_cache)
elif modifier.type == "SOFT_BODY":
processPointCache(modifier.point_cache)
elif modifier.type == "SMOKE" and modifier.smoke_type == "TYPE_DOMAIN":
processPointCache(modifier.domain_settings.point_cache_low)
if modifier.domain_settings.use_high_resolution:
processPointCache(modifier.domain_settings.point_cache_high)
elif modifier.type == "MULTIRES" and modifier.is_external:
file_path = bpy.path.abspath(modifier.filepath)
new_path = path_map.get(file_path, None)
if new_path:
modifier.filepath = new_path
# particles modifier are stupid and don't contain data
# we have to go through the object property
for psys in object.particle_systems:
processPointCache(psys.point_cache)
if __name__ == "__main__":
try:
i = sys.argv.index("--")
except:
i = 0
if i:
new_path = sys.argv[i+1]
args = sys.argv[i+2:]
process(args)
bpy.ops.wm.save_as_mainfile(filepath=new_path, check_existing=False)

@ -1,349 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import sys, os, platform, shutil
import http, http.client, http.server, urllib
import subprocess, time
import json
import bpy
from netrender.utils import *
import netrender.model
import netrender.repath
import netrender.thumbnail as thumbnail
BLENDER_PATH = sys.argv[0]
CANCEL_POLL_SPEED = 2
MAX_TIMEOUT = 10
INCREMENT_TIMEOUT = 1
MAX_CONNECT_TRY = 10
try:
system = platform.system()
except UnicodeDecodeError:
import sys
system = sys.platform
if system in ('Windows', 'win32') and platform.version() >= '5': # Error mode is only available on Win2k or higher, that's version 5
import ctypes
def SetErrorMode():
val = ctypes.windll.kernel32.SetErrorMode(0x0002)
ctypes.windll.kernel32.SetErrorMode(val | 0x0002)
return val
def RestoreErrorMode(val):
ctypes.windll.kernel32.SetErrorMode(val)
else:
def SetErrorMode():
return 0
def RestoreErrorMode(val):
pass
def clearSlave(path):
shutil.rmtree(path)
def slave_Info():
sysname, nodename, release, version, machine, processor = platform.uname()
slave = netrender.model.RenderSlave()
slave.name = nodename
slave.stats = sysname + " " + release + " " + machine + " " + processor
return slave
def testCancel(conn, job_id, frame_number):
conn.request("HEAD", "/status", headers={"job-id":job_id, "job-frame": str(frame_number)})
# canceled if job isn't found anymore
if responseStatus(conn) == http.client.NO_CONTENT:
return True
else:
return False
def testFile(conn, job_id, slave_id, rfile, JOB_PREFIX, main_path = None):
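# make sure the slave has a valid local copy of the file: check the expected
# location (and its signature), otherwise download it from the master into the job folder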
job_full_path = prefixPath(JOB_PREFIX, rfile.filepath, main_path)
found = os.path.exists(job_full_path)
if found and rfile.signature != None:
found_signature = hashFile(job_full_path)
found = found_signature == rfile.signature
if not found:
print("Found file %s at %s but signature mismatch!" % (rfile.filepath, job_full_path))
job_full_path = prefixPath(JOB_PREFIX, rfile.filepath, main_path, force = True)
if not found:
# Force prefix path if not found
job_full_path = prefixPath(JOB_PREFIX, rfile.filepath, main_path, force = True)
temp_path = os.path.join(JOB_PREFIX, "slave.temp")
conn.request("GET", fileURL(job_id, rfile.index), headers={"slave-id":slave_id})
response = conn.getresponse()
if response.status != http.client.OK:
return None # file for job not returned by server, need to return an error code to server
f = open(temp_path, "wb")
buf = response.read(1024)
while buf:
f.write(buf)
buf = response.read(1024)
f.close()
os.renames(temp_path, job_full_path)
rfile.filepath = job_full_path
return job_full_path
def breakable_timeout(engine, timeout):
for i in range(timeout):
time.sleep(1)
if engine.test_break():
break
def render_slave(engine, netsettings, threads):
timeout = 1
bisleep = BreakableIncrementedSleep(INCREMENT_TIMEOUT, 1, MAX_TIMEOUT, engine.test_break)
engine.update_stats("", "Network render node initiation")
conn = clientConnection(netsettings.server_address, netsettings.server_port)
if not conn:
timeout = 1
print("Connection failed, will try connecting again at most %i times" % MAX_CONNECT_TRY)
bisleep.reset()
for i in range(MAX_CONNECT_TRY):
bisleep.sleep()
conn = clientConnection(netsettings.server_address, netsettings.server_port)
if conn or engine.test_break():
break
print("Retry %i failed, waiting %is before retrying" % (i + 1, bisleep.current))
if conn:
conn.request("POST", "/slave", json.dumps(slave_Info().serialize()))
response = conn.getresponse()
response.read()
slave_id = response.getheader("slave-id")
NODE_PREFIX = os.path.join(bpy.path.abspath(netsettings.path), "slave_" + slave_id)
if not os.path.exists(NODE_PREFIX):
os.mkdir(NODE_PREFIX)
engine.update_stats("", "Network render connected to master, waiting for jobs")
while not engine.test_break():
conn.request("GET", "/job", headers={"slave-id":slave_id})
response = conn.getresponse()
if response.status == http.client.OK:
bisleep.reset()
job = netrender.model.RenderJob.materialize(json.loads(str(response.read(), encoding='utf8')))
engine.update_stats("", "Network render processing job from master")
JOB_PREFIX = os.path.join(NODE_PREFIX, "job_" + job.id)
if not os.path.exists(JOB_PREFIX):
os.mkdir(JOB_PREFIX)
# set tempdir for fsaa temp files
# have to set environ var because render is done in a subprocess and that's the easiest way to propagate the setting
os.environ["TMP"] = JOB_PREFIX
if job.type == netrender.model.JOB_BLENDER:
job_path = job.files[0].filepath # path of main file
main_path, main_file = os.path.split(job_path)
job_full_path = testFile(conn, job.id, slave_id, job.files[0], JOB_PREFIX)
print("Fullpath", job_full_path)
print("File:", main_file, "and %i other files" % (len(job.files) - 1,))
for rfile in job.files[1:]:
testFile(conn, job.id, slave_id, rfile, JOB_PREFIX, main_path)
print("\t", rfile.filepath)
netrender.repath.update(job)
engine.update_stats("", "Render File "+ main_file+ " for job "+ job.id)
elif job.type == netrender.model.JOB_VCS:
if not job.version_info:
# Need to return an error to server, incorrect job type
pass
job_path = job.files[0].filepath # path of main file
main_path, main_file = os.path.split(job_path)
job.version_info.update()
# For VCS jobs, file path is relative to the working copy path
job_full_path = os.path.join(job.version_info.wpath, job_path)
engine.update_stats("", "Render File "+ main_file+ " for job "+ job.id)
# announce log to master
logfile = netrender.model.LogFile(job.id, slave_id, [frame.number for frame in job.frames])
conn.request("POST", "/log", bytes(json.dumps(logfile.serialize()), encoding='utf8'))
response = conn.getresponse()
response.read()
first_frame = job.frames[0].number
# start render
start_t = time.time()
if job.rendersWithBlender():
frame_args = []
for frame in job.frames:
print("frame", frame.number)
frame_args += ["-f", str(frame.number)]
val = SetErrorMode()
process = subprocess.Popen([BLENDER_PATH, "-b", "-noaudio", job_full_path, "-t", str(threads), "-o", os.path.join(JOB_PREFIX, "######"), "-E", "BLENDER_RENDER", "-F", "MULTILAYER"] + frame_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
RestoreErrorMode(val)
elif job.type == netrender.model.JOB_PROCESS:
command = job.frames[0].command
val = SetErrorMode()
process = subprocess.Popen(command.split(" "), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
RestoreErrorMode(val)
headers = {"slave-id":slave_id}
cancelled = False
stdout = bytes()
run_t = time.time()
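# poll the render process: stream its stdout to the master log every few
# seconds and watch for cancellation from either side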
while not cancelled and process.poll() is None:
stdout += process.stdout.read(1024)
current_t = time.time()
cancelled = engine.test_break()
if current_t - run_t > CANCEL_POLL_SPEED:
# update logs if needed
if stdout:
# (only need to update the log on one frame, they all share the same log file)
conn.request("PUT", logURL(job.id, first_frame), stdout, headers=headers)
response = conn.getresponse()
response.read()
# Also output on console
if netsettings.use_slave_output_log:
print(str(stdout, encoding='utf8'), end="")
stdout = bytes()
run_t = current_t
if testCancel(conn, job.id, first_frame):
cancelled = True
if job.type == netrender.model.JOB_BLENDER:
netrender.repath.reset(job)
# read leftovers if needed
stdout += process.stdout.read()
if cancelled:
# kill process if needed
if process.poll() is None:
try:
process.terminate()
except OSError:
pass
continue # to next frame
# flush the rest of the logs
if stdout:
# Also output on console
if netsettings.use_slave_output_log:
print(str(stdout, encoding='utf8'), end="")
# (only need to update the log on one frame, they all share the same log file)
conn.request("PUT", logURL(job.id, first_frame), stdout, headers=headers)
if responseStatus(conn) == http.client.NO_CONTENT:
continue
total_t = time.time() - start_t
avg_t = total_t / len(job.frames)
status = process.returncode
print("status", status)
headers = {"job-id":job.id, "slave-id":slave_id, "job-time":str(avg_t)}
if status == 0: # non zero status is error
headers["job-result"] = str(DONE)
for frame in job.frames:
headers["job-frame"] = str(frame.number)
if job.hasRenderResult():
# send image back to server
filename = os.path.join(JOB_PREFIX, "%06d.exr" % frame.number)
# thumbnail first
if netsettings.use_slave_thumb:
thumbname = thumbnail.generate(filename)
if thumbname:
f = open(thumbname, 'rb')
conn.request("PUT", "/thumb", f, headers=headers)
f.close()
responseStatus(conn)
f = open(filename, 'rb')
conn.request("PUT", "/render", f, headers=headers)
f.close()
if responseStatus(conn) == http.client.NO_CONTENT:
continue
elif job.type == netrender.model.JOB_PROCESS:
conn.request("PUT", "/render", headers=headers)
if responseStatus(conn) == http.client.NO_CONTENT:
continue
else:
headers["job-result"] = str(ERROR)
for frame in job.frames:
headers["job-frame"] = str(frame.number)
# send error result back to server
conn.request("PUT", "/render", headers=headers)
if responseStatus(conn) == http.client.NO_CONTENT:
continue
engine.update_stats("", "Network render connected to master, waiting for jobs")
else:
bisleep.sleep()
conn.close()
if netsettings.use_slave_clear:
clearSlave(NODE_PREFIX)
if __name__ == "__main__":
pass

@ -1,81 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import sys, os
import subprocess
import bpy
def generate(filename, external=True):
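# external=True spawns a background Blender that re-runs this script on the file;
# external=False converts in the current process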
if external:
process = subprocess.Popen([sys.argv[0], "-b", "-noaudio", "-P", __file__, "--", filename], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
while process.poll() is None:
process.stdout.read(1024) # empty buffer to be sure
process.stdout.read()
return _thumbname(filename)
else:
return _internal(filename)
def _thumbname(filename):
root = os.path.splitext(filename)[0]
return root + ".jpg"
def _internal(filename):
imagename = os.path.split(filename)[1]
thumbname = _thumbname(filename)
if os.path.exists(thumbname):
return thumbname
if bpy:
scene = bpy.data.scenes[0] # FIXME, this is dodgy!
scene.render.file_format = "JPEG"
scene.render.file_quality = 90
# remove existing image, if there's a leftover (otherwise open changes the name)
if imagename in bpy.data.images:
img = bpy.data.images[imagename]
bpy.data.images.remove(img)
bpy.ops.image.open(filepath=filename)
img = bpy.data.images[imagename]
img.save_render(thumbname, scene=scene)
img.user_clear()
bpy.data.images.remove(img)
try:
process = subprocess.Popen(["convert", thumbname, "-resize", "300x300", thumbname])
process.wait()
return thumbname
except Exception as exp:
print("Error while generating thumbnail")
print(exp)
return None
if __name__ == "__main__":
import bpy
try:
start = sys.argv.index("--") + 1
except ValueError:
start = 0
for filename in sys.argv[start:]:
generate(filename, external=False)

@ -1,543 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
import sys, os
import http, http.client, http.server, urllib
import subprocess, shutil, time, hashlib
import netrender
import netrender.slave as slave
import netrender.master as master
from netrender.utils import *
VERSION = b"0.3"
PATH_PREFIX = "/tmp/"
QUEUED = 0
DISPATCHED = 1
DONE = 2
ERROR = 3
LAST_ADDRESS_TEST = 0
def base_poll(cls, context):
rd = context.scene.render
return (rd.use_game_engine==False) and (rd.engine in cls.COMPAT_ENGINES)
def init_file():
if netrender.init_file != bpy.data.filepath:
netrender.init_file = bpy.data.filepath
netrender.init_data = True
netrender.valid_address = False
def init_data(netsettings):
init_file()
if netrender.init_data:
netrender.init_data = False
netsettings.active_slave_index = 0
while(len(netsettings.slaves) > 0):
netsettings.slaves.remove(0)
netsettings.active_blacklisted_slave_index = 0
while(len(netsettings.slaves_blacklist) > 0):
netsettings.slaves_blacklist.remove(0)
netsettings.active_job_index = 0
while(len(netsettings.jobs) > 0):
netsettings.jobs.remove(0)
def verify_address(netsettings):
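# Rate-limited check (at most once every 30 seconds) that a render master answers at the configured address and port; the result is cached in netrender.valid_address.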
global LAST_ADDRESS_TEST
init_file()
if LAST_ADDRESS_TEST + 30 < time.time():
LAST_ADDRESS_TEST = time.time()
try:
conn = clientConnection(netsettings.server_address, netsettings.server_port, scan = False, timeout = 1)
except:
conn = None
if conn:
netrender.valid_address = True
conn.close()
else:
netrender.valid_address = False
return netrender.valid_address
class NeedValidAddress():
@classmethod
def poll(cls, context):
return super().poll(context) and verify_address(context.scene.network_render)
class NetRenderButtonsPanel():
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
bl_context = "render"
# COMPAT_ENGINES must be defined in each subclass, external engines can add themselves here
@classmethod
def poll(cls, context):
rd = context.scene.render
return rd.engine == 'NET_RENDER' and rd.use_game_engine == False
# Settings panel, used in the scene for now.
class RENDER_PT_network_settings(NetRenderButtonsPanel, bpy.types.Panel):
bl_label = "Network Settings"
COMPAT_ENGINES = {'NET_RENDER'}
@classmethod
def poll(cls, context):
return super().poll(context)
def draw(self, context):
layout = self.layout
netsettings = context.scene.network_render
verify_address(netsettings)
layout.prop(netsettings, "mode", expand=True)
if netsettings.mode in ("RENDER_MASTER", "RENDER_SLAVE"):
layout.operator("render.netclientstart", icon='PLAY')
layout.prop(netsettings, "path")
split = layout.split(percentage=0.7)
col = split.column()
col.label(text="Server Address:")
col.prop(netsettings, "server_address", text="")
col = split.column()
col.label(text="Port:")
col.prop(netsettings, "server_port", text="")
if netsettings.mode != "RENDER_MASTER":
layout.operator("render.netclientscan", icon='FILE_REFRESH', text="")
if not netrender.valid_address:
layout.label(text="No master at specified address")
layout.operator("render.netclientweb", icon='QUESTION')
class RENDER_PT_network_slave_settings(NetRenderButtonsPanel, bpy.types.Panel):
bl_label = "Slave Settings"
COMPAT_ENGINES = {'NET_RENDER'}
@classmethod
def poll(cls, context):
scene = context.scene
return super().poll(context) and scene.network_render.mode == "RENDER_SLAVE"
def draw(self, context):
layout = self.layout
rd = context.scene.render
netsettings = context.scene.network_render
layout.prop(netsettings, "use_slave_clear")
layout.prop(netsettings, "use_slave_thumb")
layout.prop(netsettings, "use_slave_output_log")
layout.label(text="Threads:")
layout.prop(rd, "threads_mode", expand=True)
col = layout.column()
col.enabled = rd.threads_mode == 'FIXED'
col.prop(rd, "threads")
class RENDER_PT_network_master_settings(NetRenderButtonsPanel, bpy.types.Panel):
bl_label = "Master Settings"
COMPAT_ENGINES = {'NET_RENDER'}
@classmethod
def poll(cls, context):
scene = context.scene
return super().poll(context) and scene.network_render.mode == "RENDER_MASTER"
def draw(self, context):
layout = self.layout
netsettings = context.scene.network_render
layout.prop(netsettings, "use_master_broadcast")
layout.prop(netsettings, "use_master_clear")
class RENDER_PT_network_job(NetRenderButtonsPanel, bpy.types.Panel):
bl_label = "Job Settings"
COMPAT_ENGINES = {'NET_RENDER'}
@classmethod
def poll(cls, context):
scene = context.scene
return super().poll(context) and scene.network_render.mode == "RENDER_CLIENT"
def draw(self, context):
layout = self.layout
netsettings = context.scene.network_render
verify_address(netsettings)
if netsettings.server_address != "[default]":
layout.operator("render.netclientanim", icon='RENDER_ANIMATION')
layout.operator("render.netclientsend", icon='FILE_BLEND')
layout.operator("render.netclientsendframe", icon='RENDER_STILL')
if netsettings.job_id:
row = layout.row()
row.operator("render.render", text="Get Image", icon='RENDER_STILL')
row.operator("render.render", text="Get Animation", icon='RENDER_ANIMATION').animation = True
split = layout.split(percentage=0.3)
col = split.column()
col.label(text="Type:")
col.label(text="Name:")
col.label(text="Category:")
col = split.column()
col.prop(netsettings, "job_type", text="")
col.prop(netsettings, "job_name", text="")
col.prop(netsettings, "job_category", text="")
row = layout.row()
row.prop(netsettings, "priority")
row.prop(netsettings, "chunks")
class RENDER_PT_network_job_vcs(NetRenderButtonsPanel, bpy.types.Panel):
bl_label = "VCS Job Settings"
COMPAT_ENGINES = {'NET_RENDER'}
@classmethod
def poll(cls, context):
scene = context.scene
return (super().poll(context)
and scene.network_render.mode == "RENDER_CLIENT"
and scene.network_render.job_type == "JOB_VCS")
def draw(self, context):
layout = self.layout
netsettings = context.scene.network_render
layout.operator("render.netclientvcsguess", icon='FILE_REFRESH', text="")
layout.prop(netsettings, "vcs_system")
layout.prop(netsettings, "vcs_revision")
layout.prop(netsettings, "vcs_rpath")
layout.prop(netsettings, "vcs_wpath")
class RENDER_PT_network_slaves(NeedValidAddress, NetRenderButtonsPanel, bpy.types.Panel):
bl_label = "Slaves Status"
COMPAT_ENGINES = {'NET_RENDER'}
@classmethod
def poll(cls, context):
netsettings = context.scene.network_render
return super().poll(context) and netsettings.mode == "RENDER_CLIENT"
def draw(self, context):
layout = self.layout
netsettings = context.scene.network_render
row = layout.row()
row.template_list(netsettings, "slaves", netsettings, "active_slave_index", rows=2)
sub = row.column(align=True)
sub.operator("render.netclientslaves", icon='FILE_REFRESH', text="")
sub.operator("render.netclientblacklistslave", icon='ZOOMOUT', text="")
if len(netrender.slaves) > netsettings.active_slave_index >= 0:
layout.separator()
slave = netrender.slaves[netsettings.active_slave_index]
layout.label(text="Name: " + slave.name)
layout.label(text="Address: " + slave.address[0])
layout.label(text="Seen: " + time.ctime(slave.last_seen))
layout.label(text="Stats: " + slave.stats)
class RENDER_PT_network_slaves_blacklist(NeedValidAddress, NetRenderButtonsPanel, bpy.types.Panel):
bl_label = "Slaves Blacklist"
COMPAT_ENGINES = {'NET_RENDER'}
@classmethod
def poll(cls, context):
netsettings = context.scene.network_render
return super().poll(context) and netsettings.mode == "RENDER_CLIENT"
def draw(self, context):
layout = self.layout
netsettings = context.scene.network_render
row = layout.row()
row.template_list(netsettings, "slaves_blacklist", netsettings, "active_blacklisted_slave_index", rows=2)
sub = row.column(align=True)
sub.operator("render.netclientwhitelistslave", icon='ZOOMOUT', text="")
if len(netrender.blacklist) > netsettings.active_blacklisted_slave_index >= 0:
layout.separator()
slave = netrender.blacklist[netsettings.active_blacklisted_slave_index]
layout.label(text="Name: " + slave.name)
layout.label(text="Address: " + slave.address[0])
layout.label(text="Seen: " + time.ctime(slave.last_seen))
layout.label(text="Stats: " + slave.stats)
class RENDER_PT_network_jobs(NeedValidAddress, NetRenderButtonsPanel, bpy.types.Panel):
bl_label = "Jobs"
COMPAT_ENGINES = {'NET_RENDER'}
@classmethod
def poll(cls, context):
netsettings = context.scene.network_render
return super().poll(context) and netsettings.mode == "RENDER_CLIENT"
def draw(self, context):
layout = self.layout
netsettings = context.scene.network_render
row = layout.row()
row.template_list(netsettings, "jobs", netsettings, "active_job_index", rows=2)
sub = row.column(align=True)
sub.operator("render.netclientstatus", icon='FILE_REFRESH', text="")
sub.operator("render.netclientcancel", icon='ZOOMOUT', text="")
sub.operator("render.netclientcancelall", icon='PANEL_CLOSE', text="")
sub.operator("render.netclientdownload", icon='RENDER_ANIMATION', text="")
if len(netrender.jobs) > netsettings.active_job_index >= 0:
layout.separator()
job = netrender.jobs[netsettings.active_job_index]
layout.label(text="Name: %s" % job.name)
layout.label(text="Length: %04i" % len(job))
layout.label(text="Done: %04i" % job.results[DONE])
layout.label(text="Error: %04i" % job.results[ERROR])
import properties_render
class RENDER_PT_network_output(NeedValidAddress, NetRenderButtonsPanel, bpy.types.Panel):
bl_label = "Output"
COMPAT_ENGINES = {'NET_RENDER'}
@classmethod
def poll(cls, context):
netsettings = context.scene.network_render
return super().poll(context) and netsettings.mode == "RENDER_CLIENT"
draw = properties_render.RENDER_PT_output.draw
def addProperties():
class NetRenderSettings(bpy.types.PropertyGroup):
pass
class NetRenderSlave(bpy.types.PropertyGroup):
pass
class NetRenderJob(bpy.types.PropertyGroup):
pass
bpy.utils.register_class(NetRenderSettings)
bpy.utils.register_class(NetRenderSlave)
bpy.utils.register_class(NetRenderJob)
from bpy.props import PointerProperty, StringProperty, BoolProperty, EnumProperty, IntProperty, CollectionProperty
bpy.types.Scene.network_render = PointerProperty(type=NetRenderSettings, name="Network Render", description="Network Render Settings")
NetRenderSettings.server_address = StringProperty(
name="Server address",
description="IP or name of the master render server",
maxlen = 128,
default = "[default]")
NetRenderSettings.server_port = IntProperty(
name="Server port",
description="port of the master render server",
default = 8000,
min=1,
max=65535)
NetRenderSettings.use_master_broadcast = BoolProperty(
name="Broadcast",
description="broadcast master server address on local network",
default = True)
NetRenderSettings.use_slave_clear = BoolProperty(
name="Clear on exit",
description="delete downloaded files on exit",
default = True)
NetRenderSettings.use_slave_thumb = BoolProperty(
name="Generate thumbnails",
description="Generate thumbnails on slaves instead of master",
default = False)
NetRenderSettings.use_slave_output_log = BoolProperty(
name="Output render log on console",
description="Output render text log to console as well as sending it to the master",
default = True)
NetRenderSettings.use_master_clear = BoolProperty(
name="Clear on exit",
description="delete saved files on exit",
default = False)
default_path = os.environ.get("TEMP")
if not default_path:
if os.name == 'nt':
default_path = "c:/tmp/"
else:
default_path = "/tmp/"
elif not default_path.endswith(os.sep):
default_path += os.sep
NetRenderSettings.path = StringProperty(
name="Path",
description="Path for temporary files",
maxlen = 128,
default = default_path,
subtype='FILE_PATH')
NetRenderSettings.job_type = EnumProperty(
items=(
("JOB_BLENDER", "Blender", "Standard Blender Job"),
("JOB_PROCESS", "Process", "Custom Process Job"),
("JOB_VCS", "VCS", "Version Control System Managed Job"),
),
name="Job Type",
description="Type of render job",
default="JOB_BLENDER")
NetRenderSettings.job_name = StringProperty(
name="Job name",
description="Name of the job",
maxlen = 128,
default = "[default]")
NetRenderSettings.job_category = StringProperty(
name="Job category",
description="Category of the job",
maxlen = 128,
default = "")
NetRenderSettings.chunks = IntProperty(
name="Chunks",
description="Number of frame to dispatch to each slave in one chunk",
default = 5,
min=1,
max=65535)
NetRenderSettings.priority = IntProperty(
name="Priority",
description="Priority of the job",
default = 1,
min=1,
max=10)
NetRenderSettings.vcs_wpath = StringProperty(
name="Working Copy",
description="Path of the local working copy",
maxlen = 1024,
default = "")
NetRenderSettings.vcs_rpath = StringProperty(
name="Remote Path",
description="Path of the server copy (protocol specific)",
maxlen = 1024,
default = "")
NetRenderSettings.vcs_revision = StringProperty(
name="Revision",
description="Revision for this job",
maxlen = 256,
default = "")
NetRenderSettings.vcs_system = StringProperty(
name="VCS",
description="Version Control System",
maxlen = 64,
default = "Subversion")
NetRenderSettings.job_id = StringProperty(
name="Network job id",
description="id of the last sent render job",
maxlen = 64,
default = "")
NetRenderSettings.active_slave_index = IntProperty(
name="Index of the active slave",
description="",
default = -1,
min= -1,
max=65535)
NetRenderSettings.active_blacklisted_slave_index = IntProperty(
name="Index of the active slave",
description="",
default = -1,
min= -1,
max=65535)
NetRenderSettings.active_job_index = IntProperty(
name="Index of the active job",
description="",
default = -1,
min= -1,
max=65535)
NetRenderSettings.mode = EnumProperty(
items=(
("RENDER_CLIENT", "Client", "Act as render client"),
("RENDER_MASTER", "Master", "Act as render master"),
("RENDER_SLAVE", "Slave", "Act as render slave"),
),
name="Network mode",
description="Mode of operation of this instance",
default="RENDER_CLIENT")
NetRenderSettings.slaves = CollectionProperty(type=NetRenderSlave, name="Slaves", description="")
NetRenderSettings.slaves_blacklist = CollectionProperty(type=NetRenderSlave, name="Slaves Blacklist", description="")
NetRenderSettings.jobs = CollectionProperty(type=NetRenderJob, name="Job List", description="")
NetRenderSlave.name = StringProperty(
name="Name of the slave",
description="",
maxlen = 64,
default = "")
NetRenderJob.name = StringProperty(
name="Name of the job",
description="",
maxlen = 128,
default = "")

@ -1,313 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import sys, os
import re
import http, http.client, http.server, urllib, socket
import subprocess, shutil, time, hashlib, zlib
import netrender.model
try:
import bpy
except:
bpy = None
VERSION = bytes("1.3", encoding='utf8')
# Jobs status
JOB_WAITING = 0 # before all data has been entered
JOB_PAUSED = 1 # paused by user
JOB_FINISHED = 2 # finished rendering
JOB_QUEUED = 3 # ready to be dispatched
JOB_STATUS_TEXT = {
JOB_WAITING: "Waiting",
JOB_PAUSED: "Paused",
JOB_FINISHED: "Finished",
JOB_QUEUED: "Queued"
}
# Frames status
QUEUED = 0
DISPATCHED = 1
DONE = 2
ERROR = 3
FRAME_STATUS_TEXT = {
QUEUED: "Queued",
DISPATCHED: "Dispatched",
DONE: "Done",
ERROR: "Error"
}
class DirectoryContext:
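# Context manager that temporarily changes the working directory to the given path and restores the previous one on exit.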
def __init__(self, path):
self.path = path
def __enter__(self):
self.curdir = os.path.abspath(os.curdir)
os.chdir(self.path)
def __exit__(self, exc_type, exc_value, traceback):
os.chdir(self.curdir)
class BreakableIncrementedSleep:
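# Sleep helper with a growing timeout: sleep() waits one second at a time, breaking early when break_fct() returns True, and each call increases the next timeout by increment, capped at max_timeout.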
def __init__(self, increment, default_timeout, max_timeout, break_fct):
self.increment = increment
self.default = default_timeout
self.max = max_timeout
self.current = self.default
self.break_fct = break_fct
def reset(self):
self.current = self.default
def increase(self):
self.current = min(self.current + self.increment, self.max)
def sleep(self):
for i in range(self.current):
time.sleep(1)
if self.break_fct():
break
self.increase()
def responseStatus(conn):
response = conn.getresponse()
response.read()
return response.status
def reporting(report, message, errorType = None):
if errorType:
t = 'ERROR'
else:
t = 'INFO'
if report:
report(t, message)
return None
elif errorType:
raise errorType(message)
else:
return None
def clientScan(report = None):
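# Auto-detect the master: listen on UDP port 8000 (up to 30 seconds) for the master's broadcast, which carries its HTTP port, and return (address, port); fall back to ("", 8000) on timeout.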
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.settimeout(30)
s.bind(('', 8000))
buf, address = s.recvfrom(64)
address = address[0]
port = int(str(buf, encoding='utf8'))
reporting(report, "Master server found")
return (address, port)
except socket.timeout:
reporting(report, "No master server on network", IOError)
return ("", 8000) # return default values
def clientConnection(address, port, report = None, scan = True, timeout = 5):
if address == "[default]":
# calling the operator from python doesn't work here, the scene isn't in context
# if bpy:
# bpy.ops.render.netclientscan()
# else:
if not scan:
return None
address, port = clientScan()
if address == "":
return None
try:
conn = http.client.HTTPConnection(address, port, timeout = timeout)
if conn:
if clientVerifyVersion(conn):
return conn
else:
conn.close()
reporting(report, "Incorrect master version", ValueError)
except BaseException as err:
if report:
report('ERROR', str(err))
return None
else:
print(err)
return None
def clientVerifyVersion(conn):
conn.request("GET", "/version")
response = conn.getresponse()
if response.status != http.client.OK:
conn.close()
return False
server_version = response.read()
if server_version != VERSION:
print("Incorrect server version!")
print("expected", str(VERSION, encoding='utf8'), "received", str(server_version, encoding='utf8'))
return False
return True
def fileURL(job_id, file_index):
return "/file_%s_%i" % (job_id, file_index)
def logURL(job_id, frame_number):
return "/log_%s_%i.log" % (job_id, frame_number)
def renderURL(job_id, frame_number):
return "/render_%s_%i.exr" % (job_id, frame_number)
def cancelURL(job_id):
return "/cancel_%s" % (job_id)
def hashFile(path):
f = open(path, "rb")
value = hashData(f.read())
f.close()
return value
def hashData(data):
m = hashlib.md5()
m.update(data)
return m.hexdigest()
def prefixPath(prefix_directory, file_path, prefix_path, force = False):
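# Remap a path coming from the client into the local job directory: absolute paths that are missing locally (or when force is True) are re-rooted under prefix_directory, keeping the part below prefix_path when possible; relative paths are simply joined onto prefix_directory.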
if (os.path.isabs(file_path) or
len(file_path) >= 3 and (file_path[1:3] == ":/" or file_path[1:3] == ":\\") or # Windows absolute paths don't count as absolute on unix, have to handle them ourselves
file_path[0] == "/" or file_path[0] == "\\"): # and vice versa
# if an absolute path, make sure path exists, if it doesn't, use relative local path
full_path = file_path
if force or not os.path.exists(full_path):
p, n = os.path.split(os.path.normpath(full_path))
if prefix_path and p.startswith(prefix_path):
if len(prefix_path) < len(p):
directory = os.path.join(prefix_directory, p[len(prefix_path)+1:]) # +1 to remove separator
if not os.path.exists(directory):
os.mkdir(directory)
else:
directory = prefix_directory
full_path = os.path.join(directory, n)
else:
full_path = os.path.join(prefix_directory, n)
else:
full_path = os.path.join(prefix_directory, file_path)
return full_path
def getResults(server_address, server_port, job_id, resolution_x, resolution_y, resolution_percentage, frame_ranges):
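# Save a temporary copy of the current .blend and render it in a background Blender process; the trailing "GetResults" arguments make _getResults() below switch that instance to the NET_RENDER engine so the requested frames are fetched from the master.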
if bpy.app.debug:
print("=============================================")
print("============= FETCHING RESULTS ==============")
frame_arguments = []
for r in frame_ranges:
if len(r) == 2:
frame_arguments.extend(["-s", str(r[0]), "-e", str(r[1]), "-a"])
else:
frame_arguments.extend(["-f", str(r[0])])
filepath = os.path.join(bpy.app.tempdir, "netrender_temp.blend")
bpy.ops.wm.save_as_mainfile(filepath=filepath, copy=True, check_existing=False)
arguments = [sys.argv[0], "-b", "-noaudio", filepath, "-o", bpy.path.abspath(bpy.context.scene.render.filepath), "-P", __file__] + frame_arguments + ["--", "GetResults", server_address, str(server_port), job_id, str(resolution_x), str(resolution_y), str(resolution_percentage)]
if bpy.app.debug:
print("Starting subprocess:")
print(" ".join(arguments))
process = subprocess.Popen(arguments, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
while process.poll() is None:
stdout = process.stdout.read(1024)
if bpy.app.debug:
print(str(stdout, encoding='utf-8'), end="")
# read leftovers if needed
stdout = process.stdout.read()
if bpy.app.debug:
print(str(stdout, encoding='utf-8'))
os.remove(filepath)
if bpy.app.debug:
print("=============================================")
return
def _getResults(server_address, server_port, job_id, resolution_x, resolution_y, resolution_percentage):
render = bpy.context.scene.render
netsettings = bpy.context.scene.network_render
netsettings.server_address = server_address
netsettings.server_port = int(server_port)
netsettings.job_id = job_id
render.engine = 'NET_RENDER'
render.resolution_x = int(resolution_x)
render.resolution_y = int(resolution_y)
render.resolution_percentage = int(resolution_percentage)
render.use_full_sample = False
render.use_compositing = False
render.use_border = False
def getFileInfo(filepath, infos):
process = subprocess.Popen([sys.argv[0], "-b", "-noaudio", filepath, "-P", __file__, "--", "FileInfo"] + infos, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout = bytes()
while process.poll() is None:
stdout += process.stdout.read(1024)
# read leftovers if needed
stdout += process.stdout.read()
stdout = str(stdout, encoding="utf8")
values = [eval(v[1:].strip()) for v in stdout.split("\n") if v.startswith("$")]
return values
if __name__ == "__main__":
try:
start = sys.argv.index("--") + 1
except ValueError:
start = 0
action, *args = sys.argv[start:]
if action == "FileInfo":
for info in args:
print("$", eval(info))
elif action == "GetResults":
_getResults(args[0], args[1], args[2], args[3], args[4], args[5])

@ -1,72 +0,0 @@
import sys, os
import re
import subprocess
from netrender.utils import *
class AbstractVCS:
name = "ABSTRACT VCS"
def __init__(self):
pass
def update(self, info):
"""update(info)
Update a working copy to the specified revision.
If working copy doesn't exist, do a full get from server to create it.
[info] model.VersioningInfo instance, specifies the working path, remote path and version number."""
pass
def revision(self, path):
"""revision(path)
return the current revision of the specified working copy path"""
pass
def path(self, path):
"""path(path)
return the remote path of the specified working copy path"""
pass
class Subversion(AbstractVCS):
name = "Subversion"
def __init__(self):
super().__init__()
self.version_exp = re.compile("([0-9]*)")
self.path_exp = re.compile("URL: (.*)")
def update(self, info):
if not os.path.exists(info.wpath):
base, folder = os.path.split(info.wpath)
with DirectoryContext(base):
subprocess.call(["svn", "co", "%s@%s" % (info.rpath, str(info.revision)), folder])
else:
with DirectoryContext(info.wpath):
subprocess.call(["svn", "up", "--accept", "theirs-full", "-r", str(info.revision)])
def revision(self, path):
if not os.path.exists(path):
return
with DirectoryContext(path):
stdout = subprocess.check_output(["svnversion"])
match = self.version_exp.match(str(stdout, encoding="utf-8"))
if match:
return match.group(1)
def path(self, path):
if not os.path.exists(path):
return
with DirectoryContext(path):
stdout = subprocess.check_output(["svn", "info"])
match = self.path_exp.search(str(stdout, encoding="utf-8"))
if match:
return match.group(1)
SYSTEMS = {
Subversion.name: Subversion()
}

@ -97,10 +97,17 @@ def modules(module_cache):
break
if body_info:
mod = ModuleType(mod_name)
mod.bl_info = ast.literal_eval(body.value)
mod.__file__ = mod_path
mod.__time__ = os.path.getmtime(mod_path)
try:
mod = ModuleType(mod_name)
mod.bl_info = ast.literal_eval(body.value)
mod.__file__ = mod_path
mod.__time__ = os.path.getmtime(mod_path)
except:
print("AST error in module %s" % mod_name)
import traceback
traceback.print_exc()
raise
return mod
else:
return None

@ -28,6 +28,12 @@ The main function to use is: update_data_paths(...)
IS_TESTING = False
def drepr(string):
# is there a less awkward way to do this in python? re.escape also escapes
# single-quoted strings, so we can't use it.
return '"%s"' % repr(string)[1:-1].replace("\"", "\\\"").replace("\\'", "'")
class DataPathBuilder(object):
__slots__ = ("data_path", )
""" Dummy class used to parse fcurve and driver data paths.
@ -40,7 +46,12 @@ class DataPathBuilder(object):
return DataPathBuilder(self.data_path + (str_value, ))
def __getitem__(self, key):
str_value = '["%s"]' % key
if type(key) is int:
str_value = '[%d]' % key
elif type(key) is str:
str_value = '[%s]' % drepr(key)
else:
raise Exception("unsupported accessor %r of type %r (internal error)" % (key, type(key)))
return DataPathBuilder(self.data_path + (str_value, ))
def resolve(self, real_base, rna_update_from_map=None):
@ -170,6 +181,15 @@ def update_data_paths(rna_update):
continue
for fcurve in anim_data.drivers:
data_path = fcurve.data_path
data_path_new = find_path_new(anim_data_base, data_path, rna_update_dict, rna_update_from_map)
# print(data_path_new)
if data_path_new != data_path:
if not IS_TESTING:
fcurve.data_path = data_path_new
fcurve.driver.is_valid = True # reset to allow this to work again
print("driver-fcurve (%s): %s -> %s" % (id_data.name, data_path, data_path_new))
for var in fcurve.driver.variables:
if var.type == 'SINGLE_PROP':
for tar in var.targets:

@ -182,10 +182,8 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
_global_loaded_modules[:] = []
user_path = user_script_path()
for base_path in script_paths():
for path_subdir in ("", "ui", "op", "io", "keyingsets", "modules"):
for path_subdir in ("startup", "modules"):
path = _os.path.join(base_path, path_subdir)
if _os.path.isdir(path):
_sys_path_ensure(path)
@ -194,9 +192,6 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
if path_subdir == "modules":
continue
if user_path != base_path and path_subdir == "":
continue # avoid loading 2.4x scripts
for mod in modules_from_path(path, loaded_modules):
test_register(mod)
@ -357,7 +352,9 @@ def keyconfig_set(filepath):
keyconfigs_old = keyconfigs[:]
try:
exec(compile(open(filepath).read(), filepath, 'exec'), {"__file__": filepath})
file = open(filepath)
exec(compile(file.read(), filepath, 'exec'), {"__file__": filepath})
file.close()
except:
import traceback
traceback.print_exc()
@ -431,6 +428,7 @@ def _bpy_module_classes(module, is_registered=False):
def register_module(module, verbose=False):
if verbose:
print("bpy.utils.register_module(%r): ..." % module)
cls = None
for cls in _bpy_module_classes(module, is_registered=False):
if verbose:
print(" %r" % cls)
@ -438,12 +436,11 @@ def register_module(module, verbose=False):
register_class(cls)
except:
print("bpy.utils.register_module(): failed to registering class %r" % cls)
print("\t", path, "line", line)
import traceback
traceback.print_exc()
if verbose:
print("done.\n")
if "cls" not in locals():
if cls is None:
raise Exception("register_module(%r): defines no classes" % module)
@ -457,7 +454,6 @@ def unregister_module(module, verbose=False):
unregister_class(cls)
except:
print("bpy.utils.unregister_module(): failed to unregistering class %r" % cls)
print("\t", path, "line", line)
import traceback
traceback.print_exc()
if verbose:

@ -160,9 +160,9 @@ if __name__ == "__main__":
from bpyml_test import *
draw = [
ui() [
split() [
column() [
ui()[
split()[
column()[
prop(data='context.scene.render', property='use_stamp_time', text='Time'),
prop(data='context.scene.render', property='use_stamp_date', text='Date'),
prop(data='context.scene.render', property='use_stamp_render_time', text='RenderTime'),
@ -173,7 +173,7 @@ if __name__ == "__main__":
prop(data='context.scene.render', property='use_stamp_marker', text='Marker'),
prop(data='context.scene.render', property='use_stamp_sequencer_strip', text='Seq. Strip')
],
column() [
column()[
active(expr='context.scene.render.use_stamp'),
prop(data='context.scene.render', property='stamp_foreground', slider=True),
prop(data='context.scene.render', property='stamp_background', slider=True),
@ -181,9 +181,9 @@ if __name__ == "__main__":
prop(data='context.scene.render', property='stamp_font_size', text='Font Size')
]
],
split(percentage=0.2) [
split(percentage=0.2)[
prop(data='context.scene.render', property='use_stamp_note', text='Note'),
row() [
row()[
active(expr='context.scene.render.use_stamp_note'),
prop(data='context.scene.render', property='stamp_note_text', text='')
]

@ -304,15 +304,3 @@ def banner(context):
sc.prompt = PROMPT
return {'FINISHED'}
def register():
pass
def unregister():
pass
if __name__ == "__main__":
register()

@ -76,14 +76,3 @@ def banner(context):
sc.prompt = os.getcwd() + PROMPT
return {'FINISHED'}
def register():
pass
def unregister():
pass
if __name__ == "__main__":
register()

@ -207,7 +207,7 @@ class InfoPropertyRNA:
self.fixed_type = None
if self.type == "enum":
self.enum_items[:] = rna_prop.items.keys()
self.enum_items[:] = rna_prop.enum_items.keys()
self.is_enum_flag = rna_prop.is_enum_flag
else:
self.is_enum_flag = False
@ -253,7 +253,7 @@ class InfoPropertyRNA:
return "%s=%s" % (self.identifier, default)
return self.identifier
def get_type_description(self, as_ret=False, as_arg=False, class_fmt="%s"):
def get_type_description(self, as_ret=False, as_arg=False, class_fmt="%s", collection_id="Collection"):
type_str = ""
if self.fixed_type is None:
type_str += self.type
@ -277,9 +277,9 @@ class InfoPropertyRNA:
else:
if self.type == "collection":
if self.collection_type:
collection_str = (class_fmt % self.collection_type.identifier) + " collection of "
collection_str = (class_fmt % self.collection_type.identifier) + (" %s of " % collection_id)
else:
collection_str = "Collection of "
collection_str = "%s of " % collection_id
else:
collection_str = ""

@ -51,8 +51,8 @@ def textWrap(text, length=70):
def write_sysinfo(op):
output_filename = "system-info.txt"
if output_filename in bpy.data.texts.keys():
output = bpy.data.texts[output_filename]
output = bpy.data.texts.get(output_filename)
if output:
output.clear()
else:
output = bpy.data.texts.new(name=output_filename)
@ -94,16 +94,19 @@ def write_sysinfo(op):
output.write('autosave: {}\n'.format(bpy.utils.user_resource('AUTOSAVE')))
output.write('tempdir: {}\n'.format(bpy.app.tempdir))
output.write('\nOpenGL\n')
output.write(lilies)
output.write('renderer:\t{}\n'.format(bgl.glGetString(bgl.GL_RENDERER)))
output.write('vendor:\t\t{}\n'.format(bgl.glGetString(bgl.GL_VENDOR)))
output.write('version:\t{}\n'.format(bgl.glGetString(bgl.GL_VERSION)))
output.write('extensions:\n')
if bpy.app.background:
output.write('\nOpenGL: missing, background mode\n')
else:
output.write('\nOpenGL\n')
output.write(lilies)
output.write('renderer:\t{}\n'.format(bgl.glGetString(bgl.GL_RENDERER)))
output.write('vendor:\t\t{}\n'.format(bgl.glGetString(bgl.GL_VENDOR)))
output.write('version:\t{}\n'.format(bgl.glGetString(bgl.GL_VERSION)))
output.write('extensions:\n')
glext = bgl.glGetString(bgl.GL_EXTENSIONS)
glext = textWrap(glext, 70)
for l in glext:
output.write('\t\t{}\n'.format(l))
glext = bgl.glGetString(bgl.GL_EXTENSIONS)
glext = textWrap(glext, 70)
for l in glext:
output.write('\t\t{}\n'.format(l))
op.report({'INFO'}, "System information generated in 'system-info.txt'")

@ -1,89 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy
def main(context):
from math import pi
def cleanupEulCurve(fcv):
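# Euler filter: when consecutive keys jump by pi (180 degrees) or more, shift the later key and its handles by multiples of 2*pi until it is continuous with the previous key, then write the corrected values back into the fcurve.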
keys = []
for k in fcv.keyframe_points:
keys.append([k.handle_left.copy(), k.co.copy(), k.handle_right.copy()])
for i in range(len(keys)):
cur = keys[i]
prev = keys[i - 1] if i > 0 else None
next = keys[i + 1] if i < len(keys) - 1 else None
if prev is None:
continue
th = pi
if abs(prev[1][1] - cur[1][1]) >= th: # more than 180 degree jump
fac = pi * 2.0
if prev[1][1] > cur[1][1]:
while abs(cur[1][1] - prev[1][1]) >= th: # < prev[1][1]:
cur[0][1] += fac
cur[1][1] += fac
cur[2][1] += fac
elif prev[1][1] < cur[1][1]:
while abs(cur[1][1] - prev[1][1]) >= th:
cur[0][1] -= fac
cur[1][1] -= fac
cur[2][1] -= fac
for i in range(len(keys)):
for x in range(2):
fcv.keyframe_points[i].handle_left[x] = keys[i][0][x]
fcv.keyframe_points[i].co[x] = keys[i][1][x]
fcv.keyframe_points[i].handle_right[x] = keys[i][2][x]
flist = bpy.context.active_object.animation_data.action.fcurves
for f in flist:
if f.select and f.data_path.endswith("rotation_euler"):
cleanupEulCurve(f)
class DiscontFilterOp(bpy.types.Operator):
"""Fixes the most common causes of gimbal lock in the fcurves of the active bone"""
bl_idname = "graph.euler_filter"
bl_label = "Filter out discontinuities in the active fcurves"
@classmethod
def poll(cls, context):
return context.active_object != None
def execute(self, context):
main(context)
return {'FINISHED'}
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -1,121 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# To support reload properly, try to access a package var, if it's there, reload everything
if "bpy" in locals():
import imp
if "import_bvh" in locals():
imp.reload(import_bvh)
import bpy
from bpy.props import *
from io_utils import ImportHelper, ExportHelper
class BvhImporter(bpy.types.Operator, ImportHelper):
'''Load a BVH motion capture file'''
bl_idname = "import_anim.bvh"
bl_label = "Import BVH"
filename_ext = ".bvh"
filter_glob = StringProperty(default="*.bvh", options={'HIDDEN'})
target = EnumProperty(items=(
('ARMATURE', "Armature", ""),
('OBJECT', "Object", ""),
),
name="Target",
description="Import target type.",
default='ARMATURE')
global_scale = FloatProperty(name="Scale", description="Scale the BVH by this value", min=0.0001, max=1000000.0, soft_min=0.001, soft_max=100.0, default=1.0)
frame_start = IntProperty(name="Start Frame", description="Starting frame for the animation", default=1)
use_cyclic = BoolProperty(name="Loop", description="Loop the animation playback", default=False)
rotate_mode = EnumProperty(items=(
('QUATERNION', "Quaternion", "Convert rotations to quaternions"),
('NATIVE', "Euler (Native)", "Use the rotation order defined in the BVH file"),
('XYZ', "Euler (XYZ)", "Convert rotations to euler XYZ"),
('XZY', "Euler (XZY)", "Convert rotations to euler XZY"),
('YXZ', "Euler (YXZ)", "Convert rotations to euler YXZ"),
('YZX', "Euler (YZX)", "Convert rotations to euler YZX"),
('ZXY', "Euler (ZXY)", "Convert rotations to euler ZXY"),
('ZYX', "Euler (ZYX)", "Convert rotations to euler ZYX"),
),
name="Rotation",
description="Rotation conversion.",
default='NATIVE')
def execute(self, context):
from . import import_bvh
return import_bvh.load(self, context, **self.as_keywords(ignore=("filter_glob",)))
class BvhExporter(bpy.types.Operator, ExportHelper):
'''Save a BVH motion capture file from an armature'''
bl_idname = "export_anim.bvh"
bl_label = "Export BVH"
filename_ext = ".bvh"
filter_glob = StringProperty(default="*.bvh", options={'HIDDEN'})
global_scale = FloatProperty(name="Scale", description="Scale the BVH by this value", min=0.0001, max=1000000.0, soft_min=0.001, soft_max=100.0, default=1.0)
frame_start = IntProperty(name="Start Frame", description="Starting frame to export", default=0)
frame_end = IntProperty(name="End Frame", description="End frame to export", default=0)
@classmethod
def poll(cls, context):
obj = context.object
return obj and obj.type == 'ARMATURE'
def invoke(self, context, event):
self.frame_start = context.scene.frame_start
self.frame_end = context.scene.frame_end
return super().invoke(context, event)
def execute(self, context):
if self.frame_start == 0 and self.frame_end == 0:
self.frame_start = context.scene.frame_start
self.frame_end = context.scene.frame_end
from . import export_bvh
return export_bvh.save(self, context, **self.as_keywords(ignore=("check_existing", "filter_glob")))
def menu_func_import(self, context):
self.layout.operator(BvhImporter.bl_idname, text="Motion Capture (.bvh)")
def menu_func_export(self, context):
self.layout.operator(BvhExporter.bl_idname, text="Motion Capture (.bvh)")
def register():
bpy.types.INFO_MT_file_import.append(menu_func_import)
bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
bpy.types.INFO_MT_file_import.remove(menu_func_import)
bpy.types.INFO_MT_file_export.remove(menu_func_export)
if __name__ == "__main__":
register()

@ -1,245 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) Campbell Barton
# fixes from Andrea Rugliancich
import bpy
def write_armature(context, filepath, frame_start, frame_end, global_scale=1.0):
from mathutils import Matrix, Vector, Euler
from math import degrees
file = open(filepath, "w")
obj = context.object
arm = obj.data
# Build a dictionary of children.
# None for parentless
children = {None: []}
# initialize with blank lists
for bone in arm.bones:
children[bone.name] = []
for bone in arm.bones:
children[getattr(bone.parent, "name", None)].append(bone.name)
# sort the children
for children_list in children.values():
children_list.sort()
# bone name list in the order that the bones are written
serialized_names = []
node_locations = {}
file.write("HIERARCHY\n")
def write_recursive_nodes(bone_name, indent):
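# Recursively emit the BVH HIERARCHY section: ROOT/JOINT name, OFFSET relative to the parent, CHANNELS (3 for connected bones, 6 otherwise), child joints, and an End Site block for leaf bones.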
my_children = children[bone_name]
indent_str = "\t" * indent
bone = arm.bones[bone_name]
loc = bone.head_local
node_locations[bone_name] = loc
# make relative if we can
if bone.parent:
loc = loc - node_locations[bone.parent.name]
if indent:
file.write("%sJOINT %s\n" % (indent_str, bone_name))
else:
file.write("%sROOT %s\n" % (indent_str, bone_name))
file.write("%s{\n" % indent_str)
file.write("%s\tOFFSET %.6f %.6f %.6f\n" % (indent_str, loc.x * global_scale, loc.y * global_scale, loc.z * global_scale))
if bone.use_connect and bone.parent:
file.write("%s\tCHANNELS 3 Xrotation Yrotation Zrotation\n" % indent_str)
else:
file.write("%s\tCHANNELS 6 Xposition Yposition Zposition Xrotation Yrotation Zrotation\n" % indent_str)
if my_children:
# store the location for the children
# to get their relative offset
# Write children
for child_bone in my_children:
serialized_names.append(child_bone)
write_recursive_nodes(child_bone, indent + 1)
else:
# Write the bone end.
file.write("%s\tEnd Site\n" % indent_str)
file.write("%s\t{\n" % indent_str)
loc = bone.tail_local - node_locations[bone_name]
file.write("%s\t\tOFFSET %.6f %.6f %.6f\n" % (indent_str, loc.x * global_scale, loc.y * global_scale, loc.z * global_scale))
file.write("%s\t}\n" % indent_str)
file.write("%s}\n" % indent_str)
if len(children[None]) == 1:
key = children[None][0]
serialized_names.append(key)
indent = 0
write_recursive_nodes(key, indent)
else:
# Write a dummy parent node
file.write("ROOT %s\n" % key)
file.write("{\n")
file.write("\tOFFSET 0.0 0.0 0.0\n")
file.write("\tCHANNELS 0\n") # Xposition Yposition Zposition Xrotation Yrotation Zrotation
key = None
indent = 1
write_recursive_nodes(key, indent)
file.write("}\n")
# redefine bones as sorted by serialized_names
# so we can write motion
class decorated_bone(object):
__slots__ = (\
"name", # bone name, used as key in many places
"parent", # decorated bone parent, set in a later loop
"rest_bone", # blender armature bone
"pose_bone", # blender pose bone
"pose_mat", # blender pose matrix
"rest_arm_mat", # blender rest matrix (armature space)
"rest_local_mat", # blender rest batrix (local space)
"pose_imat", # pose_mat inverted
"rest_arm_imat", # rest_arm_mat inverted
"rest_local_imat", # rest_local_mat inverted
"prev_euler", # last used euler to preserve euler compability in between keyframes
"connected", # is the bone connected to the parent bone?
)
def __init__(self, bone_name):
self.name = bone_name
self.rest_bone = arm.bones[bone_name]
self.pose_bone = obj.pose.bones[bone_name]
self.pose_mat = self.pose_bone.matrix
mat = self.rest_bone.matrix
self.rest_arm_mat = self.rest_bone.matrix_local
self.rest_local_mat = self.rest_bone.matrix
# inverted mats
self.pose_imat = self.pose_mat.copy().invert()
self.rest_arm_imat = self.rest_arm_mat.copy().invert()
self.rest_local_imat = self.rest_local_mat.copy().invert()
self.parent = None
self.prev_euler = Euler((0.0, 0.0, 0.0))
self.connected = (self.rest_bone.use_connect and self.rest_bone.parent)
def update_posedata(self):
self.pose_mat = self.pose_bone.matrix
self.pose_imat = self.pose_mat.copy().invert()
def __repr__(self):
if self.parent:
return "[\"%s\" child on \"%s\"]\n" % (self.name, self.parent.name)
else:
return "[\"%s\" root bone]\n" % (self.name)
bones_decorated = [decorated_bone(bone_name) for bone_name in serialized_names]
# Assign parents
bones_decorated_dict = {}
for dbone in bones_decorated:
bones_decorated_dict[dbone.name] = dbone
for dbone in bones_decorated:
parent = dbone.rest_bone.parent
if parent:
dbone.parent = bones_decorated_dict[parent.name]
del bones_decorated_dict
# finish assigning parents
scene = bpy.context.scene
file.write("MOTION\n")
file.write("Frames: %d\n" % (frame_end - frame_start + 1))
file.write("Frame Time: %.6f\n" % (1.0 / (scene.render.fps / scene.render.fps_base)))
for frame in range(frame_start, frame_end + 1):
scene.frame_set(frame)
for dbone in bones_decorated:
dbone.update_posedata()
for dbone in bones_decorated:
trans = Matrix.Translation(dbone.rest_bone.head_local)
itrans = Matrix.Translation(-dbone.rest_bone.head_local)
if dbone.parent:
mat_final = dbone.parent.rest_arm_mat * dbone.parent.pose_imat * dbone.pose_mat * dbone.rest_arm_imat
mat_final = itrans * mat_final * trans
loc = mat_final.translation_part() + (dbone.rest_bone.head_local - dbone.parent.rest_bone.head_local)
else:
mat_final = dbone.pose_mat * dbone.rest_arm_imat
mat_final = itrans * mat_final * trans
loc = mat_final.translation_part() + dbone.rest_bone.head
# keep eulers compatible, no jumping on interpolation.
rot = mat_final.rotation_part().invert().to_euler('XYZ', dbone.prev_euler)
if not dbone.connected:
file.write("%.6f %.6f %.6f " % (loc * global_scale)[:])
file.write("%.6f %.6f %.6f " % (-degrees(rot[0]), -degrees(rot[1]), -degrees(rot[2])))
dbone.prev_euler = rot
file.write("\n")
file.close()
print("BVH Exported: %s frames:%d\n" % (filepath, frame_end - frame_start + 1))
def save(operator, context, filepath="",
frame_start=-1,
frame_end=-1,
global_scale=1.0,
):
write_armature(context, filepath,
frame_start=frame_start,
frame_end=frame_end,
global_scale=global_scale,
)
return {'FINISHED'}
if __name__ == "__main__":
scene = bpy.context.scene
_read(bpy.data.filepath.rstrip(".blend") + ".bvh", bpy.context.object, scene.frame_start, scene.frame_end, 1.0)

@ -1,550 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) Campbell Barton
import math
from math import radians
import bpy
import mathutils
from mathutils import Vector, Euler, Matrix
class bvh_node_class(object):
__slots__ = (
'name', # bvh joint name
'parent', # bvh_node_class type or None for no parent
'children', # a list of children of this type.
'rest_head_world', # worldspace rest location for the head of this node
'rest_head_local', # localspace rest location for the head of this node
'rest_tail_world', # worldspace rest location for the tail of this node
'rest_tail_local', # worldspace rest location for the tail of this node
'channels', # list of 6 ints, -1 for an unused channel, otherwise an index for the BVH motion data lines, loc triple then rot triple
'rot_order', # a triple of indices giving the order rotation is applied. [0,1,2] is x/y/z - [None, None, None] if no rotation.
'rot_order_str', # same as above but as a string in 'XYZ' format.
'anim_data', # a list of tuples, one for each frame: (locx, locy, locz, rotx, roty, rotz), euler rotation ALWAYS stored in xyz order, even when native is used.
'has_loc', # convenience flag, bool, same as (channels[0]!=-1 or channels[1]!=-1 or channels[2]!=-1)
'has_rot', # convenience flag, bool, same as (channels[3]!=-1 or channels[4]!=-1 or channels[5]!=-1)
'temp') # use this for whatever you want
_eul_order_lookup = {\
(0, 1, 2): 'XYZ',
(0, 2, 1): 'XZY',
(1, 0, 2): 'YXZ',
(1, 2, 0): 'YZX',
(2, 0, 1): 'ZXY',
(2, 1, 0): 'ZYX'}
def __init__(self, name, rest_head_world, rest_head_local, parent, channels, rot_order):
self.name = name
self.rest_head_world = rest_head_world
self.rest_head_local = rest_head_local
self.rest_tail_world = None
self.rest_tail_local = None
self.parent = parent
self.channels = channels
self.rot_order = tuple(rot_order)
self.rot_order_str = __class__._eul_order_lookup[self.rot_order]
# convenience functions
self.has_loc = channels[0] != -1 or channels[1] != -1 or channels[2] != -1
self.has_rot = channels[3] != -1 or channels[4] != -1 or channels[5] != -1
self.children = []
# list of 6 length tuples: (lx,ly,lz, rx,ry,rz)
# even if the channels aren't used they will just be zero
#
self.anim_data = [(0, 0, 0, 0, 0, 0)]
def __repr__(self):
return 'BVH name:"%s", rest_loc:(%.3f,%.3f,%.3f), rest_tail:(%.3f,%.3f,%.3f)' %\
(self.name,\
self.rest_head_world.x, self.rest_head_world.y, self.rest_head_world.z,\
self.rest_head_world.x, self.rest_head_world.y, self.rest_head_world.z)
def read_bvh(context, file_path, rotate_mode='XYZ', global_scale=1.0):
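# Parse the HIERARCHY section into a dict of bvh_node_class objects (channels, rotation order, rest positions), then read the MOTION lines into each node's anim_data and derive tail positions from the children.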
# File loading stuff
# Open the file for importing
file = open(file_path, 'rU')
# Separate into a list of lists, each line a list of words.
file_lines = file.readlines()
# Non-standard carriage returns?
if len(file_lines) == 1:
file_lines = file_lines[0].split('\r')
# Split by whitespace.
file_lines = [ll for ll in [l.split() for l in file_lines] if ll]
# Create the hierarchy; the file must start with a HIERARCHY section
if file_lines[0][0].lower() == 'hierarchy':
#print 'Importing the BVH Hierarchy for:', file_path
pass
else:
raise ImportError("ERROR: This is not a BVH file")
bvh_nodes = {None: None}
bvh_nodes_serial = [None]
channelIndex = -1
lineIdx = 0 # An index for the file.
while lineIdx < len(file_lines) - 1:
#...
if file_lines[lineIdx][0].lower() == 'root' or file_lines[lineIdx][0].lower() == 'joint':
# Join name parts containing spaces into one word, using underscores.
if len(file_lines[lineIdx]) > 2:
file_lines[lineIdx][1] = '_'.join(file_lines[lineIdx][1:])
file_lines[lineIdx] = file_lines[lineIdx][:2]
# May need to support multiple roots here; still unsure whether multiple roots are possible.
# Make sure the names are unique- Object names will match joint names exactly and both will be unique.
name = file_lines[lineIdx][1]
#print '%snode: %s, parent: %s' % (len(bvh_nodes_serial) * ' ', name, bvh_nodes_serial[-1])
lineIdx += 2 # Increment to the next line (Offset)
rest_head_local = Vector((float(file_lines[lineIdx][1]), float(file_lines[lineIdx][2]), float(file_lines[lineIdx][3]))) * global_scale
lineIdx += 1 # Increment to the next line (Channels)
# newChannel[Xposition, Yposition, Zposition, Xrotation, Yrotation, Zrotation]
# newChannel references indices into the motion data,
# if not assigned then -1 refers to the last value, which is appended on loading with a value of zero
# We'll add a zero value onto the end of the motion data so this always refers to a value.
my_channel = [-1, -1, -1, -1, -1, -1]
my_rot_order = [None, None, None]
rot_count = 0
for channel in file_lines[lineIdx][2:]:
channel = channel.lower()
channelIndex += 1 # So the index points to the right channel
if channel == 'xposition':
my_channel[0] = channelIndex
elif channel == 'yposition':
my_channel[1] = channelIndex
elif channel == 'zposition':
my_channel[2] = channelIndex
elif channel == 'xrotation':
my_channel[3] = channelIndex
my_rot_order[rot_count] = 0
rot_count += 1
elif channel == 'yrotation':
my_channel[4] = channelIndex
my_rot_order[rot_count] = 1
rot_count += 1
elif channel == 'zrotation':
my_channel[5] = channelIndex
my_rot_order[rot_count] = 2
rot_count += 1
channels = file_lines[lineIdx][2:]
my_parent = bvh_nodes_serial[-1] # account for none
# Apply the parent's offset cumulatively
if my_parent is None:
rest_head_world = Vector(rest_head_local)
else:
rest_head_world = my_parent.rest_head_world + rest_head_local
bvh_node = bvh_nodes[name] = bvh_node_class(name, rest_head_world, rest_head_local, my_parent, my_channel, my_rot_order)
# If we have another child then we can call ourselves a parent.
bvh_nodes_serial.append(bvh_node)
# Account for an end node
if file_lines[lineIdx][0].lower() == 'end' and file_lines[lineIdx][1].lower() == 'site': # There is sometimes a name after 'End Site' but we will ignore it.
lineIdx += 2 # Increment to the next line (Offset)
rest_tail = Vector((float(file_lines[lineIdx][1]), float(file_lines[lineIdx][2]), float(file_lines[lineIdx][3]))) * global_scale
bvh_nodes_serial[-1].rest_tail_world = bvh_nodes_serial[-1].rest_head_world + rest_tail
bvh_nodes_serial[-1].rest_tail_local = bvh_nodes_serial[-1].rest_head_local + rest_tail
# Just so we can remove the Parents in a uniform way- End has kids
# so this is a placeholder
bvh_nodes_serial.append(None)
if len(file_lines[lineIdx]) == 1 and file_lines[lineIdx][0] == '}': # == ['}']
bvh_nodes_serial.pop() # Remove the last item
if len(file_lines[lineIdx]) == 1 and file_lines[lineIdx][0].lower() == 'motion':
#print '\nImporting motion data'
lineIdx += 3 # Set the cursor to the first frame
break
lineIdx += 1
# Remove the None value used for easy parent reference
del bvh_nodes[None]
# Don't use this anymore
del bvh_nodes_serial
bvh_nodes_list = bvh_nodes.values()
while lineIdx < len(file_lines):
line = file_lines[lineIdx]
for bvh_node in bvh_nodes_list:
#for bvh_node in bvh_nodes_serial:
lx = ly = lz = rx = ry = rz = 0.0
channels = bvh_node.channels
anim_data = bvh_node.anim_data
if channels[0] != -1:
lx = global_scale * float(line[channels[0]])
if channels[1] != -1:
ly = global_scale * float(line[channels[1]])
if channels[2] != -1:
lz = global_scale * float(line[channels[2]])
if channels[3] != -1 or channels[4] != -1 or channels[5] != -1:
rx = radians(float(line[channels[3]]))
ry = radians(float(line[channels[4]]))
rz = radians(float(line[channels[5]]))
# Done importing motion data #
anim_data.append((lx, ly, lz, rx, ry, rz))
lineIdx += 1
# Assign children
for bvh_node in bvh_nodes.values():
bvh_node_parent = bvh_node.parent
if bvh_node_parent:
bvh_node_parent.children.append(bvh_node)
# Now set the tip of each bvh_node
for bvh_node in bvh_nodes.values():
if not bvh_node.rest_tail_world:
if len(bvh_node.children) == 0:
# could just fail here, but rare BVH files have childless nodes
bvh_node.rest_tail_world = Vector(bvh_node.rest_head_world)
bvh_node.rest_tail_local = Vector(bvh_node.rest_head_local)
elif len(bvh_node.children) == 1:
bvh_node.rest_tail_world = Vector(bvh_node.children[0].rest_head_world)
bvh_node.rest_tail_local = bvh_node.rest_head_local + bvh_node.children[0].rest_head_local
else:
# allow this, see above
#if not bvh_node.children:
# raise 'error, bvh node has no end and no children. bad file'
# Removed temp for now
rest_tail_world = Vector((0.0, 0.0, 0.0))
rest_tail_local = Vector((0.0, 0.0, 0.0))
for bvh_node_child in bvh_node.children:
rest_tail_world += bvh_node_child.rest_head_world
rest_tail_local += bvh_node_child.rest_head_local
bvh_node.rest_tail_world = rest_tail_world * (1.0 / len(bvh_node.children))
bvh_node.rest_tail_local = rest_tail_local * (1.0 / len(bvh_node.children))
# Make sure the tail isn't at the same location as the head.
if (bvh_node.rest_tail_local - bvh_node.rest_head_local).length <= 0.001 * global_scale:
bvh_node.rest_tail_local.y = bvh_node.rest_tail_local.y + global_scale / 10
bvh_node.rest_tail_world.y = bvh_node.rest_tail_world.y + global_scale / 10
return bvh_nodes
def bvh_node_dict2objects(context, bvh_name, bvh_nodes, rotate_mode='NATIVE', frame_start=1, IMPORT_LOOP=False):
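# Alternative import target: create one empty object per BVH node, parent the empties to mirror the hierarchy, and keyframe delta_location / delta_rotation_euler from each node's anim_data.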
if frame_start < 1:
frame_start = 1
scene = context.scene
for obj in scene.objects:
obj.select = False
objects = []
def add_ob(name):
obj = bpy.data.objects.new(name, None)
scene.objects.link(obj)
objects.append(obj)
obj.select = True
# nicer drawing.
obj.empty_draw_type = 'CUBE'
obj.empty_draw_size = 0.1
return obj
# Add objects
for name, bvh_node in bvh_nodes.items():
bvh_node.temp = add_ob(name)
bvh_node.temp.rotation_mode = bvh_node.rot_order_str[::-1]
# Parent the objects
for bvh_node in bvh_nodes.values():
for bvh_node_child in bvh_node.children:
bvh_node_child.temp.parent = bvh_node.temp
# Offset
for bvh_node in bvh_nodes.values():
# Make relative to the parent's offset
bvh_node.temp.location = bvh_node.rest_head_local
# Add tail objects
for name, bvh_node in bvh_nodes.items():
if not bvh_node.children:
ob_end = add_ob(name + '_end')
ob_end.parent = bvh_node.temp
ob_end.location = bvh_node.rest_tail_world - bvh_node.rest_head_world
for name, bvh_node in bvh_nodes.items():
obj = bvh_node.temp
for frame_current in range(len(bvh_node.anim_data)):
lx, ly, lz, rx, ry, rz = bvh_node.anim_data[frame_current]
if bvh_node.has_loc:
obj.delta_location = Vector((lx, ly, lz)) - bvh_node.rest_head_world
obj.keyframe_insert("delta_location", index=-1, frame=frame_start + frame_current)
if bvh_node.has_rot:
obj.delta_rotation_euler = rx, ry, rz
obj.keyframe_insert("delta_rotation_euler", index=-1, frame=frame_start + frame_current)
return objects
def bvh_node_dict2armature(context, bvh_name, bvh_nodes, rotate_mode='XYZ', frame_start=1, IMPORT_LOOP=False):
if frame_start < 1:
frame_start = 1
# Add the new armature,
scene = context.scene
for obj in scene.objects:
obj.select = False
arm_data = bpy.data.armatures.new(bvh_name)
arm_ob = bpy.data.objects.new(bvh_name, arm_data)
scene.objects.link(arm_ob)
arm_ob.select = True
scene.objects.active = arm_ob
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
bpy.ops.object.mode_set(mode='EDIT', toggle=False)
# Get the average bone length for zero length bones, we may not use this.
average_bone_length = 0.0
nonzero_count = 0
for bvh_node in bvh_nodes.values():
l = (bvh_node.rest_head_local - bvh_node.rest_tail_local).length
if l:
average_bone_length += l
nonzero_count += 1
# In very rare cases all bones could be zero length.
if not average_bone_length:
average_bone_length = 0.1
else:
# Normal operation
average_bone_length = average_bone_length / nonzero_count
# XXX, annoying, remove bone.
while arm_data.edit_bones:
arm_data.edit_bones.remove(arm_data.edit_bones[-1])
ZERO_AREA_BONES = []
for name, bvh_node in bvh_nodes.items():
# New editbone
bone = bvh_node.temp = arm_data.edit_bones.new(name)
bone.head = bvh_node.rest_head_world
bone.tail = bvh_node.rest_tail_world
# ZERO AREA BONES.
if (bone.head - bone.tail).length < 0.001:
if bvh_node.parent:
ofs = bvh_node.parent.rest_head_local - bvh_node.parent.rest_tail_local
if ofs.length: # is our parent zero length also?? unlikely
bone.tail = bone.tail + ofs
else:
bone.tail.y = bone.tail.y + average_bone_length
else:
bone.tail.y = bone.tail.y + average_bone_length
ZERO_AREA_BONES.append(bone.name)
for bvh_node in bvh_nodes.values():
if bvh_node.parent:
# bvh_node.temp is the Editbone
# Set the bone parent
bvh_node.temp.parent = bvh_node.parent.temp
# Set the connection state
if not bvh_node.has_loc and\
bvh_node.parent and\
bvh_node.parent.temp.name not in ZERO_AREA_BONES and\
bvh_node.parent.rest_tail_local == bvh_node.rest_head_local:
bvh_node.temp.use_connect = True
# Replace the editbone with the editbone name,
# to avoid memory errors accessing the editbone outside editmode
for bvh_node in bvh_nodes.values():
bvh_node.temp = bvh_node.temp.name
# Now Apply the animation to the armature
# Get armature animation data
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
pose = arm_ob.pose
pose_bones = pose.bones
if rotate_mode == 'NATIVE':
for bvh_node in bvh_nodes.values():
bone_name = bvh_node.temp # may not be the same name as the bvh_node, could have been shortened.
pose_bone = pose_bones[bone_name]
pose_bone.rotation_mode = bvh_node.rot_order_str
elif rotate_mode != 'QUATERNION':
for pose_bone in pose_bones:
pose_bone.rotation_mode = rotate_mode
else:
# Quats default
pass
context.scene.update()
arm_ob.animation_data_create()
action = bpy.data.actions.new(name=bvh_name)
arm_ob.animation_data.action = action
# Replace the bvh_node.temp (currently an editbone)
# With a tuple (pose_bone, armature_bone, bone_rest_matrix, bone_rest_matrix_inv)
for bvh_node in bvh_nodes.values():
bone_name = bvh_node.temp # may not be the same name as the bvh_node, could have been shortened.
pose_bone = pose_bones[bone_name]
rest_bone = arm_data.bones[bone_name]
bone_rest_matrix = rest_bone.matrix_local.rotation_part()
bone_rest_matrix_inv = Matrix(bone_rest_matrix)
bone_rest_matrix_inv.invert()
bone_rest_matrix_inv.resize4x4()
bone_rest_matrix.resize4x4()
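# Promote the 3x3 rest rotations to 4x4 so they can be combined with translation matrices below.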
bvh_node.temp = (pose_bone, rest_bone, bone_rest_matrix, bone_rest_matrix_inv)
# Make a dict for fast access without rebuilding a list all the time.
# KEYFRAME METHOD, SLOW, USE IPOS DIRECT
# TODO: use f-point samples instead (Aligorith)
if rotate_mode != 'QUATERNION':
prev_euler = [Euler() for i in range(len(bvh_nodes))]
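# Keep the previous frame's euler per bone so to_euler() can pick the compatible
# solution and avoid sudden axis flips between keyframes.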
# Animate the data, the last used bvh_node will do since they all have the same number of frames
for frame_current in range(len(bvh_node.anim_data) - 1): # skip the first frame (rest frame)
# print frame_current
# if frame_current==40: # debugging
# break
scene.frame_set(frame_start + frame_current)
# The current frame is set above, so keyframe_insert() below uses it by default
for i, bvh_node in enumerate(bvh_nodes.values()):
pose_bone, bone, bone_rest_matrix, bone_rest_matrix_inv = bvh_node.temp
lx, ly, lz, rx, ry, rz = bvh_node.anim_data[frame_current + 1]
if bvh_node.has_rot:
# apply rotation order and convert to XYZ
# note that the rot_order_str is reversed.
bone_rotation_matrix = Euler((rx, ry, rz), bvh_node.rot_order_str[::-1]).to_matrix().resize4x4()
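# Change of basis: conjugating by the rest matrix expresses the BVH joint rotation in the bone's local (rest) space.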
bone_rotation_matrix = bone_rest_matrix_inv * bone_rotation_matrix * bone_rest_matrix
if rotate_mode == 'QUATERNION':
pose_bone.rotation_quaternion = bone_rotation_matrix.to_quat()
else:
euler = bone_rotation_matrix.to_euler(bvh_node.rot_order_str, prev_euler[i])
pose_bone.rotation_euler = euler
prev_euler[i] = euler
if bvh_node.has_loc:
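# Convert the world-space offset from the rest head into the bone's local space to get the pose location.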
pose_bone.location = (bone_rest_matrix_inv * Matrix.Translation(Vector((lx, ly, lz)) - bvh_node.rest_head_local)).translation_part()
if bvh_node.has_loc:
pose_bone.keyframe_insert("location")
if bvh_node.has_rot:
if rotate_mode == 'QUATERNION':
pose_bone.keyframe_insert("rotation_quaternion")
else:
pose_bone.keyframe_insert("rotation_euler")
for cu in action.fcurves:
if IMPORT_LOOP:
pass # 2.5 doesn't have cyclic now?
for bez in cu.keyframe_points:
bez.interpolation = 'LINEAR'
return arm_ob
def load(operator, context, filepath="", target='ARMATURE', rotate_mode='NATIVE', global_scale=1.0, use_cyclic=False, frame_start=1):
import time
t1 = time.time()
print('\tparsing bvh %r...' % filepath, end="")
bvh_nodes = read_bvh(context, filepath,
rotate_mode=rotate_mode,
global_scale=global_scale)
print('%.4f' % (time.time() - t1))
frame_orig = context.scene.frame_current
t1 = time.time()
print('\timporting to blender...', end="")
bvh_name = bpy.path.display_name_from_filepath(filepath)
if target == 'ARMATURE':
bvh_node_dict2armature(context, bvh_name, bvh_nodes,
rotate_mode=rotate_mode,
frame_start=frame_start,
IMPORT_LOOP=use_cyclic)
elif target == 'OBJECT':
bvh_node_dict2objects(context, bvh_name, bvh_nodes,
rotate_mode=rotate_mode,
frame_start=frame_start,
IMPORT_LOOP=use_cyclic)
else:
raise Exception("invalid type")
print('Done in %.4f\n' % (time.time() - t1))
context.scene.frame_set(frame_orig)
return {'FINISHED'}

@ -1,88 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# To support reload properly, try to access a package var, if it's there, reload everything
if "bpy" in locals():
import imp
if "import_3ds" in locals():
imp.reload(import_3ds)
if "export_3ds" in locals():
imp.reload(export_3ds)
import bpy
from bpy.props import *
from io_utils import ImportHelper, ExportHelper
class Import3DS(bpy.types.Operator, ImportHelper):
'''Import from 3DS file format (.3ds)'''
bl_idname = "import_scene.autodesk_3ds"
bl_label = 'Import 3DS'
filename_ext = ".3ds"
filter_glob = StringProperty(default="*.3ds", options={'HIDDEN'})
constrain_size = FloatProperty(name="Size Constraint", description="Scale the model by 10 until it reaches the size constraint. Zero disables.", min=0.0, max=1000.0, soft_min=0.0, soft_max=1000.0, default=10.0)
use_image_search = BoolProperty(name="Image Search", description="Search subdirectories for any associated images (Warning, may be slow)", default=True)
use_apply_transform = BoolProperty(name="Apply Transform", description="Workaround for object transformations importing incorrectly", default=True)
def execute(self, context):
from . import import_3ds
return import_3ds.load(self, context, **self.as_keywords(ignore=("filter_glob",)))
class Export3DS(bpy.types.Operator, ExportHelper):
'''Export to 3DS file format (.3ds)'''
bl_idname = "export_scene.autodesk_3ds"
bl_label = 'Export 3DS'
filename_ext = ".3ds"
filter_glob = StringProperty(default="*.3ds", options={'HIDDEN'})
def execute(self, context):
from . import export_3ds
return export_3ds.save(self, context, **self.as_keywords(ignore=("check_existing", "filter_glob")))
# Add to a menu
def menu_func_export(self, context):
self.layout.operator(Export3DS.bl_idname, text="3D Studio (.3ds)")
def menu_func_import(self, context):
self.layout.operator(Import3DS.bl_idname, text="3D Studio (.3ds)")
def register():
bpy.types.INFO_MT_file_import.append(menu_func_import)
bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
bpy.types.INFO_MT_file_import.remove(menu_func_import)
bpy.types.INFO_MT_file_export.remove(menu_func_export)
# NOTES:
# why add 1 extra vertex? and remove it when done? - "Answer - eekadoodle - would need to re-order UVs without this since face order isn't always what we give Blender, BMesh will solve :D"
# disabled scaling to size, this requires exposing bb (easy) and understanding how it works (needs some time)
if __name__ == "__main__":
register()

File diff suppressed because it is too large

@ -1,897 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) Bob Holcomb
# Contributors: Bob Holcomb, Richard Lärkäng, Damien McGinnes, Campbell Barton, Mario Lapin, Dominique Lorre
import os
import time
import struct
from io_utils import load_image
import bpy
import mathutils
BOUNDS_3DS = []
######################################################
# Data Structures
######################################################
#Some of the chunks that we will see
#----- Primary Chunk, at the beginning of each file
PRIMARY = int('0x4D4D',16)
#------ Main Chunks
OBJECTINFO = 0x3D3D #This gives the version of the mesh and is found right before the material and object information
VERSION = 0x0002 #This gives the version of the .3ds file
EDITKEYFRAME= 0xB000 #This is the header for all of the key frame info
#------ sub defines of OBJECTINFO
MATERIAL = 45055 #0xAFFF // This stored the texture info
OBJECT = 16384 #0x4000 // This stores the faces, vertices, etc...
#>------ sub defines of MATERIAL
#------ sub defines of MATERIAL_BLOCK
MAT_NAME = 0xA000 # This holds the material name
MAT_AMBIENT = 0xA010 # Ambient color of the object/material
MAT_DIFFUSE = 0xA020 # This holds the color of the object/material
MAT_SPECULAR = 0xA030 # Specular color of the object/material
MAT_SHINESS = 0xA040 # Shininess of the material (specular exponent)
MAT_TRANSPARENCY= 0xA050 # Transparency value of material
MAT_SELF_ILLUM = 0xA080 # Self Illumination value of material
MAT_WIRE = 0xA085 # Only renders wireframe
MAT_TEXTURE_MAP = 0xA200 # This is a header for a new texture map
MAT_SPECULAR_MAP= 0xA204 # This is a header for a new specular map
MAT_OPACITY_MAP = 0xA210 # This is a header for a new opacity map
MAT_REFLECTION_MAP= 0xA220 # This is a header for a new reflection map
MAT_BUMP_MAP = 0xA230 # This is a header for a new bump map
MAT_MAP_FILEPATH = 0xA300 # This holds the file name of the texture
MAT_FLOAT_COLOR = 0x0010 #color defined as 3 floats
MAT_24BIT_COLOR = 0x0011 #color defined as 3 bytes
#>------ sub defines of OBJECT
OBJECT_MESH = 0x4100 # This lets us know that we are reading a new object
OBJECT_LAMP = 0x4600 # This lets us know we are reading a light object
OBJECT_LAMP_SPOT = 0x4610 # The light is a spotlight.
OBJECT_LAMP_OFF = 0x4620 # The light is off.
OBJECT_LAMP_ATTENUATE = 0x4625
OBJECT_LAMP_RAYSHADE = 0x4627
OBJECT_LAMP_SHADOWED = 0x4630
OBJECT_LAMP_LOCAL_SHADOW = 0x4640
OBJECT_LAMP_LOCAL_SHADOW2 = 0x4641
OBJECT_LAMP_SEE_CONE = 0x4650
OBJECT_LAMP_SPOT_RECTANGULAR = 0x4651
OBJECT_LAMP_SPOT_OVERSHOOT = 0x4652
OBJECT_LAMP_SPOT_PROJECTOR = 0x4653
OBJECT_LAMP_EXCLUDE = 0x4654
OBJECT_LAMP_RANGE = 0x4655
OBJECT_LAMP_ROLL = 0x4656
OBJECT_LAMP_SPOT_ASPECT = 0x4657
OBJECT_LAMP_RAY_BIAS = 0x4658
OBJECT_LAMP_INNER_RANGE = 0x4659
OBJECT_LAMP_OUTER_RANGE = 0x465A
OBJECT_LAMP_MULTIPLIER = 0x465B
OBJECT_LAMP_AMBIENT_LIGHT = 0x4680
OBJECT_CAMERA= 0x4700 # This lets us know we are reading a camera object
#>------ sub defines of CAMERA
OBJECT_CAM_RANGES= 0x4720 # The camera range values
#>------ sub defines of OBJECT_MESH
OBJECT_VERTICES = 0x4110 # The objects vertices
OBJECT_FACES = 0x4120 # The objects faces
OBJECT_MATERIAL = 0x4130 # This is found if the object has a material, either texture map or color
OBJECT_UV = 0x4140 # The UV texture coordinates
OBJECT_TRANS_MATRIX = 0x4160 # The Object Matrix
#>------ sub defines of EDITKEYFRAME
# ED_KEY_AMBIENT_NODE = 0xB001
ED_KEY_OBJECT_NODE = 0xB002
# ED_KEY_CAMERA_NODE = 0xB003
# ED_KEY_TARGET_NODE = 0xB004
# ED_KEY_LIGHT_NODE = 0xB005
# ED_KEY_L_TARGET_NODE = 0xB006
# ED_KEY_SPOTLIGHT_NODE = 0xB007
#>------ sub defines of ED_KEY_OBJECT_NODE
# EK_OB_KEYFRAME_SEG = 0xB008
# EK_OB_KEYFRAME_CURTIME = 0xB009
# EK_OB_KEYFRAME_HEADER = 0xB00A
EK_OB_NODE_HEADER = 0xB010
EK_OB_INSTANCE_NAME = 0xB011
# EK_OB_PRESCALE = 0xB012
# EK_OB_PIVOT = 0xB013
# EK_OB_BOUNDBOX = 0xB014
# EK_OB_MORPH_SMOOTH = 0xB015
EK_OB_POSITION_TRACK = 0xB020
EK_OB_ROTATION_TRACK = 0xB021
EK_OB_SCALE_TRACK = 0xB022
# EK_OB_CAMERA_FOV_TRACK = 0xB023
# EK_OB_CAMERA_ROLL_TRACK = 0xB024
# EK_OB_COLOR_TRACK = 0xB025
# EK_OB_MORPH_TRACK = 0xB026
# EK_OB_HOTSPOT_TRACK = 0xB027
# EK_OB_FALLOF_TRACK = 0xB028
# EK_OB_HIDE_TRACK = 0xB029
# EK_OB_NODE_ID = 0xB030
ROOT_OBJECT = 0xFFFF
global scn
scn = None
global object_dictionary # dictionary for object hierarchy
object_dictionary = {}
#the chunk class
class chunk:
ID = 0
length = 0
bytes_read = 0
#we don't read in the bytes_read, we compute that
binary_format='<HI'
def __init__(self):
self.ID = 0
self.length = 0
self.bytes_read = 0
def dump(self):
print('ID: ', self.ID)
print('ID in hex: ', hex(self.ID))
print('length: ', self.length)
print('bytes_read: ', self.bytes_read)
def read_chunk(file, chunk):
temp_data = file.read(struct.calcsize(chunk.binary_format))
data = struct.unpack(chunk.binary_format, temp_data)
chunk.ID = data[0]
chunk.length = data[1]
#update the bytes read function
chunk.bytes_read = 6
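# 6 == size of the chunk header: 2-byte ID + 4-byte length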
#if debugging
#chunk.dump()
def read_string(file):
#read in the characters till we get a null character
s = b''
while True:
c = struct.unpack('<c', file.read(1))[0]
if c == b'\x00':
break
s += c
#print 'string: ',s
#remove the null character from the string
# print("read string", s)
return str(s, "utf-8", "replace"), len(s) + 1
######################################################
# IMPORT
######################################################
def process_next_object_chunk(file, previous_chunk):
new_chunk = chunk()
temp_chunk = chunk()
while (previous_chunk.bytes_read < previous_chunk.length):
#read the next chunk
read_chunk(file, new_chunk)
def skip_to_end(file, skip_chunk):
buffer_size = skip_chunk.length - skip_chunk.bytes_read
binary_format='%ic' % buffer_size
temp_data = file.read(struct.calcsize(binary_format))
skip_chunk.bytes_read += buffer_size
def add_texture_to_material(image, texture, material, mapto):
#print('assigning %s to %s' % (texture, material))
if mapto not in ("COLOR", "SPECULARITY", "ALPHA", "NORMAL"):
print('\tError: Cannot map to "%s"\n\tassuming diffuse color. modify material "%s" later.' % (mapto, material.name))
mapto = "COLOR"
if image:
texture.image = image
mtex = material.texture_slots.add()
mtex.texture = texture
mtex.texture_coords = 'UV'
mtex.use_map_color_diffuse = False
if mapto == 'COLOR':
mtex.use_map_color_diffuse = True
elif mapto == 'SPECULARITY':
mtex.use_map_specular = True
elif mapto == 'ALPHA':
mtex.use_map_alpha = True
elif mapto == 'NORMAL':
mtex.use_map_normal = True
def process_next_chunk(file, previous_chunk, importedObjects, IMAGE_SEARCH):
#print previous_chunk.bytes_read, 'BYTES READ'
contextObName = None
contextLamp = [None, None] # object, Data
contextMaterial = None
contextMatrix_rot = None # Blender.mathutils.Matrix(); contextMatrix.identity()
#contextMatrix_tx = None # Blender.mathutils.Matrix(); contextMatrix.identity()
contextMesh_vertls = None # flat array: (verts * 3)
contextMesh_facels = None
contextMeshMaterials = {} # matname:[face_idxs]
contextMeshUV = None # flat array (verts * 2)
TEXTURE_DICT = {}
MATDICT = {}
# TEXMODE = Mesh.FaceModes['TEX']
# Localspace variable names, faster.
STRUCT_SIZE_1CHAR = struct.calcsize('c')
STRUCT_SIZE_2FLOAT = struct.calcsize('2f')
STRUCT_SIZE_3FLOAT = struct.calcsize('3f')
STRUCT_SIZE_4FLOAT = struct.calcsize('4f')
STRUCT_SIZE_UNSIGNED_SHORT = struct.calcsize('H')
STRUCT_SIZE_4UNSIGNED_SHORT = struct.calcsize('4H')
STRUCT_SIZE_4x3MAT = struct.calcsize('ffffffffffff')
_STRUCT_SIZE_4x3MAT = struct.calcsize('fffffffffffff')
# STRUCT_SIZE_4x3MAT = calcsize('ffffffffffff')
# print STRUCT_SIZE_4x3MAT, ' STRUCT_SIZE_4x3MAT'
# only init once
object_list = [] # for hierarchy
object_parent = [] # index of parent in hierarchy, 0xFFFF = no parent
def putContextMesh(myContextMesh_vertls, myContextMesh_facels, myContextMeshMaterials):
bmesh = bpy.data.meshes.new(contextObName)
if myContextMesh_facels is None:
myContextMesh_facels = []
if myContextMesh_vertls:
bmesh.vertices.add(len(myContextMesh_vertls)//3)
bmesh.faces.add(len(myContextMesh_facels))
bmesh.vertices.foreach_set("co", myContextMesh_vertls)
eekadoodle_faces = []
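# vertices_raw treats a trailing index of 0 as "this face is a triangle", so a real
# vertex index 0 cannot sit in the last used slot; rotate the indices when v3 == 0
# (the UVs are re-ordered to match further down).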
for v1, v2, v3 in myContextMesh_facels:
eekadoodle_faces.extend([v3, v1, v2, 0] if v3 == 0 else [v1, v2, v3, 0])
bmesh.faces.foreach_set("vertices_raw", eekadoodle_faces)
if bmesh.faces and contextMeshUV:
bmesh.uv_textures.new()
uv_faces = bmesh.uv_textures.active.data[:]
else:
uv_faces = None
for mat_idx, (matName, faces) in enumerate(myContextMeshMaterials.items()):
if matName is None:
bmat = None
else:
bmat = MATDICT[matName][1]
img = TEXTURE_DICT.get(bmat.name)
bmesh.materials.append(bmat) # can be None
if uv_faces and img:
for fidx in faces:
bmesh.faces[fidx].material_index = mat_idx
uf = uv_faces[fidx]
uf.image = img
uf.use_image = True
else:
for fidx in faces:
bmesh.faces[fidx].material_index = mat_idx
if uv_faces:
for fidx, uf in enumerate(uv_faces):
face = myContextMesh_facels[fidx]
v1, v2, v3 = face
# eekadoodle
if v3 == 0:
v1, v2, v3 = v3, v1, v2
uf.uv1 = contextMeshUV[v1 * 2:(v1 * 2) + 2]
uf.uv2 = contextMeshUV[v2 * 2:(v2 * 2) + 2]
uf.uv3 = contextMeshUV[v3 * 2:(v3 * 2) + 2]
# always a tri
ob = bpy.data.objects.new(contextObName, bmesh)
object_dictionary[contextObName] = ob
SCN.objects.link(ob)
'''
if contextMatrix_tx:
ob.setMatrix(contextMatrix_tx)
'''
if contextMatrix_rot:
ob.matrix_local = contextMatrix_rot
importedObjects.append(ob)
bmesh.update()
#a spare chunk
new_chunk = chunk()
temp_chunk = chunk()
CreateBlenderObject = False
def read_float_color(temp_chunk):
temp_data = file.read(struct.calcsize('3f'))
temp_chunk.bytes_read += 12
return [float(col) for col in struct.unpack('<3f', temp_data)]
def read_byte_color(temp_chunk):
temp_data = file.read(struct.calcsize('3B'))
temp_chunk.bytes_read += 3
return [float(col)/255 for col in struct.unpack('<3B', temp_data)] # data [0,1,2] == rgb
def read_texture(new_chunk, temp_chunk, name, mapto):
new_texture = bpy.data.textures.new(name, type='IMAGE')
img = None
while (new_chunk.bytes_read < new_chunk.length):
#print 'MAT_TEXTURE_MAP..while', new_chunk.bytes_read, new_chunk.length
read_chunk(file, temp_chunk)
if (temp_chunk.ID == MAT_MAP_FILEPATH):
texture_name, read_str_len = read_string(file)
img = TEXTURE_DICT[contextMaterial.name] = load_image(texture_name, dirname)
new_chunk.bytes_read += read_str_len #plus one for the null character that gets removed
else:
skip_to_end(file, temp_chunk)
new_chunk.bytes_read += temp_chunk.bytes_read
# add the map to the material in the right channel
if img:
add_texture_to_material(img, new_texture, contextMaterial, mapto)
dirname = os.path.dirname(file.name)
#loop through all the data for this chunk (previous chunk) and see what it is
while (previous_chunk.bytes_read < previous_chunk.length):
#print '\t', previous_chunk.bytes_read, 'keep going'
#read the next chunk
#print 'reading a chunk'
read_chunk(file, new_chunk)
#is it a Version chunk?
if (new_chunk.ID == VERSION):
#print 'if (new_chunk.ID == VERSION):'
#print 'found a VERSION chunk'
#read in the version of the file
#it's an unsigned short (H)
temp_data = file.read(struct.calcsize('I'))
version = struct.unpack('<I', temp_data)[0]
new_chunk.bytes_read += 4 #read the 4 bytes for the version number
#this loader works with version 3 and below, but may not with 4 and above
if (version > 3):
print('\tNon-Fatal Error: Version greater than 3, may not load correctly: ', version)
#is it an object info chunk?
elif (new_chunk.ID == OBJECTINFO):
#print 'elif (new_chunk.ID == OBJECTINFO):'
# print 'found an OBJECTINFO chunk'
process_next_chunk(file, new_chunk, importedObjects, IMAGE_SEARCH)
#keep track of how much we read in the main chunk
new_chunk.bytes_read += temp_chunk.bytes_read
#is it an object chunk?
elif (new_chunk.ID == OBJECT):
if CreateBlenderObject:
putContextMesh(contextMesh_vertls, contextMesh_facels, contextMeshMaterials)
contextMesh_vertls = []; contextMesh_facels = []
## preparing to receive the next object
contextMeshMaterials = {} # matname:[face_idxs]
contextMeshUV = None
#contextMesh.vertexUV = 1 # Make sticky coords.
# Reset matrix
contextMatrix_rot = None
#contextMatrix_tx = None
CreateBlenderObject = True
contextObName, read_str_len = read_string(file)
new_chunk.bytes_read += read_str_len
#is it a material chunk?
elif (new_chunk.ID == MATERIAL):
# print("read material")
#print 'elif (new_chunk.ID == MATERIAL):'
contextMaterial = bpy.data.materials.new('Material')
elif (new_chunk.ID == MAT_NAME):
#print 'elif (new_chunk.ID == MAT_NAME):'
material_name, read_str_len = read_string(file)
# print("material name", material_name)
#plus one for the null character that ended the string
new_chunk.bytes_read += read_str_len
contextMaterial.name = material_name.rstrip() # remove trailing whitespace
MATDICT[material_name]= (contextMaterial.name, contextMaterial)
elif (new_chunk.ID == MAT_AMBIENT):
#print 'elif (new_chunk.ID == MAT_AMBIENT):'
read_chunk(file, temp_chunk)
if (temp_chunk.ID == MAT_FLOAT_COLOR):
contextMaterial.mirror_color = read_float_color(temp_chunk)
# temp_data = file.read(struct.calcsize('3f'))
# temp_chunk.bytes_read += 12
# contextMaterial.mirCol = [float(col) for col in struct.unpack('<3f', temp_data)]
elif (temp_chunk.ID == MAT_24BIT_COLOR):
contextMaterial.mirror_color = read_byte_color(temp_chunk)
# temp_data = file.read(struct.calcsize('3B'))
# temp_chunk.bytes_read += 3
# contextMaterial.mirCol = [float(col)/255 for col in struct.unpack('<3B', temp_data)] # data [0,1,2] == rgb
else:
skip_to_end(file, temp_chunk)
new_chunk.bytes_read += temp_chunk.bytes_read
elif (new_chunk.ID == MAT_DIFFUSE):
#print 'elif (new_chunk.ID == MAT_DIFFUSE):'
read_chunk(file, temp_chunk)
if (temp_chunk.ID == MAT_FLOAT_COLOR):
contextMaterial.diffuse_color = read_float_color(temp_chunk)
# temp_data = file.read(struct.calcsize('3f'))
# temp_chunk.bytes_read += 12
# contextMaterial.rgbCol = [float(col) for col in struct.unpack('<3f', temp_data)]
elif (temp_chunk.ID == MAT_24BIT_COLOR):
contextMaterial.diffuse_color = read_byte_color(temp_chunk)
# temp_data = file.read(struct.calcsize('3B'))
# temp_chunk.bytes_read += 3
# contextMaterial.rgbCol = [float(col)/255 for col in struct.unpack('<3B', temp_data)] # data [0,1,2] == rgb
else:
skip_to_end(file, temp_chunk)
# print("read material diffuse color", contextMaterial.diffuse_color)
new_chunk.bytes_read += temp_chunk.bytes_read
elif (new_chunk.ID == MAT_SPECULAR):
#print 'elif (new_chunk.ID == MAT_SPECULAR):'
read_chunk(file, temp_chunk)
if (temp_chunk.ID == MAT_FLOAT_COLOR):
contextMaterial.specular_color = read_float_color(temp_chunk)
# temp_data = file.read(struct.calcsize('3f'))
# temp_chunk.bytes_read += 12
# contextMaterial.mirCol = [float(col) for col in struct.unpack('<3f', temp_data)]
elif (temp_chunk.ID == MAT_24BIT_COLOR):
contextMaterial.specular_color = read_byte_color(temp_chunk)
# temp_data = file.read(struct.calcsize('3B'))
# temp_chunk.bytes_read += 3
# contextMaterial.mirCol = [float(col)/255 for col in struct.unpack('<3B', temp_data)] # data [0,1,2] == rgb
else:
skip_to_end(file, temp_chunk)
new_chunk.bytes_read += temp_chunk.bytes_read
elif (new_chunk.ID == MAT_TEXTURE_MAP):
read_texture(new_chunk, temp_chunk, "Diffuse", "COLOR")
elif (new_chunk.ID == MAT_SPECULAR_MAP):
read_texture(new_chunk, temp_chunk, "Specular", "SPECULARITY")
elif (new_chunk.ID == MAT_OPACITY_MAP):
read_texture(new_chunk, temp_chunk, "Opacity", "ALPHA")
elif (new_chunk.ID == MAT_BUMP_MAP):
read_texture(new_chunk, temp_chunk, "Bump", "NORMAL")
elif (new_chunk.ID == MAT_TRANSPARENCY):
#print 'elif (new_chunk.ID == MAT_TRANSPARENCY):'
read_chunk(file, temp_chunk)
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
temp_chunk.bytes_read += 2
contextMaterial.alpha = 1-(float(struct.unpack('<H', temp_data)[0])/100)
new_chunk.bytes_read += temp_chunk.bytes_read
elif (new_chunk.ID == OBJECT_LAMP): # Basic lamp support.
temp_data = file.read(STRUCT_SIZE_3FLOAT)
x,y,z = struct.unpack('<3f', temp_data)
new_chunk.bytes_read += STRUCT_SIZE_3FLOAT
# no lamp in dict that would be confusing
ob = bpy.data.objects.new("Lamp", bpy.data.lamps.new("Lamp"))
SCN.objects.link(ob)
contextLamp[1]= ob.data
# contextLamp[1]= bpy.data.lamps.new()
contextLamp[0]= ob
# contextLamp[0]= SCN_OBJECTS.new(contextLamp[1])
importedObjects.append(contextLamp[0])
#print 'number of faces: ', num_faces
#print x,y,z
contextLamp[0].location = (x, y, z)
# contextLamp[0].setLocation(x,y,z)
# Reset matrix
contextMatrix_rot = None
#contextMatrix_tx = None
#print contextLamp.name,
elif (new_chunk.ID == OBJECT_MESH):
# print 'Found an OBJECT_MESH chunk'
pass
elif (new_chunk.ID == OBJECT_VERTICES):
'''
Worldspace vertex locations
'''
# print 'elif (new_chunk.ID == OBJECT_VERTICES):'
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
num_verts = struct.unpack('<H', temp_data)[0]
new_chunk.bytes_read += 2
# print 'number of verts: ', num_verts
contextMesh_vertls = struct.unpack('<%df' % (num_verts * 3), file.read(STRUCT_SIZE_3FLOAT * num_verts))
new_chunk.bytes_read += STRUCT_SIZE_3FLOAT * num_verts
# dummyvert is not used atm!
#print 'object verts: bytes read: ', new_chunk.bytes_read
elif (new_chunk.ID == OBJECT_FACES):
# print 'elif (new_chunk.ID == OBJECT_FACES):'
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
num_faces = struct.unpack('<H', temp_data)[0]
new_chunk.bytes_read += 2
#print 'number of faces: ', num_faces
# print '\ngetting a face'
temp_data = file.read(STRUCT_SIZE_4UNSIGNED_SHORT * num_faces)
new_chunk.bytes_read += STRUCT_SIZE_4UNSIGNED_SHORT * num_faces #4 short ints x 2 bytes each
contextMesh_facels = struct.unpack('<%dH' % (num_faces * 4), temp_data)
contextMesh_facels = [contextMesh_facels[i - 3:i] for i in range(3, (num_faces * 4) + 3, 4)]
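# Each face record is 4 unsigned shorts: three vertex indices plus a flags word; keep only the indices.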
elif (new_chunk.ID == OBJECT_MATERIAL):
# print 'elif (new_chunk.ID == OBJECT_MATERIAL):'
material_name, read_str_len = read_string(file)
new_chunk.bytes_read += read_str_len # remove 1 null character.
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
num_faces_using_mat = struct.unpack('<H', temp_data)[0]
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT * num_faces_using_mat)
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT * num_faces_using_mat
contextMeshMaterials[material_name]= struct.unpack("<%dH" % (num_faces_using_mat), temp_data)
#look up the material in all the materials
elif (new_chunk.ID == OBJECT_UV):
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
num_uv = struct.unpack('<H', temp_data)[0]
new_chunk.bytes_read += 2
temp_data = file.read(STRUCT_SIZE_2FLOAT * num_uv)
new_chunk.bytes_read += STRUCT_SIZE_2FLOAT * num_uv
contextMeshUV = struct.unpack('<%df' % (num_uv * 2), temp_data)
elif (new_chunk.ID == OBJECT_TRANS_MATRIX):
# How do we know the matrix size? 64 bytes == 4x4, 48 bytes == 4x3
temp_data = file.read(STRUCT_SIZE_4x3MAT)
data = list( struct.unpack('<ffffffffffff', temp_data) )
new_chunk.bytes_read += STRUCT_SIZE_4x3MAT
contextMatrix_rot = mathutils.Matrix((data[:3] + [0], \
data[3:6] + [0], \
data[6:9] + [0], \
data[9:] + [1], \
))
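# The 3DS matrix is stored as 12 floats (three basis rows plus a translation row);
# pad each row with 0 and the translation with 1 to build a 4x4.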
elif (new_chunk.ID == MAT_MAP_FILEPATH):
texture_name, read_str_len = read_string(file)
try:
TEXTURE_DICT[contextMaterial.name]
except:
#img = TEXTURE_DICT[contextMaterial.name]= BPyImage.comprehensiveImageLoad(texture_name, FILEPATH)
img = TEXTURE_DICT[contextMaterial.name] = load_image(texture_name, dirname)
# img = TEXTURE_DICT[contextMaterial.name]= BPyImage.comprehensiveImageLoad(texture_name, FILEPATH, PLACE_HOLDER=False, RECURSIVE=IMAGE_SEARCH)
new_chunk.bytes_read += read_str_len #plus one for the null character that gets removed
elif new_chunk.ID == EDITKEYFRAME:
pass
elif new_chunk.ID == ED_KEY_OBJECT_NODE: #another object is being processed
child = None
elif new_chunk.ID == EK_OB_NODE_HEADER:
object_name, read_str_len = read_string(file)
new_chunk.bytes_read += read_str_len
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT * 2)
new_chunk.bytes_read += 4
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
hierarchy = struct.unpack('<H', temp_data)[0]
new_chunk.bytes_read += 2
child = object_dictionary.get(object_name)
if child is None:
child = bpy.data.objects.new(object_name, None) # create an empty object
SCN.objects.link(child)
object_list.append(child)
object_parent.append(hierarchy)
elif new_chunk.ID == EK_OB_INSTANCE_NAME:
object_name, read_str_len = read_string(file)
child.name = object_name
object_dictionary[object_name] = child
new_chunk.bytes_read += read_str_len
elif new_chunk.ID == EK_OB_POSITION_TRACK: # translation
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT * 5
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT * 5)
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
nkeys = struct.unpack('<H', temp_data)[0]
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT * 2
for i in range(nkeys):
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
nframe = struct.unpack('<H', temp_data)[0]
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT * 2)
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT * 2
temp_data = file.read(STRUCT_SIZE_3FLOAT)
loc = struct.unpack('<3f', temp_data)
new_chunk.bytes_read += STRUCT_SIZE_3FLOAT
if nframe == 0:
child.location = loc
elif new_chunk.ID == EK_OB_ROTATION_TRACK: # rotation
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT * 5
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT * 5)
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
nkeys = struct.unpack('<H', temp_data)[0]
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT * 2
for i in range(nkeys):
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
nframe = struct.unpack('<H', temp_data)[0]
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT * 2)
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT * 2
temp_data = file.read(STRUCT_SIZE_4FLOAT)
rad,axis_x,axis_y,axis_z = struct.unpack('<4f', temp_data)
new_chunk.bytes_read += STRUCT_SIZE_4FLOAT
if nframe == 0:
child.rotation_euler = mathutils.Quaternion((axis_x, axis_y, axis_z), -rad).to_euler() # why negative?
elif new_chunk.ID == EK_OB_SCALE_TRACK: # translation
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT * 5
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT * 5)
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
nkeys = struct.unpack('<H', temp_data)[0]
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT * 2
for i in range(nkeys):
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
nframe = struct.unpack('<H', temp_data)[0]
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT * 2)
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT * 2
temp_data = file.read(STRUCT_SIZE_3FLOAT)
sca = struct.unpack('<3f', temp_data)
new_chunk.bytes_read += STRUCT_SIZE_3FLOAT
if nframe == 0:
child.scale = sca
else: #(new_chunk.ID!=VERSION or new_chunk.ID!=OBJECTINFO or new_chunk.ID!=OBJECT or new_chunk.ID!=MATERIAL):
# print 'skipping to end of this chunk'
#print("unknown chunk: "+hex(new_chunk.ID))
buffer_size = new_chunk.length - new_chunk.bytes_read
binary_format='%ic' % buffer_size
temp_data = file.read(struct.calcsize(binary_format))
new_chunk.bytes_read += buffer_size
#update the previous chunk bytes read
# print 'previous_chunk.bytes_read += new_chunk.bytes_read'
# print previous_chunk.bytes_read, new_chunk.bytes_read
previous_chunk.bytes_read += new_chunk.bytes_read
## print 'Bytes left in this chunk: ', previous_chunk.length - previous_chunk.bytes_read
# FINISHED LOOP
# There will be a number of objects still not added
if CreateBlenderObject:
putContextMesh(contextMesh_vertls, contextMesh_facels, contextMeshMaterials)
# Assign parents to objects
for ind, ob in enumerate(object_list):
parent = object_parent[ind]
if parent == ROOT_OBJECT:
ob.parent = None
else:
ob.parent = object_list[parent]
def load_3ds(filepath, context, IMPORT_CONSTRAIN_BOUNDS=10.0, IMAGE_SEARCH=True, APPLY_MATRIX=True):
global SCN
# XXX
# if BPyMessages.Error_NoFile(filepath):
# return
print("importing 3DS: %r..." % (filepath), end="")
time1 = time.clock()
# time1 = Blender.sys.time()
current_chunk = chunk()
file = open(filepath, 'rb')
#here we go!
# print 'reading the first chunk'
read_chunk(file, current_chunk)
if (current_chunk.ID!=PRIMARY):
print('\tFatal Error: Not a valid 3ds file: %r' % filepath)
file.close()
return
# IMPORT_AS_INSTANCE = Blender.Draw.Create(0)
# IMPORT_CONSTRAIN_BOUNDS = Blender.Draw.Create(10.0)
# IMAGE_SEARCH = Blender.Draw.Create(1)
# APPLY_MATRIX = Blender.Draw.Create(0)
# Get USER Options
# pup_block = [\
# ('Size Constraint:', IMPORT_CONSTRAIN_BOUNDS, 0.0, 1000.0, 'Scale the model by 10 until it reaches the size constraint. Zero disables.'),\
# ('Image Search', IMAGE_SEARCH, 'Search subdirs for any associated images (Warning, may be slow)'),\
# ('Transform Fix', APPLY_MATRIX, 'Workaround for object transformations importing incorrectly'),\
# #('Group Instance', IMPORT_AS_INSTANCE, 'Import objects into a new scene and group, creating an instance in the current scene.'),\
# ]
# if PREF_UI:
# if not Blender.Draw.PupBlock('Import 3DS...', pup_block):
# return
# Blender.Window.WaitCursor(1)
# IMPORT_CONSTRAIN_BOUNDS = IMPORT_CONSTRAIN_BOUNDS.val
# # IMPORT_AS_INSTANCE = IMPORT_AS_INSTANCE.val
# IMAGE_SEARCH = IMAGE_SEARCH.val
# APPLY_MATRIX = APPLY_MATRIX.val
if IMPORT_CONSTRAIN_BOUNDS:
BOUNDS_3DS[:]= [1<<30, 1<<30, 1<<30, -1<<30, -1<<30, -1<<30]
else:
BOUNDS_3DS[:]= []
##IMAGE_SEARCH
# fixme, make unglobal, clear incase
object_dictionary.clear()
scn = context.scene
# scn = bpy.data.scenes.active
SCN = scn
# SCN_OBJECTS = scn.objects
# SCN_OBJECTS.selected = [] # de select all
importedObjects = [] # Fill this list with objects
process_next_chunk(file, current_chunk, importedObjects, IMAGE_SEARCH)
# fixme, make unglobal
object_dictionary.clear()
# Link the objects into this scene.
# Layers = scn.Layers
# REMOVE DUMMYVERT, - remove this in the next release when blenders internal are fixed.
if APPLY_MATRIX:
for ob in importedObjects:
if ob.type == 'MESH':
me = ob.data
me.transform(ob.matrix_local.copy().invert())
# Done DUMMYVERT
"""
if IMPORT_AS_INSTANCE:
name = filepath.split('\\')[-1].split('/')[-1]
# Create a group for this import.
group_scn = Scene.New(name)
for ob in importedObjects:
group_scn.link(ob) # dont worry about the layers
grp = Blender.Group.New(name)
grp.objects = importedObjects
grp_ob = Object.New('Empty', name)
grp_ob.enableDupGroup = True
grp_ob.DupGroup = grp
scn.link(grp_ob)
grp_ob.Layers = Layers
grp_ob.sel = 1
else:
# Select all imported objects.
for ob in importedObjects:
scn.link(ob)
ob.Layers = Layers
ob.sel = 1
"""
if 0:
# if IMPORT_CONSTRAIN_BOUNDS!=0.0:
# Set bounds from object bounding box
for ob in importedObjects:
if ob.type == 'MESH':
# if ob.type=='Mesh':
ob.makeDisplayList() # Why doesn't this update the bounds?
for v in ob.getBoundBox():
for i in (0,1,2):
if v[i] < BOUNDS_3DS[i]:
BOUNDS_3DS[i]= v[i] # min
if v[i] > BOUNDS_3DS[i + 3]:
BOUNDS_3DS[i + 3]= v[i] # max
# Get the max axis x/y/z
max_axis = max(BOUNDS_3DS[3]-BOUNDS_3DS[0], BOUNDS_3DS[4]-BOUNDS_3DS[1], BOUNDS_3DS[5]-BOUNDS_3DS[2])
# print max_axis
if max_axis < 1 << 30: # Should never be false but just make sure.
# Get a new scale factor if set as an option
SCALE = 1.0
while (max_axis * SCALE) > IMPORT_CONSTRAIN_BOUNDS:
SCALE/=10
# SCALE Matrix
SCALE_MAT = mathutils.Matrix.Scale(SCALE, 4)
for ob in importedObjects:
if ob.parent is None:
ob.matrix_world = ob.matrix_world * SCALE_MAT
# Done constraining to bounds.
# Select all new objects.
print(" done in %.4f sec." % (time.clock()-time1))
file.close()
def load(operator, context, filepath="", constrain_size=0.0, use_image_search=True, use_apply_transform=True):
load_3ds(filepath, context, IMPORT_CONSTRAIN_BOUNDS=constrain_size, IMAGE_SEARCH=use_image_search, APPLY_MATRIX=use_apply_transform)
return {'FINISHED'}

@ -1,109 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# To support reload properly, try to access a package var, if it's there, reload everything
if "bpy" in locals():
import imp
if "export_fbx" in locals():
imp.reload(export_fbx)
import bpy
from bpy.props import *
from io_utils import ExportHelper
class ExportFBX(bpy.types.Operator, ExportHelper):
'''Export the selection to an ASCII Autodesk FBX file'''
bl_idname = "export_scene.fbx"
bl_label = "Export FBX"
bl_options = {'PRESET'}
filename_ext = ".fbx"
filter_glob = StringProperty(default="*.fbx", options={'HIDDEN'})
# List of operator properties, the attributes will be assigned
# to the class instance from the operator settings before calling.
EXP_OBS_SELECTED = BoolProperty(name="Selected Objects", description="Export selected objects on visible layers", default=True)
# EXP_OBS_SCENE = BoolProperty(name="Scene Objects", description="Export all objects in this scene", default=True)
TX_SCALE = FloatProperty(name="Scale", description="Scale all data (note: some importers don't support scaled armatures)", min=0.01, max=1000.0, soft_min=0.01, soft_max=1000.0, default=1.0)
TX_XROT90 = BoolProperty(name="Rot X90", description="Rotate all objects 90 degrees about the X axis", default=True)
TX_YROT90 = BoolProperty(name="Rot Y90", description="Rotate all objects 90 degrees about the Y axis", default=False)
TX_ZROT90 = BoolProperty(name="Rot Z90", description="Rotate all objects 90 degrees about the Z axis", default=False)
EXP_EMPTY = BoolProperty(name="Empties", description="Export empty objects", default=True)
EXP_CAMERA = BoolProperty(name="Cameras", description="Export camera objects", default=True)
EXP_LAMP = BoolProperty(name="Lamps", description="Export lamp objects", default=True)
EXP_ARMATURE = BoolProperty(name="Armatures", description="Export armature objects", default=True)
EXP_MESH = BoolProperty(name="Meshes", description="Export mesh objects", default=True)
EXP_MESH_APPLY_MOD = BoolProperty(name="Modifiers", description="Apply modifiers to mesh objects", default=True)
# EXP_MESH_HQ_NORMALS = BoolProperty(name="HQ Normals", description="Generate high quality normals", default=True)
EXP_IMAGE_COPY = BoolProperty(name="Copy Image Files", description="Copy image files to the destination path", default=False)
# armature animation
ANIM_ENABLE = BoolProperty(name="Enable Animation", description="Export keyframe animation", default=True)
ANIM_OPTIMIZE = BoolProperty(name="Optimize Keyframes", description="Remove double keyframes", default=True)
ANIM_OPTIMIZE_PRECISSION = FloatProperty(name="Precision", description="Tolerance for comparing double keyframes (higher for greater accuracy)", min=1, max=16, soft_min=1, soft_max=16, default=6.0)
# ANIM_ACTION_ALL = BoolProperty(name="Current Action", description="Use actions currently applied to the armatures (use scene start/end frame)", default=True)
ANIM_ACTION_ALL = BoolProperty(name="All Actions", description="Use all actions for armatures, if false, use current action", default=False)
# batch
BATCH_ENABLE = BoolProperty(name="Enable Batch", description="Automate exporting multiple scenes or groups to files", default=False)
BATCH_GROUP = BoolProperty(name="Group > File", description="Export each group as an FBX file, if false, export each scene as an FBX file", default=False)
BATCH_OWN_DIR = BoolProperty(name="Own Dir", description="Create a dir for each exported file", default=True)
BATCH_FILE_PREFIX = StringProperty(name="Prefix", description="Prefix each file with this name", maxlen=1024, default="")
def execute(self, context):
import math
from mathutils import Matrix
if not self.filepath:
raise Exception("filepath not set")
mtx4_x90n = Matrix.Rotation(-math.pi / 2.0, 4, 'X')
mtx4_y90n = Matrix.Rotation(-math.pi / 2.0, 4, 'Y')
mtx4_z90n = Matrix.Rotation(-math.pi / 2.0, 4, 'Z')
GLOBAL_MATRIX = Matrix()
GLOBAL_MATRIX[0][0] = GLOBAL_MATRIX[1][1] = GLOBAL_MATRIX[2][2] = self.TX_SCALE
if self.TX_XROT90:
GLOBAL_MATRIX = mtx4_x90n * GLOBAL_MATRIX
if self.TX_YROT90:
GLOBAL_MATRIX = mtx4_y90n * GLOBAL_MATRIX
if self.TX_ZROT90:
GLOBAL_MATRIX = mtx4_z90n * GLOBAL_MATRIX
keywords = self.as_keywords(ignore=("TX_XROT90", "TX_YROT90", "TX_ZROT90", "TX_SCALE", "check_existing", "filter_glob"))
keywords["GLOBAL_MATRIX"] = GLOBAL_MATRIX
import io_scene_fbx.export_fbx
return io_scene_fbx.export_fbx.save(self, context, **keywords)
def menu_func(self, context):
self.layout.operator(ExportFBX.bl_idname, text="Autodesk FBX (.fbx)")
def register():
bpy.types.INFO_MT_file_export.append(menu_func)
def unregister():
bpy.types.INFO_MT_file_export.remove(menu_func)
if __name__ == "__main__":
register()

File diff suppressed because it is too large

@ -1,131 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# To support reload properly, try to access a package var, if it's there, reload everything
if "bpy" in locals():
import imp
if "import_obj" in locals():
imp.reload(import_obj)
if "export_obj" in locals():
imp.reload(export_obj)
import bpy
from bpy.props import *
from io_utils import ExportHelper, ImportHelper
class ImportOBJ(bpy.types.Operator, ImportHelper):
'''Load a Wavefront OBJ File'''
bl_idname = "import_scene.obj"
bl_label = "Import OBJ"
filename_ext = ".obj"
filter_glob = StringProperty(default="*.obj;*.mtl", options={'HIDDEN'})
CREATE_SMOOTH_GROUPS = BoolProperty(name="Smooth Groups", description="Surround smooth groups by sharp edges", default=True)
CREATE_FGONS = BoolProperty(name="NGons as FGons", description="Import faces with more than 4 verts as FGons", default=True)
CREATE_EDGES = BoolProperty(name="Lines as Edges", description="Import lines and faces with 2 verts as edge", default=True)
SPLIT_OBJECTS = BoolProperty(name="Object", description="Import OBJ Objects into Blender Objects", default=True)
SPLIT_GROUPS = BoolProperty(name="Group", description="Import OBJ Groups into Blender Objects", default=True)
# old comment: only used for user feedback
# disabled this option because in old code a handler for it disabled SPLIT* params, it's not passed to load_obj
# KEEP_VERT_ORDER = BoolProperty(name="Keep Vert Order", description="Keep vert and face order, disables split options, enable for morph targets", default= True)
ROTATE_X90 = BoolProperty(name="-X90", description="Rotate X 90.", default=True)
CLAMP_SIZE = FloatProperty(name="Clamp Scale", description="Clamp the size to this maximum (Zero to Disable)", min=0.0, max=1000.0, soft_min=0.0, soft_max=1000.0, default=0.0)
POLYGROUPS = BoolProperty(name="Poly Groups", description="Import OBJ groups as vertex groups.", default=True)
IMAGE_SEARCH = BoolProperty(name="Image Search", description="Search subdirs for any associated images (Warning, may be slow)", default=True)
def execute(self, context):
# print("Selected: " + context.active_object.name)
from . import import_obj
return import_obj.load(self, context, **self.as_keywords(ignore=("filter_glob",)))
class ExportOBJ(bpy.types.Operator, ExportHelper):
'''Save a Wavefront OBJ File'''
bl_idname = "export_scene.obj"
bl_label = 'Export OBJ'
bl_options = {'PRESET'}
filename_ext = ".obj"
filter_glob = StringProperty(default="*.obj;*.mtl", options={'HIDDEN'})
# List of operator properties, the attributes will be assigned
# to the class instance from the operator settings before calling.
# context group
use_selection = BoolProperty(name="Selection Only", description="Export selected objects only", default=False)
use_all_scenes = BoolProperty(name="All Scenes", description="", default=False)
use_animation = BoolProperty(name="Animation", description="", default=False)
# object group
use_modifiers = BoolProperty(name="Apply Modifiers", description="Apply modifiers (preview resolution)", default=True)
use_rotate_x90 = BoolProperty(name="Rotate X90", description="", default=True)
# extra data group
use_edges = BoolProperty(name="Edges", description="", default=True)
use_normals = BoolProperty(name="Normals", description="", default=False)
use_hq_normals = BoolProperty(name="High Quality Normals", description="", default=True)
use_uvs = BoolProperty(name="UVs", description="", default=True)
use_materials = BoolProperty(name="Materials", description="", default=True)
copy_images = BoolProperty(name="Copy Images", description="", default=False)
use_triangles = BoolProperty(name="Triangulate", description="", default=False)
use_vertex_groups = BoolProperty(name="Polygroups", description="", default=False)
use_nurbs = BoolProperty(name="Nurbs", description="", default=False)
# grouping group
use_blen_objects = BoolProperty(name="Objects as OBJ Objects", description="", default=True)
group_by_object = BoolProperty(name="Objects as OBJ Groups ", description="", default=False)
group_by_material = BoolProperty(name="Material Groups", description="", default=False)
keep_vertex_order = BoolProperty(name="Keep Vertex Order", description="", default=False)
def execute(self, context):
from . import export_obj
return export_obj.save(self, context, **self.as_keywords(ignore=("check_existing", "filter_glob")))
def menu_func_import(self, context):
self.layout.operator(ImportOBJ.bl_idname, text="Wavefront (.obj)")
def menu_func_export(self, context):
self.layout.operator(ExportOBJ.bl_idname, text="Wavefront (.obj)")
def register():
bpy.types.INFO_MT_file_import.append(menu_func_import)
bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
bpy.types.INFO_MT_file_import.remove(menu_func_import)
bpy.types.INFO_MT_file_export.remove(menu_func_export)
# CONVERSION ISSUES
# - matrix problem
# - duplis - only tested dupliverts
# - all scenes export
# + normals calculation
if __name__ == "__main__":
register()

@ -1,836 +0,0 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import os
import time
import shutil
import bpy
import mathutils
def fixName(name):
if name is None:
return 'None'
else:
return name.replace(' ', '_')
def write_mtl(scene, filepath, copy_images, mtl_dict):
world = scene.world
if world:
worldAmb = world.ambient_color[:]
else:
worldAmb = 0.0, 0.0, 0.0
dest_dir = os.path.dirname(filepath)
def copy_image(image):
fn = bpy.path.abspath(image.filepath)
fn = os.path.normpath(fn)
fn_strip = os.path.basename(fn)
if copy_images:
rel = fn_strip
fn_abs_dest = os.path.join(dest_dir, fn_strip)
if not os.path.exists(fn_abs_dest):
shutil.copy(fn, fn_abs_dest)
elif bpy.path.is_subdir(fn, dest_dir):
rel = os.path.relpath(fn, dest_dir)
else:
rel = fn
return rel
file = open(filepath, "w", encoding='utf8')
file.write('# Blender MTL File: %r\n' % os.path.basename(bpy.data.filepath))
file.write('# Material Count: %i\n' % len(mtl_dict))
# Write material/image combinations we have used.
for key, (mtl_mat_name, mat, img) in mtl_dict.items():
# Get the Blender data for the material and the image.
# Having an image named None will cause a bug, don't do it :)
file.write('newmtl %s\n' % mtl_mat_name) # Define a new material: matname_imgname
if mat:
file.write('Ns %.6f\n' % ((mat.specular_hardness-1) * 1.9607843137254901)) # Hardness, convert Blender's 1-511 range to MTL's 0-1000
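# 510 * 1.9607843137254901 == 1000.0, mapping Blender's 1-511 hardness onto MTL's 0-1000 Ns range.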
file.write('Ka %.6f %.6f %.6f\n' % tuple(c * mat.ambient for c in worldAmb)) # Ambient, uses mirror colour,
file.write('Kd %.6f %.6f %.6f\n' % tuple(c * mat.diffuse_intensity for c in mat.diffuse_color)) # Diffuse
file.write('Ks %.6f %.6f %.6f\n' % tuple(c * mat.specular_intensity for c in mat.specular_color)) # Specular
if hasattr(mat, "ior"):
file.write('Ni %.6f\n' % mat.ior) # Refraction index
else:
file.write('Ni %.6f\n' % 1.0)
file.write('d %.6f\n' % mat.alpha) # Alpha (obj uses 'd' for dissolve)
# 0 to disable lighting, 1 for ambient & diffuse only (specular color set to black), 2 for full lighting.
if mat.use_shadeless:
file.write('illum 0\n') # ignore lighting
elif mat.specular_intensity == 0:
file.write('illum 1\n') # no specular.
else:
file.write('illum 2\n') # light normally
else:
#write a dummy material here?
file.write('Ns 0\n')
file.write('Ka %.6f %.6f %.6f\n' % tuple(c for c in worldAmb)) # Ambient, uses mirror colour,
file.write('Kd 0.8 0.8 0.8\n')
file.write('Ks 0.8 0.8 0.8\n')
file.write('d 1\n') # No alpha
file.write('illum 2\n') # light normally
# Write images!
if img: # We have an image on the face!
# write relative image path
rel = copy_image(img)
file.write('map_Kd %s\n' % rel) # Diffuse mapping image
# file.write('map_Kd %s\n' % img.filepath.split('\\')[-1].split('/')[-1]) # Diffuse mapping image
elif mat: # No face image. If we have a material, search for an MTex image.
for mtex in mat.texture_slots:
if mtex and mtex.texture.type == 'IMAGE':
try:
filepath = copy_image(mtex.texture.image)
# filepath = mtex.texture.image.filepath.split('\\')[-1].split('/')[-1]
file.write('map_Kd %s\n' % repr(filepath)[1:-1]) # Diffuse mapping image
break
except:
# Texture has no image though it's an image type, best ignore.
pass
file.write('\n\n')
file.close()
# XXX not used
def copy_file(source, dest):
file = open(source, 'rb')
data = file.read()
file.close()
file = open(dest, 'wb')
file.write(data)
file.close()
# XXX not used
def copy_images(dest_dir):
if dest_dir[-1] != os.sep:
dest_dir += os.sep
# Get unique image names
uniqueImages = {}
for matname, mat, image in mtl_dict.values(): # Only use image name
# Get Texface images
if image:
uniqueImages[image] = image # Should use sets here. wait until Python 2.4 is default.
# Get MTex images
if mat:
for mtex in mat.texture_slots:
if mtex and mtex.texture.type == 'IMAGE':
image_tex = mtex.texture.image
if image_tex:
try:
uniqueImages[image_tex] = image_tex
except:
pass
# Now copy images
copyCount = 0
# for bImage in uniqueImages.values():
# image_path = bpy.path.abspath(bImage.filepath)
# if bpy.sys.exists(image_path):
# # Make a name for the target path.
# dest_image_path = dest_dir + image_path.split('\\')[-1].split('/')[-1]
# if not bpy.utils.exists(dest_image_path): # Image isn't already there
# print('\tCopying "%s" > "%s"' % (image_path, dest_image_path))
# copy_file(image_path, dest_image_path)
# copyCount+=1
# paths= bpy.util.copy_images(uniqueImages.values(), dest_dir)
print('\tCopied %d images' % copyCount)
def test_nurbs_compat(ob):
if ob.type != 'CURVE':
return False
for nu in ob.data.splines:
if nu.point_count_v == 1 and nu.type != 'BEZIER': # not a surface and not bezier
return True
return False
def write_nurb(file, ob, ob_mat):
tot_verts = 0
cu = ob.data
# use negative indices
for nu in cu.splines:
if nu.type == 'POLY':
DEG_ORDER_U = 1
else:
DEG_ORDER_U = nu.order_u - 1 # odd but tested to be correct
if nu.type == 'BEZIER':
print("\tWarning, bezier curve:", ob.name, "only poly and nurbs curves supported")
continue
if nu.point_count_v > 1:
print("\tWarning, surface:", ob.name, "only poly and nurbs curves supported")
continue
if len(nu.points) <= DEG_ORDER_U:
print("\tWarning, order_u is lower then vert count, skipping:", ob.name)
continue
pt_num = 0
do_closed = nu.use_cyclic_u
do_endpoints = (do_closed == 0) and nu.use_endpoint_u
for pt in nu.points:
pt = ob_mat * pt.co.copy().resize3D()
file.write('v %.6f %.6f %.6f\n' % (pt[0], pt[1], pt[2]))
pt_num += 1
tot_verts += pt_num
file.write('g %s\n' % (fixName(ob.name))) # fixName(ob.getData(1)) could use the data name too
file.write('cstype bspline\n') # not ideal, hard coded
file.write('deg %d\n' % DEG_ORDER_U) # not used for curves but most files have it still
curve_ls = [-(i+1) for i in range(pt_num)]
# 'curv' keyword
if do_closed:
if DEG_ORDER_U == 1:
pt_num += 1
curve_ls.append(-1)
else:
pt_num += DEG_ORDER_U
curve_ls = curve_ls + curve_ls[0:DEG_ORDER_U]
file.write('curv 0.0 1.0 %s\n' % (' '.join([str(i) for i in curve_ls]))) # Blender has no U and V values for the curve
# 'parm' keyword
tot_parm = (DEG_ORDER_U + 1) + pt_num
tot_parm_div = float(tot_parm-1)
parm_ls = [(i/tot_parm_div) for i in range(tot_parm)]
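# Uniform parameter values over [0, 1]; for open curves with endpoint U the first and
# last (DEG_ORDER_U + 1) values are pinned to 0.0 / 1.0 below.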
if do_endpoints: # end points, force param
for i in range(DEG_ORDER_U+1):
parm_ls[i] = 0.0
parm_ls[-(1+i)] = 1.0
file.write('parm u %s\n' % ' '.join( [str(i) for i in parm_ls] ))
file.write('end\n')
return tot_verts
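# Illustrative sketch (hypothetical values, not taken from a real file) of what the
# block above emits for an open, 4-point, order-4 NURBS spline:
#
#   v ...                          <- 4 control points, referenced by negative indices
#   g Curve
#   cstype bspline
#   deg 3
#   curv 0.0 1.0 -1 -2 -3 -4
#   parm u 0.0 0.142857 ... 1.0    <- (deg + 1) + point count = 8 evenly spaced values
#   end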
def write_file(filepath, objects, scene,
EXPORT_TRI=False,
EXPORT_EDGES=False,
EXPORT_NORMALS=False,
EXPORT_NORMALS_HQ=False,
EXPORT_UV=True,
EXPORT_MTL=True,
EXPORT_COPY_IMAGES=False,
EXPORT_APPLY_MODIFIERS=True,
EXPORT_ROTX90=True,
EXPORT_BLEN_OBS=True,
EXPORT_GROUP_BY_OB=False,
EXPORT_GROUP_BY_MAT=False,
EXPORT_KEEP_VERT_ORDER=False,
EXPORT_POLYGROUPS=False,
EXPORT_CURVE_AS_NURBS=True):
'''
Basic write function. The context and options must already be set.
This can be accessed externally,
e.g.
write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
'''
# XXX
import math
def veckey3d(v):
return round(v.x, 6), round(v.y, 6), round(v.z, 6)
def veckey2d(v):
return round(v[0], 6), round(v[1], 6)
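# Rounding to 6 decimals gives hashable keys used below to de-duplicate normals
# and UVs: two values that differ only past the 6th decimal collapse to the same
# 'vn'/'vt' entry.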
def findVertexGroupName(face, vWeightMap):
"""
Searches the vertexDict to see which group is assigned to a given face.
We use a frequency system in order to sort out the name, because a given vertex can
belong to two or more groups at the same time. To find the right name for the face
we list all the possible vertex group names with their frequency and then sort by
frequency in descending order. The top element, the one shared by the highest number
of vertices, is the face's group.
"""
weightDict = {}
for vert_index in face.vertices:
# for vert in face:
vWeights = vWeightMap[vert_index]
# vWeights = vWeightMap[vert]
for vGroupName, weight in vWeights:
weightDict[vGroupName] = weightDict.get(vGroupName, 0) + weight
if weightDict:
alist = [(weight,vGroupName) for vGroupName, weight in weightDict.items()] # sort least to greatest amount of weight
alist.sort()
return(alist[-1][1]) # highest value last
else:
return '(null)'
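# Hypothetical example: a quad whose four verts accumulate
#   {'Arm': 2.7, 'Hand': 0.4}
# is written under the group name 'Arm'.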
print('OBJ Export path: %r' % filepath)
temp_mesh_name = '~tmp-mesh'
time1 = time.clock()
# time1 = sys.time()
# scn = Scene.GetCurrent()
file = open(filepath, "w")
# Write Header
file.write('# Blender v%s OBJ File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
file.write('# www.blender.org\n')
# Tell the obj file what material file to use.
if EXPORT_MTL:
mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
file.write('mtllib %s\n' % repr(os.path.basename(mtlfilepath))[1:-1]) # filepath can contain non utf8 chars, use repr
if EXPORT_ROTX90:
mat_xrot90= mathutils.Matrix.Rotation(-math.pi/2, 4, 'X')
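# The -90 degree rotation about X maps Blender's Z-up axes onto the Y-up
# convention most OBJ importers expect.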
# Initialize totals, these are updated each object
totverts = totuvco = totno = 1
face_vert_index = 1
globalNormals = {}
# A Dict of Materials
# (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
mtl_dict = {}
# Get all meshes
for ob_main in objects:
# ignore dupli children
if ob_main.parent and ob_main.parent.dupli_type != 'NONE':
# XXX
print(ob_main.name, 'is a dupli child - ignoring')
continue
obs = []
if ob_main.dupli_type != 'NONE':
# XXX
print('creating dupli_list on', ob_main.name)
ob_main.create_dupli_list(scene)
obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
# XXX debug print
print(ob_main.name, 'has', len(obs), 'dupli children')
else:
obs = [(ob_main, ob_main.matrix_world)]
for ob, ob_mat in obs:
# Nurbs curve support
if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
if EXPORT_ROTX90:
ob_mat = ob_mat * mat_xrot90
totverts += write_nurb(file, ob, ob_mat)
continue
# END NURBS
if ob.type != 'MESH':
continue
me = ob.create_mesh(scene, EXPORT_APPLY_MODIFIERS, 'PREVIEW')
if EXPORT_ROTX90:
me.transform(mat_xrot90 * ob_mat)
else:
me.transform(ob_mat)
# # Will work for non meshes now! :)
# me= BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, EXPORT_POLYGROUPS, scn)
# if not me:
# continue
if EXPORT_UV:
faceuv = len(me.uv_textures) > 0
if faceuv:
uv_layer = me.uv_textures.active.data[:]
else:
faceuv = False
me_verts = me.vertices[:]
# Make our own list so it can be sorted to reduce context switching
face_index_pairs = [ (face, index) for index, face in enumerate(me.faces)]
# faces = [ f for f in me.faces ]
if EXPORT_EDGES:
edges = me.edges
else:
edges = []
if not (len(face_index_pairs)+len(edges)+len(me.vertices)): # Make sure there is something to write
# clean up
bpy.data.meshes.remove(me)
continue # don't bother with this mesh.
# XXX
# High Quality Normals
if EXPORT_NORMALS and face_index_pairs:
me.calc_normals()
# if EXPORT_NORMALS_HQ:
# BPyMesh.meshCalcNormals(me)
# else:
# # transforming normals is incorrect
# # when the matrix is scaled,
# # better to recalculate them
# me.calcNormals()
materials = me.materials
materialNames = []
materialItems = [m for m in materials]
if materials:
for mat in materials:
if mat:
materialNames.append(mat.name)
else:
materialNames.append(None)
# Can't use a list comprehension because some materials are None.
# materialNames = map(lambda mat: mat.name, materials) # Blender bug, doesn't account for null materials, still broken.
# It's possible there are null materials, which will mess up the indices,
# but at least it will export; wait until Blender gets fixed.
materialNames.extend((16-len(materialNames)) * [None])
materialItems.extend((16-len(materialItems)) * [None])
# Sort by Material, then images
# so we don't over-context-switch in the obj file.
if EXPORT_KEEP_VERT_ORDER:
pass
elif faceuv:
face_index_pairs.sort(key=lambda a: (a[0].material_index, hash(uv_layer[a[1]].image), a[0].use_smooth))
elif len(materials) > 1:
face_index_pairs.sort(key = lambda a: (a[0].material_index, a[0].use_smooth))
else:
# no materials
face_index_pairs.sort(key = lambda a: a[0].use_smooth)
# if EXPORT_KEEP_VERT_ORDER:
# pass
# elif faceuv:
# try: faces.sort(key = lambda a: (a.mat, a.image, a.use_smooth))
# except: faces.sort(lambda a,b: cmp((a.mat, a.image, a.use_smooth), (b.mat, b.image, b.use_smooth)))
# elif len(materials) > 1:
# try: faces.sort(key = lambda a: (a.mat, a.use_smooth))
# except: faces.sort(lambda a,b: cmp((a.mat, a.use_smooth), (b.mat, b.use_smooth)))
# else:
# # no materials
# try: faces.sort(key = lambda a: a.use_smooth)
# except: faces.sort(lambda a,b: cmp(a.use_smooth, b.use_smooth))
# Set the default mat to no material and no image.
contextMat = (0, 0) # Can never be this, so we will label a new material the first chance we get.
contextSmooth = None # Will either be true or false, set bad to force initialization switch.
if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
name1 = ob.name
name2 = ob.data.name
if name1 == name2:
obnamestring = fixName(name1)
else:
obnamestring = '%s_%s' % (fixName(name1), fixName(name2))
if EXPORT_BLEN_OBS:
file.write('o %s\n' % obnamestring) # Write Object name
else: # if EXPORT_GROUP_BY_OB:
file.write('g %s\n' % obnamestring)
# Vert
for v in me_verts:
file.write('v %.6f %.6f %.6f\n' % v.co[:])
# UV
if faceuv:
uv_face_mapping = [[0,0,0,0] for i in range(len(face_index_pairs))] # a bit of a waste for tri's :/
uv_dict = {} # could use a set() here
uv_layer = me.uv_textures.active.data
for f, f_index in face_index_pairs:
for uv_index, uv in enumerate(uv_layer[f_index].uv):
uvkey = veckey2d(uv)
try:
uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
except:
uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
file.write('vt %.6f %.6f\n' % uv[:])
uv_unique_count = len(uv_dict)
# del uv, uvkey, uv_dict, f_index, uv_index
# Only need uv_unique_count and uv_face_mapping
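# Hypothetical illustration: two quads sharing two UV coordinates might end up with
# uv_face_mapping like [[0, 1, 2, 3], [2, 3, 4, 5]] and uv_unique_count == 6.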
# NORMAL, Smooth/Non smoothed.
if EXPORT_NORMALS:
for f, f_index in face_index_pairs:
if f.use_smooth:
for v_idx in f.vertices:
v = me_verts[v_idx]
noKey = veckey3d(v.normal)
if noKey not in globalNormals:
globalNormals[noKey] = totno
totno +=1
file.write('vn %.6f %.6f %.6f\n' % noKey)
else:
# Hard, 1 normal from the face.
noKey = veckey3d(f.normal)
if noKey not in globalNormals:
globalNormals[noKey] = totno
totno +=1
file.write('vn %.6f %.6f %.6f\n' % noKey)
if not faceuv:
f_image = None
# XXX
if EXPORT_POLYGROUPS:
# Retrieve the list of vertex groups
vertGroupNames = [g.name for g in ob.vertex_groups]
currentVGroup = ''
# Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
vgroupsMap = [[] for _i in range(len(me_verts))]
for v_idx, v in enumerate(me.vertices):
for g in v.groups:
vgroupsMap[v_idx].append((vertGroupNames[g.group], g.weight))
for f, f_index in face_index_pairs:
f_smooth= f.use_smooth
f_mat = min(f.material_index, len(materialNames)-1)
if faceuv:
tface = uv_layer[f_index]
f_image = tface.image
# MAKE KEY
if faceuv and f_image: # Object is always true.
key = materialNames[f_mat], f_image.name
else:
key = materialNames[f_mat], None # No image, use None instead.
# Write the vertex group
if EXPORT_POLYGROUPS:
if ob.vertex_groups:
# find which vertex group the face belongs to
theVGroup = findVertexGroupName(f,vgroupsMap)
if theVGroup != currentVGroup:
currentVGroup = theVGroup
file.write('g %s\n' % theVGroup)
# CHECK FOR CONTEXT SWITCH
if key == contextMat:
pass # Context already switched, dont do anything
else:
if key[0] is None and key[1] is None:
# Write a null material, since we know the context has changed.
if EXPORT_GROUP_BY_MAT:
# can be mat_image or (null)
file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.data.name)) ) # can be mat_image or (null)
file.write('usemtl (null)\n') # mat, image
else:
mat_data= mtl_dict.get(key)
if not mat_data:
# First add to global dict so we can export to mtl
# Then write mtl
# Make a new name from the mat and image name,
# converting any spaces to underscores with fixName.
# If there is no image, don't bother adding it to the name.
if key[1] is None:
mat_data = mtl_dict[key] = ('%s'%fixName(key[0])), materialItems[f_mat], f_image
else:
mat_data = mtl_dict[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image
if EXPORT_GROUP_BY_MAT:
file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.data.name), mat_data[0]) ) # can be mat_image or (null)
file.write('usemtl %s\n' % mat_data[0]) # can be mat_image or (null)
contextMat = key
if f_smooth != contextSmooth:
if f_smooth: # was off, now on
file.write('s 1\n')
contextSmooth = f_smooth
else: # was on, now off
file.write('s off\n')
contextSmooth = f_smooth
f_v_orig = [(vi, me_verts[v_idx]) for vi, v_idx in enumerate(f.vertices)]
if not EXPORT_TRI or len(f_v_orig) == 3:
f_v_iter = (f_v_orig, )
else:
f_v_iter = (f_v_orig[0], f_v_orig[1], f_v_orig[2]), (f_v_orig[0], f_v_orig[2], f_v_orig[3])
# support for triangulation
for f_v in f_v_iter:
file.write('f')
if faceuv:
if EXPORT_NORMALS:
if f_smooth: # Smoothed, use vertex normals
for vi, v in f_v:
file.write( ' %d/%d/%d' % \
(v.index + totverts,
totuvco + uv_face_mapping[f_index][vi],
globalNormals[ veckey3d(v.normal) ]) ) # vert, uv, normal
else: # No smoothing, face normals
no = globalNormals[ veckey3d(f.normal) ]
for vi, v in f_v:
file.write( ' %d/%d/%d' % \
(v.index + totverts,
totuvco + uv_face_mapping[f_index][vi],
no) ) # vert, uv, normal
else: # No Normals
for vi, v in f_v:
file.write( ' %d/%d' % (\
v.index + totverts,\
totuvco + uv_face_mapping[f_index][vi])) # vert, uv
face_vert_index += len(f_v)
else: # No UV's
if EXPORT_NORMALS:
if f_smooth: # Smoothed, use vertex normals
for vi, v in f_v:
file.write( ' %d//%d' %
(v.index + totverts, globalNormals[ veckey3d(v.normal) ]) )
else: # No smoothing, face normals
no = globalNormals[ veckey3d(f.normal) ]
for vi, v in f_v:
file.write( ' %d//%d' % (v.index + totverts, no) )
else: # No Normals
for vi, v in f_v:
file.write( ' %d' % (v.index + totverts) )
file.write('\n')
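# The face records written above look like (illustrative):
#   f 1/1/1 2/2/1 3/3/1   (vert/uv/normal)
#   f 1/1 2/2 3/3         (vert/uv, no normals)
#   f 1//1 2//2 3//3      (vert//normal, no UVs)
#   f 1 2 3               (vert indices only)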
# Write edges.
if EXPORT_EDGES:
for ed in edges:
if ed.is_loose:
file.write('f %d %d\n' % (ed.vertices[0] + totverts, ed.vertices[1] + totverts))
# Make the indices global rather than per mesh
totverts += len(me_verts)
if faceuv:
totuvco += uv_unique_count
# clean up
bpy.data.meshes.remove(me)
if ob_main.dupli_type != 'NONE':
ob_main.free_dupli_list()
file.close()
# Now we have all our materials, save them
if EXPORT_MTL:
write_mtl(scene, mtlfilepath, EXPORT_COPY_IMAGES, mtl_dict)
# if EXPORT_COPY_IMAGES:
# dest_dir = os.path.basename(filepath)
# # dest_dir = filepath
# # # Remove chars until we are just the path.
# # while dest_dir and dest_dir[-1] not in '\\/':
# # dest_dir = dest_dir[:-1]
# if dest_dir:
# copy_images(dest_dir, mtl_dict)
# else:
# print('\tError: "%s" could not be used as a base for an image path.' % filepath)
print("OBJ Export time: %.2f" % (time.clock() - time1))
#
def _write(context, filepath,
EXPORT_TRI, # ok
EXPORT_EDGES,
EXPORT_NORMALS, # not yet
EXPORT_NORMALS_HQ, # not yet
EXPORT_UV, # ok
EXPORT_MTL,
EXPORT_COPY_IMAGES,
EXPORT_APPLY_MODIFIERS, # ok
EXPORT_ROTX90, # wrong
EXPORT_BLEN_OBS,
EXPORT_GROUP_BY_OB,
EXPORT_GROUP_BY_MAT,
EXPORT_KEEP_VERT_ORDER,
EXPORT_POLYGROUPS,
EXPORT_CURVE_AS_NURBS,
EXPORT_SEL_ONLY, # ok
EXPORT_ALL_SCENES, # XXX not working atm
EXPORT_ANIMATION): # Not used
base_name, ext = os.path.splitext(filepath)
context_name = [base_name, '', '', ext] # Base name, scene name, frame number, extension
orig_scene = context.scene
# Exit edit mode before exporting, so current object states are exported properly.
if bpy.ops.object.mode_set.poll():
bpy.ops.object.mode_set(mode='OBJECT')
# if EXPORT_ALL_SCENES:
# export_scenes = bpy.data.scenes
# else:
# export_scenes = [orig_scene]
# XXX only exporting one scene atm since changing
# current scene is not possible.
# Brecht says that ideally in 2.5 we won't need such a function,
# allowing multiple scenes open at once.
export_scenes = [orig_scene]
# Export all scenes.
for scene in export_scenes:
# scene.makeCurrent() # If already current, this is not slow.
# context = scene.getRenderingContext()
orig_frame = scene.frame_current
if EXPORT_ALL_SCENES: # Add scene name into the context_name
context_name[1] = '_%s' % bpy.path.clean_name(scene.name) # WARNING, it's possible that this could cause a collision. We could fix this if we were feeling paranoid.
# Export an animation?
if EXPORT_ANIMATION:
scene_frames = range(scene.frame_start, scene.frame_end + 1) # Up to and including the end frame.
else:
scene_frames = [orig_frame] # Don't export an animation.
# Loop through all frames in the scene and export.
for frame in scene_frames:
if EXPORT_ANIMATION: # Add frame to the filepath.
context_name[2] = '_%.6d' % frame
scene.frame_set(frame, 0.0)
if EXPORT_SEL_ONLY:
objects = context.selected_objects
else:
objects = scene.objects
full_path = ''.join(context_name)
# erm... bit of a problem here: this can overwrite files when exporting frames. Not too bad.
# EXPORT THE FILE.
write_file(full_path, objects, scene,
EXPORT_TRI,
EXPORT_EDGES,
EXPORT_NORMALS,
EXPORT_NORMALS_HQ,
EXPORT_UV,
EXPORT_MTL,
EXPORT_COPY_IMAGES,
EXPORT_APPLY_MODIFIERS,
EXPORT_ROTX90,
EXPORT_BLEN_OBS,
EXPORT_GROUP_BY_OB,
EXPORT_GROUP_BY_MAT,
EXPORT_KEEP_VERT_ORDER,
EXPORT_POLYGROUPS,
EXPORT_CURVE_AS_NURBS)
scene.frame_set(orig_frame, 0.0)
# Restore old active scene.
# orig_scene.makeCurrent()
# Window.WaitCursor(0)
'''
Currently the exporter lacks these features:
* multiple scene export (only active scene is written)
* particles
'''
def save(operator, context, filepath="",
use_triangles=False,
use_edges=True,
use_normals=False,
use_hq_normals=False,
use_uvs=True,
use_materials=True,
copy_images=False,
use_modifiers=True,
use_rotate_x90=True,
use_blen_objects=True,
group_by_object=False,
group_by_material=False,
keep_vertex_order=False,
use_vertex_groups=False,
use_nurbs=True,
use_selection=True,
use_all_scenes=False,
use_animation=False,
):
_write(context, filepath,
EXPORT_TRI=use_triangles,
EXPORT_EDGES=use_edges,
EXPORT_NORMALS=use_normals,
EXPORT_NORMALS_HQ=use_hq_normals,
EXPORT_UV=use_uvs,
EXPORT_MTL=use_materials,
EXPORT_COPY_IMAGES=copy_images,
EXPORT_APPLY_MODIFIERS=use_modifiers,
EXPORT_ROTX90=use_rotate_x90,
EXPORT_BLEN_OBS=use_blen_objects,
EXPORT_GROUP_BY_OB=group_by_object,
EXPORT_GROUP_BY_MAT=group_by_material,
EXPORT_KEEP_VERT_ORDER=keep_vertex_order,
EXPORT_POLYGROUPS=use_vertex_groups,
EXPORT_CURVE_AS_NURBS=use_nurbs,
EXPORT_SEL_ONLY=use_selection,
EXPORT_ALL_SCENES=use_all_scenes,
EXPORT_ANIMATION=use_animation,
)
return {'FINISHED'}
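# Minimal usage sketch (assumption: this module is registered through the usual
# io_scene_obj add-on and its operator id is 'export_scene.obj'):
#
#   import bpy
#   bpy.ops.export_scene.obj(filepath="/tmp/scene.obj", use_selection=True)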

File diff suppressed because it is too large

@ -1,9 +1,6 @@
# Configuration Blender
import bpy
wm = bpy.context.window_manager
wm.keyconfigs.active = wm.keyconfigs['Blender']
bpy.context.user_preferences.view.use_mouse_auto_depth = False
bpy.context.user_preferences.view.use_zoom_to_mouse = False
bpy.context.user_preferences.view.use_rotate_around_active = False

@ -0,0 +1,10 @@
# Configuration Blender
import bpy
bpy.context.user_preferences.edit.use_drag_immediately = True
bpy.context.user_preferences.edit.use_insertkey_xyz_to_rgb = False
bpy.context.user_preferences.inputs.select_mouse = 'LEFT'
bpy.context.user_preferences.inputs.view_zoom_method = 'DOLLY'
bpy.context.user_preferences.inputs.view_zoom_axis = 'HORIZONTAL'
bpy.context.user_preferences.inputs.view_rotate_method = 'TURNTABLE'
bpy.context.user_preferences.inputs.invert_mouse_wheel_zoom = True

@ -7,380 +7,372 @@ kc = wm.keyconfigs.new('maya')
# Map 3D View
km = kc.keymaps.new('3D View', space_type='VIEW_3D', region_type='WINDOW', modal=False)
kmi = km.items.new('view3d.manipulator', 'LEFTMOUSE', 'PRESS', any=True)
kmi = km.keymap_items.new('view3d.manipulator', 'LEFTMOUSE', 'PRESS', any=True)
kmi.properties.release_confirm = True
kmi = km.items.new('view3d.cursor3d', 'ACTIONMOUSE', 'PRESS')
kmi = km.items.new('view3d.rotate', 'LEFTMOUSE', 'PRESS', alt=True)
kmi = km.items.new('view3d.move', 'MIDDLEMOUSE', 'PRESS', alt=True)
kmi = km.items.new('view3d.zoom', 'RIGHTMOUSE', 'PRESS', alt=True)
kmi = km.items.new('view3d.view_selected', 'NUMPAD_PERIOD', 'PRESS')
kmi = km.items.new('view3d.view_center_cursor', 'NUMPAD_PERIOD', 'PRESS', ctrl=True)
kmi = km.items.new('view3d.fly', 'F', 'PRESS', shift=True)
kmi = km.items.new('view3d.smoothview', 'TIMER1', 'ANY', any=True)
kmi = km.items.new('view3d.rotate', 'TRACKPADPAN', 'ANY', alt=True)
kmi = km.items.new('view3d.rotate', 'MOUSEROTATE', 'ANY')
kmi = km.items.new('view3d.move', 'TRACKPADPAN', 'ANY')
kmi = km.items.new('view3d.zoom', 'TRACKPADZOOM', 'ANY')
kmi = km.items.new('view3d.zoom', 'NUMPAD_PLUS', 'PRESS')
kmi = km.keymap_items.new('view3d.cursor3d', 'ACTIONMOUSE', 'PRESS')
kmi = km.keymap_items.new('view3d.rotate', 'LEFTMOUSE', 'PRESS', alt=True)
kmi = km.keymap_items.new('view3d.move', 'MIDDLEMOUSE', 'PRESS', alt=True)
kmi = km.keymap_items.new('view3d.zoom', 'RIGHTMOUSE', 'PRESS', alt=True)
kmi = km.keymap_items.new('view3d.view_selected', 'NUMPAD_PERIOD', 'PRESS')
kmi = km.keymap_items.new('view3d.view_center_cursor', 'NUMPAD_PERIOD', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.fly', 'F', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.smoothview', 'TIMER1', 'ANY', any=True)
kmi = km.keymap_items.new('view3d.rotate', 'TRACKPADPAN', 'ANY', alt=True)
kmi = km.keymap_items.new('view3d.rotate', 'MOUSEROTATE', 'ANY')
kmi = km.keymap_items.new('view3d.move', 'TRACKPADPAN', 'ANY')
kmi = km.keymap_items.new('view3d.zoom', 'TRACKPADZOOM', 'ANY')
kmi = km.keymap_items.new('view3d.zoom', 'NUMPAD_PLUS', 'PRESS')
kmi.properties.delta = 1
kmi = km.items.new('view3d.zoom', 'NUMPAD_MINUS', 'PRESS')
kmi = km.keymap_items.new('view3d.zoom', 'NUMPAD_MINUS', 'PRESS')
kmi.properties.delta = -1
kmi = km.items.new('view3d.zoom', 'EQUAL', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.zoom', 'EQUAL', 'PRESS', ctrl=True)
kmi.properties.delta = 1
kmi = km.items.new('view3d.zoom', 'MINUS', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.zoom', 'MINUS', 'PRESS', ctrl=True)
kmi.properties.delta = -1
kmi = km.items.new('view3d.zoom', 'WHEELINMOUSE', 'PRESS')
kmi = km.keymap_items.new('view3d.zoom', 'WHEELINMOUSE', 'PRESS')
kmi.properties.delta = 1
kmi = km.items.new('view3d.zoom', 'WHEELOUTMOUSE', 'PRESS')
kmi = km.keymap_items.new('view3d.zoom', 'WHEELOUTMOUSE', 'PRESS')
kmi.properties.delta = -1
kmi = km.items.new('view3d.view_all', 'HOME', 'PRESS')
kmi = km.keymap_items.new('view3d.view_all', 'HOME', 'PRESS')
kmi.properties.center = False
kmi = km.items.new('view3d.view_all', 'C', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.view_all', 'C', 'PRESS', shift=True)
kmi.properties.center = True
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_0', 'PRESS')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_0', 'PRESS')
kmi.properties.type = 'CAMERA'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS')
kmi.properties.type = 'FRONT'
kmi = km.items.new('view3d.view_orbit', 'NUMPAD_2', 'PRESS')
kmi = km.keymap_items.new('view3d.view_orbit', 'NUMPAD_2', 'PRESS')
kmi.properties.type = 'ORBITDOWN'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS')
kmi.properties.type = 'RIGHT'
kmi = km.items.new('view3d.view_orbit', 'NUMPAD_4', 'PRESS')
kmi = km.keymap_items.new('view3d.view_orbit', 'NUMPAD_4', 'PRESS')
kmi.properties.type = 'ORBITLEFT'
kmi = km.items.new('view3d.view_persportho', 'NUMPAD_5', 'PRESS')
kmi = km.items.new('view3d.view_orbit', 'NUMPAD_6', 'PRESS')
kmi = km.keymap_items.new('view3d.view_persportho', 'NUMPAD_5', 'PRESS')
kmi = km.keymap_items.new('view3d.view_orbit', 'NUMPAD_6', 'PRESS')
kmi.properties.type = 'ORBITRIGHT'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS')
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS')
kmi.properties.type = 'TOP'
kmi = km.items.new('view3d.view_orbit', 'NUMPAD_8', 'PRESS')
kmi = km.keymap_items.new('view3d.view_orbit', 'NUMPAD_8', 'PRESS')
kmi.properties.type = 'ORBITUP'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS', ctrl=True)
kmi.properties.type = 'BACK'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS', ctrl=True)
kmi.properties.type = 'LEFT'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS', ctrl=True)
kmi.properties.type = 'BOTTOM'
kmi = km.items.new('view3d.view_pan', 'NUMPAD_2', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.view_pan', 'NUMPAD_2', 'PRESS', ctrl=True)
kmi.properties.type = 'PANDOWN'
kmi = km.items.new('view3d.view_pan', 'NUMPAD_4', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.view_pan', 'NUMPAD_4', 'PRESS', ctrl=True)
kmi.properties.type = 'PANLEFT'
kmi = km.items.new('view3d.view_pan', 'NUMPAD_6', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.view_pan', 'NUMPAD_6', 'PRESS', ctrl=True)
kmi.properties.type = 'PANRIGHT'
kmi = km.items.new('view3d.view_pan', 'NUMPAD_8', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.view_pan', 'NUMPAD_8', 'PRESS', ctrl=True)
kmi.properties.type = 'PANUP'
kmi = km.items.new('view3d.view_pan', 'WHEELUPMOUSE', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.view_pan', 'WHEELUPMOUSE', 'PRESS', ctrl=True)
kmi.properties.type = 'PANRIGHT'
kmi = km.items.new('view3d.view_pan', 'WHEELDOWNMOUSE', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.view_pan', 'WHEELDOWNMOUSE', 'PRESS', ctrl=True)
kmi.properties.type = 'PANLEFT'
kmi = km.items.new('view3d.view_pan', 'WHEELUPMOUSE', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.view_pan', 'WHEELUPMOUSE', 'PRESS', shift=True)
kmi.properties.type = 'PANUP'
kmi = km.items.new('view3d.view_pan', 'WHEELDOWNMOUSE', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.view_pan', 'WHEELDOWNMOUSE', 'PRESS', shift=True)
kmi.properties.type = 'PANDOWN'
kmi = km.items.new('view3d.view_orbit', 'WHEELUPMOUSE', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('view3d.view_orbit', 'WHEELUPMOUSE', 'PRESS', ctrl=True, alt=True)
kmi.properties.type = 'ORBITLEFT'
kmi = km.items.new('view3d.view_orbit', 'WHEELDOWNMOUSE', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('view3d.view_orbit', 'WHEELDOWNMOUSE', 'PRESS', ctrl=True, alt=True)
kmi.properties.type = 'ORBITRIGHT'
kmi = km.items.new('view3d.view_orbit', 'WHEELUPMOUSE', 'PRESS', shift=True, alt=True)
kmi = km.keymap_items.new('view3d.view_orbit', 'WHEELUPMOUSE', 'PRESS', shift=True, alt=True)
kmi.properties.type = 'ORBITUP'
kmi = km.items.new('view3d.view_orbit', 'WHEELDOWNMOUSE', 'PRESS', shift=True, alt=True)
kmi = km.keymap_items.new('view3d.view_orbit', 'WHEELDOWNMOUSE', 'PRESS', shift=True, alt=True)
kmi.properties.type = 'ORBITDOWN'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS', shift=True)
kmi.properties.align_active = True
kmi.properties.type = 'FRONT'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS', shift=True)
kmi.properties.align_active = True
kmi.properties.type = 'RIGHT'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS', shift=True)
kmi.properties.align_active = True
kmi.properties.type = 'TOP'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_1', 'PRESS', shift=True, ctrl=True)
kmi.properties.align_active = True
kmi.properties.type = 'BACK'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_3', 'PRESS', shift=True, ctrl=True)
kmi.properties.align_active = True
kmi.properties.type = 'LEFT'
kmi = km.items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('view3d.viewnumpad', 'NUMPAD_7', 'PRESS', shift=True, ctrl=True)
kmi.properties.align_active = True
kmi.properties.type = 'BOTTOM'
kmi = km.items.new('view3d.localview', 'NUMPAD_SLASH', 'PRESS')
kmi = km.items.new('view3d.layers', 'ACCENT_GRAVE', 'PRESS')
kmi = km.keymap_items.new('view3d.localview', 'NUMPAD_SLASH', 'PRESS')
kmi = km.keymap_items.new('view3d.layers', 'ACCENT_GRAVE', 'PRESS')
kmi.properties.nr = 0
kmi = km.items.new('view3d.layers', 'ONE', 'PRESS', any=True)
kmi = km.keymap_items.new('view3d.layers', 'ONE', 'PRESS', any=True)
kmi.properties.nr = 1
kmi = km.items.new('view3d.layers', 'TWO', 'PRESS', any=True)
kmi = km.keymap_items.new('view3d.layers', 'TWO', 'PRESS', any=True)
kmi.properties.nr = 2
kmi = km.items.new('view3d.layers', 'THREE', 'PRESS', any=True)
kmi = km.keymap_items.new('view3d.layers', 'THREE', 'PRESS', any=True)
kmi.properties.nr = 3
kmi = km.items.new('view3d.layers', 'FOUR', 'PRESS', any=True)
kmi = km.keymap_items.new('view3d.layers', 'FOUR', 'PRESS', any=True)
kmi.properties.nr = 4
kmi = km.items.new('view3d.layers', 'FIVE', 'PRESS', any=True)
kmi = km.keymap_items.new('view3d.layers', 'FIVE', 'PRESS', any=True)
kmi.properties.nr = 5
kmi = km.items.new('view3d.layers', 'SIX', 'PRESS', any=True)
kmi = km.keymap_items.new('view3d.layers', 'SIX', 'PRESS', any=True)
kmi.properties.nr = 6
kmi = km.items.new('view3d.layers', 'SEVEN', 'PRESS', any=True)
kmi = km.keymap_items.new('view3d.layers', 'SEVEN', 'PRESS', any=True)
kmi.properties.nr = 7
kmi = km.items.new('view3d.layers', 'EIGHT', 'PRESS', any=True)
kmi = km.keymap_items.new('view3d.layers', 'EIGHT', 'PRESS', any=True)
kmi.properties.nr = 8
kmi = km.items.new('view3d.layers', 'NINE', 'PRESS', any=True)
kmi = km.keymap_items.new('view3d.layers', 'NINE', 'PRESS', any=True)
kmi.properties.nr = 9
kmi = km.items.new('view3d.layers', 'ZERO', 'PRESS', any=True)
kmi = km.keymap_items.new('view3d.layers', 'ZERO', 'PRESS', any=True)
kmi.properties.nr = 10
kmi = km.items.new('wm.context_toggle_enum', 'Z', 'PRESS')
kmi = km.keymap_items.new('wm.context_toggle_enum', 'Z', 'PRESS')
kmi.properties.data_path = 'space_data.viewport_shade'
kmi.properties.value_1 = 'SOLID'
kmi.properties.value_2 = 'WIREFRAME'
kmi = km.items.new('wm.context_toggle_enum', 'Z', 'PRESS', alt=True)
kmi = km.keymap_items.new('wm.context_toggle_enum', 'Z', 'PRESS', alt=True)
kmi.properties.data_path = 'space_data.viewport_shade'
kmi.properties.value_1 = 'TEXTURED'
kmi.properties.value_2 = 'SOLID'
kmi = km.items.new('view3d.select', 'SELECTMOUSE', 'PRESS')
kmi = km.items.new('view3d.select', 'SELECTMOUSE', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.select', 'SELECTMOUSE', 'PRESS')
kmi = km.keymap_items.new('view3d.select', 'SELECTMOUSE', 'PRESS', shift=True)
kmi.properties.extend = True
kmi = km.items.new('view3d.select', 'SELECTMOUSE', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.select', 'SELECTMOUSE', 'PRESS', ctrl=True)
kmi.properties.center = True
kmi = km.items.new('view3d.select', 'SELECTMOUSE', 'PRESS', alt=True)
kmi = km.keymap_items.new('view3d.select', 'SELECTMOUSE', 'PRESS', alt=True)
kmi.properties.enumerate = True
kmi = km.items.new('view3d.select', 'SELECTMOUSE', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('view3d.select', 'SELECTMOUSE', 'PRESS', shift=True, ctrl=True)
kmi.properties.center = True
kmi.properties.extend = True
kmi = km.items.new('view3d.select', 'SELECTMOUSE', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('view3d.select', 'SELECTMOUSE', 'PRESS', ctrl=True, alt=True)
kmi.properties.center = True
kmi.properties.enumerate = True
kmi = km.items.new('view3d.select', 'SELECTMOUSE', 'PRESS', shift=True, alt=True)
kmi = km.keymap_items.new('view3d.select', 'SELECTMOUSE', 'PRESS', shift=True, alt=True)
kmi.properties.enumerate = True
kmi.properties.extend = True
kmi = km.items.new('view3d.select', 'SELECTMOUSE', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.keymap_items.new('view3d.select', 'SELECTMOUSE', 'PRESS', shift=True, ctrl=True, alt=True)
kmi.properties.center = True
kmi.properties.enumerate = True
kmi.properties.extend = True
kmi = km.items.new('view3d.select_border', 'EVT_TWEAK_S', 'ANY')
kmi = km.keymap_items.new('view3d.select_border', 'EVT_TWEAK_S', 'ANY')
kmi.properties.extend = False
kmi = km.items.new('view3d.select_lasso', 'EVT_TWEAK_A', 'ANY', ctrl=True)
kmi = km.items.new('view3d.select_lasso', 'EVT_TWEAK_A', 'ANY', shift=True, ctrl=True)
kmi = km.keymap_items.new('view3d.select_lasso', 'EVT_TWEAK_A', 'ANY', ctrl=True)
kmi = km.keymap_items.new('view3d.select_lasso', 'EVT_TWEAK_A', 'ANY', shift=True, ctrl=True)
kmi.properties.deselect = True
kmi = km.items.new('view3d.select_circle', 'C', 'PRESS')
kmi = km.items.new('view3d.clip_border', 'B', 'PRESS', alt=True)
kmi = km.items.new('view3d.zoom_border', 'B', 'PRESS', shift=True)
kmi = km.items.new('view3d.render_border', 'B', 'PRESS', shift=True)
kmi = km.items.new('view3d.camera_to_view', 'NUMPAD_0', 'PRESS', ctrl=True, alt=True)
kmi = km.items.new('view3d.object_as_camera', 'NUMPAD_0', 'PRESS', ctrl=True)
kmi = km.items.new('wm.call_menu', 'S', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.select_circle', 'C', 'PRESS')
kmi = km.keymap_items.new('view3d.clip_border', 'B', 'PRESS', alt=True)
kmi = km.keymap_items.new('view3d.zoom_border', 'B', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.render_border', 'B', 'PRESS', shift=True)
kmi = km.keymap_items.new('view3d.camera_to_view', 'NUMPAD_0', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('view3d.object_as_camera', 'NUMPAD_0', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.call_menu', 'S', 'PRESS', shift=True)
kmi.properties.name = 'VIEW3D_MT_snap'
kmi = km.items.new('wm.context_set_enum', 'COMMA', 'PRESS')
kmi = km.keymap_items.new('wm.context_set_enum', 'COMMA', 'PRESS')
kmi.properties.data_path = 'space_data.pivot_point'
kmi.properties.value = 'BOUNDING_BOX_CENTER'
kmi = km.items.new('wm.context_set_enum', 'COMMA', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.context_set_enum', 'COMMA', 'PRESS', ctrl=True)
kmi.properties.data_path = 'space_data.pivot_point'
kmi.properties.value = 'MEDIAN_POINT'
kmi = km.items.new('wm.context_toggle', 'COMMA', 'PRESS', alt=True)
kmi = km.keymap_items.new('wm.context_toggle', 'COMMA', 'PRESS', alt=True)
kmi.properties.data_path = 'space_data.use_pivot_point'
kmi = km.items.new('wm.context_toggle', 'Q', 'PRESS')
kmi = km.keymap_items.new('wm.context_toggle', 'Q', 'PRESS')
kmi.properties.data_path = 'space_data.show_manipulator'
kmi = km.items.new('wm.context_set_enum', 'PERIOD', 'PRESS')
kmi = km.keymap_items.new('wm.context_set_enum', 'PERIOD', 'PRESS')
kmi.properties.data_path = 'space_data.pivot_point'
kmi.properties.value = 'CURSOR'
kmi = km.items.new('wm.context_set_enum', 'PERIOD', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.context_set_enum', 'PERIOD', 'PRESS', ctrl=True)
kmi.properties.data_path = 'space_data.pivot_point'
kmi.properties.value = 'INDIVIDUAL_ORIGINS'
kmi = km.items.new('wm.context_set_enum', 'PERIOD', 'PRESS', alt=True)
kmi = km.keymap_items.new('wm.context_set_enum', 'PERIOD', 'PRESS', alt=True)
kmi.properties.data_path = 'space_data.pivot_point'
kmi.properties.value = 'ACTIVE_ELEMENT'
kmi = km.items.new('transform.translate', 'G', 'PRESS', shift=True)
kmi = km.items.new('transform.translate', 'EVT_TWEAK_S', 'ANY')
kmi = km.items.new('transform.rotate', 'R', 'PRESS', shift=True)
kmi = km.items.new('transform.resize', 'S', 'PRESS', shift=True)
kmi = km.items.new('transform.warp', 'W', 'PRESS', shift=True)
kmi = km.items.new('transform.tosphere', 'S', 'PRESS', shift=True, alt=True)
kmi = km.items.new('transform.shear', 'S', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.items.new('transform.select_orientation', 'SPACE', 'PRESS', alt=True)
kmi = km.items.new('transform.create_orientation', 'SPACE', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('transform.translate', 'G', 'PRESS', shift=True)
kmi = km.keymap_items.new('transform.translate', 'EVT_TWEAK_S', 'ANY')
kmi = km.keymap_items.new('transform.rotate', 'R', 'PRESS', shift=True)
kmi = km.keymap_items.new('transform.resize', 'S', 'PRESS', shift=True)
kmi = km.keymap_items.new('transform.warp', 'W', 'PRESS', shift=True)
kmi = km.keymap_items.new('transform.tosphere', 'S', 'PRESS', shift=True, alt=True)
kmi = km.keymap_items.new('transform.shear', 'S', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.keymap_items.new('transform.select_orientation', 'SPACE', 'PRESS', alt=True)
kmi = km.keymap_items.new('transform.create_orientation', 'SPACE', 'PRESS', ctrl=True, alt=True)
kmi.properties.use = True
kmi = km.items.new('transform.mirror', 'M', 'PRESS', ctrl=True)
kmi = km.items.new('wm.context_toggle', 'TAB', 'PRESS', shift=True)
kmi = km.keymap_items.new('transform.mirror', 'M', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.context_toggle', 'TAB', 'PRESS', shift=True)
kmi.properties.data_path = 'tool_settings.use_snap'
kmi = km.items.new('transform.snap_type', 'TAB', 'PRESS', shift=True, ctrl=True)
kmi = km.items.new('view3d.enable_manipulator', 'W', 'PRESS')
kmi = km.keymap_items.new('transform.snap_type', 'TAB', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('view3d.enable_manipulator', 'W', 'PRESS')
kmi.properties.translate = True
kmi = km.items.new('view3d.enable_manipulator', 'E', 'PRESS')
kmi = km.keymap_items.new('view3d.enable_manipulator', 'E', 'PRESS')
kmi.properties.rotate = True
kmi = km.items.new('view3d.enable_manipulator', 'R', 'PRESS')
kmi = km.keymap_items.new('view3d.enable_manipulator', 'R', 'PRESS')
kmi.properties.scale = True
kmi = km.items.new('view3d.select_border', 'EVT_TWEAK_S', 'ANY', shift=True)
kmi = km.keymap_items.new('view3d.select_border', 'EVT_TWEAK_S', 'ANY', shift=True)
kmi.properties.extend = True
# Map Object Mode
km = kc.keymaps.new('Object Mode', space_type='EMPTY', region_type='WINDOW', modal=False)
kmi = km.items.new('wm.context_cycle_enum', 'O', 'PRESS', shift=True)
kmi = km.keymap_items.new('wm.context_cycle_enum', 'O', 'PRESS', shift=True)
kmi.properties.data_path = 'tool_settings.proportional_edit_falloff'
kmi = km.items.new('wm.context_toggle_enum', 'O', 'PRESS')
kmi = km.keymap_items.new('wm.context_toggle_enum', 'O', 'PRESS')
kmi.properties.data_path = 'tool_settings.proportional_edit'
kmi.properties.value_1 = 'DISABLED'
kmi.properties.value_2 = 'ENABLED'
kmi = km.items.new('view3d.game_start', 'P', 'PRESS')
kmi = km.items.new('object.select_all', 'A', 'PRESS')
kmi = km.items.new('object.select_inverse', 'I', 'PRESS', ctrl=True)
kmi = km.items.new('object.select_linked', 'L', 'PRESS', shift=True)
kmi = km.items.new('object.select_grouped', 'G', 'PRESS', shift=True)
kmi = km.items.new('object.select_mirror', 'M', 'PRESS', shift=True, ctrl=True)
kmi = km.items.new('object.select_hierarchy', 'LEFT_BRACKET', 'PRESS')
kmi = km.keymap_items.new('view3d.game_start', 'P', 'PRESS')
kmi = km.keymap_items.new('object.select_all', 'A', 'PRESS')
kmi = km.keymap_items.new('object.select_inverse', 'I', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.select_linked', 'L', 'PRESS', shift=True)
kmi = km.keymap_items.new('object.select_grouped', 'G', 'PRESS', shift=True)
kmi = km.keymap_items.new('object.select_mirror', 'M', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('object.select_hierarchy', 'LEFT_BRACKET', 'PRESS')
kmi.properties.direction = 'PARENT'
kmi = km.items.new('object.select_hierarchy', 'LEFT_BRACKET', 'PRESS', shift=True)
kmi = km.keymap_items.new('object.select_hierarchy', 'LEFT_BRACKET', 'PRESS', shift=True)
kmi.properties.direction = 'PARENT'
kmi.properties.extend = True
kmi = km.items.new('object.select_hierarchy', 'RIGHT_BRACKET', 'PRESS')
kmi = km.keymap_items.new('object.select_hierarchy', 'RIGHT_BRACKET', 'PRESS')
kmi.properties.direction = 'CHILD'
kmi = km.items.new('object.select_hierarchy', 'RIGHT_BRACKET', 'PRESS', shift=True)
kmi = km.keymap_items.new('object.select_hierarchy', 'RIGHT_BRACKET', 'PRESS', shift=True)
kmi.properties.direction = 'CHILD'
kmi.properties.extend = True
kmi = km.items.new('object.parent_set', 'P', 'PRESS', ctrl=True)
kmi = km.items.new('object.parent_no_inverse_set', 'P', 'PRESS', shift=True, ctrl=True)
kmi = km.items.new('object.parent_clear', 'P', 'PRESS', alt=True)
kmi = km.items.new('object.track_set', 'T', 'PRESS', ctrl=True)
kmi = km.items.new('object.track_clear', 'T', 'PRESS', alt=True)
kmi = km.items.new('object.constraint_add_with_targets', 'C', 'PRESS', shift=True, ctrl=True)
kmi = km.items.new('object.constraints_clear', 'C', 'PRESS', ctrl=True, alt=True)
kmi = km.items.new('object.location_clear', 'G', 'PRESS', alt=True)
kmi = km.items.new('object.rotation_clear', 'R', 'PRESS', alt=True)
kmi = km.items.new('object.scale_clear', 'S', 'PRESS', alt=True)
kmi = km.items.new('object.origin_clear', 'O', 'PRESS', alt=True)
kmi = km.items.new('object.hide_view_clear', 'H', 'PRESS', alt=True)
kmi = km.items.new('object.hide_view_set', 'H', 'PRESS')
kmi = km.items.new('object.hide_view_set', 'H', 'PRESS', shift=True)
kmi = km.keymap_items.new('object.parent_set', 'P', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.parent_no_inverse_set', 'P', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('object.parent_clear', 'P', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.track_set', 'T', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.track_clear', 'T', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.constraint_add_with_targets', 'C', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('object.constraints_clear', 'C', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('object.location_clear', 'G', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.rotation_clear', 'R', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.scale_clear', 'S', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.origin_clear', 'O', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.hide_view_clear', 'H', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.hide_view_set', 'H', 'PRESS')
kmi = km.keymap_items.new('object.hide_view_set', 'H', 'PRESS', shift=True)
kmi.properties.unselected = True
kmi = km.items.new('object.move_to_layer', 'M', 'PRESS')
kmi = km.items.new('object.delete', 'X', 'PRESS')
kmi = km.items.new('object.delete', 'DEL', 'PRESS')
kmi = km.items.new('wm.call_menu', 'A', 'PRESS', shift=True)
kmi = km.keymap_items.new('object.move_to_layer', 'M', 'PRESS')
kmi = km.keymap_items.new('object.delete', 'X', 'PRESS')
kmi = km.keymap_items.new('object.delete', 'DEL', 'PRESS')
kmi = km.keymap_items.new('wm.call_menu', 'A', 'PRESS', shift=True)
kmi.properties.name = 'INFO_MT_add'
kmi = km.items.new('object.duplicates_make_real', 'A', 'PRESS', shift=True, ctrl=True)
kmi = km.items.new('wm.call_menu', 'A', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.duplicates_make_real', 'A', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('wm.call_menu', 'A', 'PRESS', ctrl=True)
kmi.properties.name = 'VIEW3D_MT_object_apply'
kmi = km.items.new('wm.call_menu', 'U', 'PRESS')
kmi = km.keymap_items.new('wm.call_menu', 'U', 'PRESS')
kmi.properties.name = 'VIEW3D_MT_make_single_user'
kmi = km.items.new('wm.call_menu', 'L', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.call_menu', 'L', 'PRESS', ctrl=True)
kmi.properties.name = 'VIEW3D_MT_make_links'
kmi = km.items.new('object.duplicate_move', 'D', 'PRESS', shift=True)
kmi = km.items.new('object.duplicate_move_linked', 'D', 'PRESS', alt=True)
kmi = km.items.new('object.join', 'J', 'PRESS', ctrl=True)
kmi = km.items.new('object.convert', 'C', 'PRESS', alt=True)
kmi = km.items.new('object.proxy_make', 'P', 'PRESS', ctrl=True, alt=True)
kmi = km.items.new('object.make_local', 'L', 'PRESS')
kmi = km.items.new('anim.keyframe_insert_menu', 'I', 'PRESS')
kmi = km.items.new('anim.keyframe_delete_v3d', 'I', 'PRESS', alt=True)
kmi = km.items.new('anim.keying_set_active_set', 'I', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.items.new('group.create', 'G', 'PRESS', ctrl=True)
kmi = km.items.new('group.objects_remove', 'G', 'PRESS', ctrl=True, alt=True)
kmi = km.items.new('group.objects_add_active', 'G', 'PRESS', shift=True, ctrl=True)
kmi = km.items.new('group.objects_remove_active', 'G', 'PRESS', shift=True, alt=True)
kmi = km.items.new('wm.call_menu', 'W', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.duplicate_move', 'D', 'PRESS', shift=True)
kmi = km.keymap_items.new('object.duplicate_move_linked', 'D', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.join', 'J', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.convert', 'C', 'PRESS', alt=True)
kmi = km.keymap_items.new('object.proxy_make', 'P', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('object.make_local', 'L', 'PRESS')
kmi = km.keymap_items.new('anim.keyframe_insert_menu', 'I', 'PRESS')
kmi = km.keymap_items.new('anim.keyframe_delete_v3d', 'I', 'PRESS', alt=True)
kmi = km.keymap_items.new('anim.keying_set_active_set', 'I', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.keymap_items.new('group.create', 'G', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('group.objects_remove', 'G', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('group.objects_add_active', 'G', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('group.objects_remove_active', 'G', 'PRESS', shift=True, alt=True)
kmi = km.keymap_items.new('wm.call_menu', 'W', 'PRESS', ctrl=True)
kmi.properties.name = 'VIEW3D_MT_object_specials'
kmi = km.items.new('object.subdivision_set', 'ZERO', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.subdivision_set', 'ZERO', 'PRESS', ctrl=True)
kmi.properties.level = 0
kmi = km.items.new('object.subdivision_set', 'ONE', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.subdivision_set', 'ONE', 'PRESS', ctrl=True)
kmi.properties.level = 1
kmi = km.items.new('object.subdivision_set', 'TWO', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.subdivision_set', 'TWO', 'PRESS', ctrl=True)
kmi.properties.level = 2
kmi = km.items.new('object.subdivision_set', 'THREE', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.subdivision_set', 'THREE', 'PRESS', ctrl=True)
kmi.properties.level = 3
kmi = km.items.new('object.subdivision_set', 'FOUR', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.subdivision_set', 'FOUR', 'PRESS', ctrl=True)
kmi.properties.level = 4
kmi = km.items.new('object.subdivision_set', 'FIVE', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.subdivision_set', 'FIVE', 'PRESS', ctrl=True)
kmi.properties.level = 5
kmi = km.items.new('object.select_all', 'SELECTMOUSE', 'CLICK')
kmi = km.keymap_items.new('object.select_all', 'SELECTMOUSE', 'CLICK')
kmi.properties.action = 'DESELECT'
# Map Mesh
km = kc.keymaps.new('Mesh', space_type='EMPTY', region_type='WINDOW', modal=False)
kmi = km.items.new('mesh.loopcut_slide', 'R', 'PRESS', ctrl=True)
kmi = km.items.new('mesh.loop_select', 'SELECTMOUSE', 'PRESS', ctrl=True, alt=True)
kmi = km.items.new('mesh.loop_select', 'SELECTMOUSE', 'PRESS', shift=True, alt=True)
kmi = km.keymap_items.new('mesh.loopcut_slide', 'R', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.loop_select', 'SELECTMOUSE', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('mesh.loop_select', 'SELECTMOUSE', 'PRESS', shift=True, alt=True)
kmi.properties.extend = True
kmi = km.items.new('mesh.edgering_select', 'SELECTMOUSE', 'PRESS', ctrl=True, alt=True)
kmi = km.items.new('mesh.edgering_select', 'SELECTMOUSE', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.keymap_items.new('mesh.edgering_select', 'SELECTMOUSE', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('mesh.edgering_select', 'SELECTMOUSE', 'PRESS', shift=True, ctrl=True, alt=True)
kmi.properties.extend = True
kmi = km.items.new('mesh.select_shortest_path', 'SELECTMOUSE', 'PRESS', ctrl=True)
kmi = km.items.new('mesh.select_all', 'A', 'PRESS')
kmi = km.items.new('mesh.select_more', 'NUMPAD_PLUS', 'PRESS', ctrl=True)
kmi = km.items.new('mesh.select_less', 'NUMPAD_MINUS', 'PRESS', ctrl=True)
kmi = km.items.new('mesh.select_inverse', 'I', 'PRESS', ctrl=True)
kmi = km.items.new('mesh.select_non_manifold', 'M', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.items.new('mesh.select_linked', 'L', 'PRESS', ctrl=True)
kmi = km.items.new('mesh.select_linked_pick', 'L', 'PRESS')
kmi = km.items.new('mesh.select_linked_pick', 'L', 'PRESS', shift=True)
kmi = km.keymap_items.new('mesh.select_shortest_path', 'SELECTMOUSE', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.select_all', 'A', 'PRESS')
kmi = km.keymap_items.new('mesh.select_more', 'NUMPAD_PLUS', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.select_less', 'NUMPAD_MINUS', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.select_inverse', 'I', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.select_non_manifold', 'M', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.keymap_items.new('mesh.select_linked', 'L', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.select_linked_pick', 'L', 'PRESS')
kmi = km.keymap_items.new('mesh.select_linked_pick', 'L', 'PRESS', shift=True)
kmi.properties.deselect = True
kmi = km.items.new('mesh.faces_select_linked_flat', 'F', 'PRESS', shift=True, ctrl=True, alt=True)
kmi = km.keymap_items.new('mesh.faces_select_linked_flat', 'F', 'PRESS', shift=True, ctrl=True, alt=True)
kmi.properties.sharpness = 135.0
kmi = km.items.new('mesh.select_similar', 'G', 'PRESS', shift=True)
kmi = km.items.new('wm.call_menu', 'TAB', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.select_similar', 'G', 'PRESS', shift=True)
kmi = km.keymap_items.new('wm.call_menu', 'TAB', 'PRESS', ctrl=True)
kmi.properties.name = 'VIEW3D_MT_edit_mesh_select_mode'
kmi = km.items.new('mesh.hide', 'H', 'PRESS')
kmi = km.items.new('mesh.hide', 'H', 'PRESS', shift=True)
kmi = km.keymap_items.new('mesh.hide', 'H', 'PRESS')
kmi = km.keymap_items.new('mesh.hide', 'H', 'PRESS', shift=True)
kmi.properties.unselected = True
kmi = km.items.new('mesh.reveal', 'H', 'PRESS', alt=True)
kmi = km.items.new('mesh.normals_make_consistent', 'N', 'PRESS', ctrl=True)
kmi = km.items.new('mesh.normals_make_consistent', 'N', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('mesh.reveal', 'H', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.normals_make_consistent', 'N', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.normals_make_consistent', 'N', 'PRESS', shift=True, ctrl=True)
kmi.properties.inside = True
kmi = km.items.new('view3d.edit_mesh_extrude_move_normal', 'E', 'PRESS', ctrl=True)
kmi = km.items.new('view3d.edit_mesh_extrude_individual_move', 'E', 'PRESS', shift=True)
kmi = km.items.new('wm.call_menu', 'E', 'PRESS', alt=True)
kmi = km.keymap_items.new('view3d.edit_mesh_extrude_move_normal', 'E', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('view3d.edit_mesh_extrude_individual_move', 'E', 'PRESS', shift=True)
kmi = km.keymap_items.new('wm.call_menu', 'E', 'PRESS', alt=True)
kmi.properties.name = 'VIEW3D_MT_edit_mesh_extrude'
kmi = km.items.new('mesh.spin', 'R', 'PRESS', alt=True)
kmi = km.items.new('mesh.fill', 'F', 'PRESS', alt=True)
kmi = km.items.new('mesh.beautify_fill', 'F', 'PRESS', shift=True, alt=True)
kmi = km.items.new('mesh.quads_convert_to_tris', 'T', 'PRESS', ctrl=True)
kmi = km.items.new('mesh.tris_convert_to_quads', 'J', 'PRESS', alt=True)
kmi = km.items.new('mesh.edge_flip', 'F', 'PRESS', shift=True, ctrl=True)
kmi = km.items.new('mesh.rip_move', 'V', 'PRESS')
kmi = km.items.new('mesh.merge', 'M', 'PRESS', alt=True)
kmi = km.items.new('transform.shrink_fatten', 'S', 'PRESS', ctrl=True, alt=True)
kmi = km.items.new('mesh.edge_face_add', 'F', 'PRESS')
kmi = km.items.new('mesh.duplicate_move', 'D', 'PRESS', shift=True)
kmi = km.items.new('wm.call_menu', 'A', 'PRESS', shift=True)
kmi = km.keymap_items.new('mesh.spin', 'R', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.fill', 'F', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.beautify_fill', 'F', 'PRESS', shift=True, alt=True)
kmi = km.keymap_items.new('mesh.quads_convert_to_tris', 'T', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('mesh.tris_convert_to_quads', 'J', 'PRESS', alt=True)
kmi = km.keymap_items.new('mesh.edge_flip', 'F', 'PRESS', shift=True, ctrl=True)
kmi = km.keymap_items.new('mesh.rip_move', 'V', 'PRESS')
kmi = km.keymap_items.new('mesh.merge', 'M', 'PRESS', alt=True)
kmi = km.keymap_items.new('transform.shrink_fatten', 'S', 'PRESS', ctrl=True, alt=True)
kmi = km.keymap_items.new('mesh.edge_face_add', 'F', 'PRESS')
kmi = km.keymap_items.new('mesh.duplicate_move', 'D', 'PRESS', shift=True)
kmi = km.keymap_items.new('wm.call_menu', 'A', 'PRESS', shift=True)
kmi.properties.name = 'INFO_MT_mesh_add'
kmi = km.items.new('mesh.separate', 'P', 'PRESS')
kmi = km.items.new('mesh.split', 'Y', 'PRESS')
kmi = km.items.new('mesh.dupli_extrude_cursor', 'ACTIONMOUSE', 'CLICK', ctrl=True)
kmi = km.items.new('mesh.delete', 'X', 'PRESS')
kmi = km.items.new('mesh.delete', 'DEL', 'PRESS')
kmi = km.items.new('mesh.knife_cut', 'LEFTMOUSE', 'PRESS', key_modifier='K')
kmi = km.items.new('mesh.knife_cut', 'LEFTMOUSE', 'PRESS', shift=True, key_modifier='K')
kmi = km.keymap_items.new('mesh.separate', 'P', 'PRESS')
kmi = km.keymap_items.new('mesh.split', 'Y', 'PRESS')
kmi = km.keymap_items.new('mesh.dupli_extrude_cursor', 'ACTIONMOUSE', 'CLICK', ctrl=True)
kmi = km.keymap_items.new('mesh.delete', 'X', 'PRESS')
kmi = km.keymap_items.new('mesh.delete', 'DEL', 'PRESS')
kmi = km.keymap_items.new('mesh.knife_cut', 'LEFTMOUSE', 'PRESS', key_modifier='K')
kmi = km.keymap_items.new('mesh.knife_cut', 'LEFTMOUSE', 'PRESS', shift=True, key_modifier='K')
kmi.properties.type = 'MIDPOINTS'
kmi = km.items.new('object.vertex_parent_set', 'P', 'PRESS', ctrl=True)
kmi = km.items.new('wm.call_menu', 'W', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('object.vertex_parent_set', 'P', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.call_menu', 'W', 'PRESS', ctrl=True)
kmi.properties.name = 'VIEW3D_MT_edit_mesh_specials'
kmi = km.items.new('wm.call_menu', 'F', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.call_menu', 'F', 'PRESS', ctrl=True)
kmi.properties.name = 'VIEW3D_MT_edit_mesh_faces'
kmi = km.items.new('wm.call_menu', 'E', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.call_menu', 'E', 'PRESS', ctrl=True)
kmi.properties.name = 'VIEW3D_MT_edit_mesh_edges'
kmi = km.items.new('wm.call_menu', 'V', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.call_menu', 'V', 'PRESS', ctrl=True)
kmi.properties.name = 'VIEW3D_MT_edit_mesh_vertices'
kmi = km.items.new('wm.call_menu', 'H', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.call_menu', 'H', 'PRESS', ctrl=True)
kmi.properties.name = 'VIEW3D_MT_hook'
kmi = km.items.new('wm.call_menu', 'U', 'PRESS')
kmi = km.keymap_items.new('wm.call_menu', 'U', 'PRESS')
kmi.properties.name = 'VIEW3D_MT_uv_map'
kmi = km.items.new('wm.call_menu', 'G', 'PRESS', ctrl=True)
kmi = km.keymap_items.new('wm.call_menu', 'G', 'PRESS', ctrl=True)
kmi.properties.name = 'VIEW3D_MT_vertex_group'
kmi = km.items.new('wm.context_cycle_enum', 'O', 'PRESS', shift=True)
kmi = km.keymap_items.new('wm.context_cycle_enum', 'O', 'PRESS', shift=True)
kmi.properties.data_path = 'tool_settings.proportional_edit_falloff'
kmi = km.items.new('wm.context_toggle_enum', 'O', 'PRESS')
kmi = km.keymap_items.new('wm.context_toggle_enum', 'O', 'PRESS')
kmi.properties.data_path = 'tool_settings.proportional_edit'
kmi.properties.value_1 = 'DISABLED'
kmi.properties.value_2 = 'ENABLED'
kmi = km.items.new('wm.context_toggle_enum', 'O', 'PRESS', alt=True)
kmi = km.keymap_items.new('wm.context_toggle_enum', 'O', 'PRESS', alt=True)
kmi.properties.data_path = 'tool_settings.proportional_edit'
kmi.properties.value_1 = 'DISABLED'
kmi.properties.value_2 = 'CONNECTED'
kmi = km.items.new('mesh.select_all', 'SELECTMOUSE', 'CLICK')
kmi = km.keymap_items.new('mesh.select_all', 'SELECTMOUSE', 'CLICK')
kmi.properties.action = 'DESELECT'
wm.keyconfigs.active = kc
bpy.context.user_preferences.edit.use_drag_immediately = True
bpy.context.user_preferences.edit.use_insertkey_xyz_to_rgb = False
bpy.context.user_preferences.inputs.select_mouse = 'LEFT'
bpy.context.user_preferences.inputs.view_zoom_method = 'DOLLY'
bpy.context.user_preferences.inputs.view_zoom_axis = 'HORIZONTAL'
bpy.context.user_preferences.inputs.view_rotate_method = 'TURNTABLE'
bpy.context.user_preferences.inputs.invert_mouse_wheel_zoom = True

@ -0,0 +1,57 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
if "bpy" in locals():
from imp import reload as _reload
for val in _modules_loaded.values():
_reload(val)
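# The "bpy" in locals() check above is the usual reload idiom: when this package is
# imported a second time inside a running Blender (e.g. via Reload Scripts), every
# sub-module listed in _modules is passed through reload() so edits take effect
# without restarting.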
_modules = (
"add_mesh_torus",
"animsys_update",
"image",
"mesh",
"nla",
"object_align",
"object",
"object_randomize_transform",
"presets",
"screen_play_rendered_anim",
"sequencer",
"uvcalc_follow_active",
"uvcalc_lightmap",
"uvcalc_smart_project",
"vertexpaint_dirt",
"wm",
)
__import__(name=__name__, fromlist=_modules)
_namespace = globals()
_modules_loaded = {name: _namespace[name] for name in _modules}
del _namespace
import bpy
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)

@ -136,20 +136,3 @@ class AddTorus(bpy.types.Operator):
add_object_utils.object_data_add(context, mesh, operator=self)
return {'FINISHED'}
def menu_func(self, context):
self.layout.operator(AddTorus.bl_idname, text="Torus", icon='MESH_TORUS')
def register():
bpy.utils.register_class(AddTorus)
bpy.types.INFO_MT_mesh_add.append(menu_func)
def unregister():
bpy.utils.unregister_class(AddTorus)
bpy.types.INFO_MT_mesh_add.remove(menu_func)
if __name__ == "__main__":
register()

@ -679,6 +679,7 @@ data_path_update = [
("SpeedControlSequence", "global_speed", "multiply_speed"),
("SpeedControlSequence", "use_curve_velocity", "use_as_speed"),
("SpeedControlSequence", "use_curve_compress_y", "scale_to_length"),
("Key", "keys", "key_blocks"),
]
@ -694,15 +695,3 @@ class UpdateAnimData(bpy.types.Operator):
import animsys_refactor
animsys_refactor.update_data_paths(data_path_update)
return {'FINISHED'}
if __name__ == "__main__":
bpy.ops.anim.update_data_paths()
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)

@ -187,14 +187,3 @@ class ProjectApply(bpy.types.Operator):
bpy.ops.paint.project_image(image=image_name)
return {'FINISHED'}
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -170,14 +170,3 @@ class MeshMirrorUV(bpy.types.Operator):
bpy.ops.object.mode_set(mode='EDIT', toggle=False)
return {'FINISHED'}
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -168,18 +168,3 @@ class BakeAction(bpy.types.Operator):
def invoke(self, context, event):
wm = context.window_manager
return wm.invoke_props_dialog(self)
#def menu_func(self, context):
# self.layout.operator(BakeAction.bl_idname, text="Bake Armature Action")
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -250,11 +250,11 @@ class ShapeTransfer(bpy.types.Operator):
def ob_add_shape(ob, name):
me = ob.data
key = ob.shape_key_add(from_mix=False)
if len(me.shape_keys.keys) == 1:
if len(me.shape_keys.key_blocks) == 1:
key.name = "Basis"
key = ob.shape_key_add(from_mix=False) # we need a rest
key.name = name
ob.active_shape_key_index = len(me.shape_keys.keys) - 1
ob.active_shape_key_index = len(me.shape_keys.key_blocks) - 1
ob.show_only_shape_key = True
from mathutils.geometry import barycentric_transform
@ -270,7 +270,7 @@ class ShapeTransfer(bpy.types.Operator):
orig_normals = me_nos(me.vertices)
# orig_coords = me_cos(me.vertices) # the actual mvert locations aren't as reliable as the base shape :S
orig_coords = me_cos(me.shape_keys.keys[0].data)
orig_coords = me_cos(me.shape_keys.key_blocks[0].data)
for ob_other in objects:
me_other = ob_other.data
@ -280,7 +280,7 @@ class ShapeTransfer(bpy.types.Operator):
target_normals = me_nos(me_other.vertices)
if me_other.shape_keys:
target_coords = me_cos(me_other.shape_keys.keys[0].data)
target_coords = me_cos(me_other.shape_keys.key_blocks[0].data)
else:
target_coords = me_cos(me_other.vertices)
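The keys -> key_blocks edits above track the RNA rename of Key.keys to Key.key_blocks. A minimal sketch of reading shape keys through the new name, assuming the active object is a mesh that already has shape keys:

import bpy

ob = bpy.context.object
key = ob.data.shape_keys
if key:
    basis = key.key_blocks[0]  # the first block is the basis shape
    for kb in key.key_blocks:
        print(kb.name, kb.value)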
@ -561,14 +561,3 @@ class ClearAllRestrictRender(bpy.types.Operator):
for obj in context.scene.objects:
obj.hide_render = False
return {'FINISHED'}
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -278,23 +278,3 @@ class AlignObjects(bpy.types.Operator):
return {'CANCELLED'}
else:
return {'FINISHED'}
def menu_func(self, context):
if context.mode == 'OBJECT':
self.layout.operator(AlignObjects.bl_idname,
text="Align Objects")
def register():
bpy.utils.register_module(__name__)
bpy.types.VIEW3D_MT_transform.append(menu_func)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.VIEW3D_MT_transform.remove(menu_func)
if __name__ == "__main__":
register()

@ -145,23 +145,3 @@ class RandomizeLocRotSize(bpy.types.Operator):
randomize_selected(seed, delta, loc, rot, scale, scale_even)
return {'FINISHED'}
def menu_func(self, context):
if context.mode == 'OBJECT':
self.layout.operator(RandomizeLocRotSize.bl_idname,
text="Randomize Transform")
def register():
bpy.utils.register_module(__name__)
bpy.types.VIEW3D_MT_transform.append(menu_func)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.VIEW3D_MT_transform.remove(menu_func)
if __name__ == "__main__":
register()

@ -351,14 +351,3 @@ class WM_MT_operator_presets(bpy.types.Menu):
return AddPresetOperator.operator_path(self.operator)
preset_operator = "script.execute_preset"
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -140,14 +140,3 @@ class PlayRenderedAnim(bpy.types.Operator):
#raise OSError("Couldn't find an external animation player.")
return {'FINISHED'}
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -132,15 +132,3 @@ class SequencerDeinterlaceSelectedMovies(bpy.types.Operator):
s.use_deinterlace = True
return {'FINISHED'}
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -248,22 +248,3 @@ class FollowActiveQuads(bpy.types.Operator):
def invoke(self, context, event):
wm = context.window_manager
return wm.invoke_props_dialog(self)
def menu_func(self, context):
self.layout.operator_context = 'INVOKE_REGION_WIN'
self.layout.operator(FollowActiveQuads.bl_idname)
def register():
bpy.utils.register_module(__name__)
bpy.types.VIEW3D_MT_uv_map.append(menu_func)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.VIEW3D_MT_uv_map.remove(menu_func)
if __name__ == "__main__":
register()

@ -21,8 +21,6 @@
import bpy
import mathutils
from math import sqrt, pi
class prettyface(object):
__slots__ = "uv", "width", "height", "children", "xoff", "yoff", "has_parent", "rot"
@ -104,6 +102,7 @@ class prettyface(object):
pf.spin()
def place(self, xoff, yoff, xfac, yfac, margin_w, margin_h):
from math import pi
xoff += self.xoff
yoff += self.yoff
@ -196,6 +195,7 @@ def lightmap_uvpack(meshes,
and a higher value will have more clumpy boxes but more wasted space
'''
import time
from math import sqrt
if not meshes:
return
@ -580,28 +580,3 @@ class LightMapPack(bpy.types.Operator):
def invoke(self, context, event):
wm = context.window_manager
return wm.invoke_props_dialog(self)
def menu_func(self, context):
self.layout.operator_context = 'INVOKE_REGION_WIN'
self.layout.operator(LightMapPack.bl_idname)
def register():
bpy.utils.register_class(LightMapPack)
bpy.types.VIEW3D_MT_uv_map.append(menu_func)
def unregister():
bpy.utils.register_class(LightMapPack)
bpy.types.VIEW3D_MT_uv_map.remove(menu_func)
if __name__ == "__main__":
register()
'''
bpy.ops.import_scene.obj(filepath="/untitled.obj")
bpy.ops.uv.lightmap_pack(PREF_NEW_UVLAYER=1, PREF_APPLY_IMAGE=1, PREF_PACK_IN_ONE=1, PREF_CONTEXT='ALL_OBJECTS')
bpy.ops.wm.save_mainfile(filepath="/untitled.blend", check_existing=False)
'''

@ -1139,21 +1139,3 @@ class SmartProject(bpy.types.Operator):
def invoke(self, context, event):
wm = context.window_manager
return wm.invoke_props_dialog(self)
def menu_func(self, context):
self.layout.operator_context = 'INVOKE_REGION_WIN'
self.layout.operator(SmartProject.bl_idname, text="Smart Project")
def register():
bpy.utils.register_module(__name__)
bpy.types.VIEW3D_MT_uv_map.append(menu_func)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.VIEW3D_MT_uv_map.remove(menu_func)
if __name__ == "__main__":
register()

@ -146,7 +146,6 @@ from bpy.props import FloatProperty, IntProperty, BoolProperty
class VertexPaintDirt(bpy.types.Operator):
bl_idname = "paint.vertex_color_dirt"
bl_label = "Dirty Vertex Colors"
bl_options = {'REGISTER', 'UNDO'}
@ -163,8 +162,8 @@ class VertexPaintDirt(bpy.types.Operator):
obj = context.object
if not obj or obj.type != 'MESH':
print('Error, no active mesh object, aborting')
return('CANCELLED',)
self.report({'ERROR'}, "Error, no active mesh object, aborting")
return {'CANCELLED'}
mesh = obj.data
@ -175,14 +174,3 @@ class VertexPaintDirt(bpy.types.Operator):
print('Dirt calculated in %.6f' % (time.time() - t))
return {'FINISHED'}
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -552,6 +552,7 @@ class WM_OT_url_open(bpy.types.Operator):
def execute(self, context):
import webbrowser
_webbrowser_bug_fix()
webbrowser.open(self.url)
return {'FINISHED'}
@ -616,6 +617,15 @@ class WM_OT_doc_view(bpy.types.Operator):
url = '%s/bpy.ops.%s.html#bpy.ops.%s.%s' % \
(self._prefix, class_name, class_name, class_prop)
else:
# detect if this is an inherited member and use that name instead
rna_parent = getattr(bpy.types, class_name).bl_rna
rna_prop = rna_parent.properties[class_prop]
rna_parent = rna_parent.base
while rna_parent and rna_prop == rna_parent.properties.get(class_prop):
class_name = rna_parent.identifier
rna_parent = rna_parent.base
# It so happens that epydoc nests these, not sphinx
# class_name_full = self._nested_class_string(class_name)
url = '%s/bpy.types.%s.html#bpy.types.%s.%s' % \
@ -625,6 +635,7 @@ class WM_OT_doc_view(bpy.types.Operator):
return {'PASS_THROUGH'}
import webbrowser
_webbrowser_bug_fix()
webbrowser.open(url)
return {'FINISHED'}
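A standalone sketch of the inherited-member lookup added above: walk up the RNA base chain while the property is still the inherited one, so the generated doc URL names the type that actually defines it. The helper name and the example types are placeholders:

import bpy

def owning_type(class_name, prop_name):
    rna = getattr(bpy.types, class_name).bl_rna
    prop = rna.properties[prop_name]
    base = rna.base
    while base and prop == base.properties.get(prop_name):
        class_name = base.identifier
        base = base.base
    return class_name

# e.g. "name" on Mesh is expected to resolve to the ID base type
print(owning_type("Mesh", "name"))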
@ -731,7 +742,12 @@ class WM_OT_properties_edit(bpy.types.Operator):
data_path = self.data_path
value = self.value
prop = self.property
prop_old = self._last_prop[0]
prop_old = getattr(self, "_last_prop", [None])[0]
if prop_old is None:
self.report({'ERROR'}, "Direct execution not supported")
return {'CANCELLED'}
try:
value_eval = eval(value)
@ -765,12 +781,19 @@ class WM_OT_properties_edit(bpy.types.Operator):
# otherwise existing buttons which reference freed
# memory may crash blender [#26510]
context.area.tag_redraw()
# context.area.tag_redraw()
for win in context.window_manager.windows:
for area in win.screen.areas:
area.tag_redraw()
return {'FINISHED'}
def invoke(self, context, event):
if not self.data_path:
self.report({'ERROR'}, "Data path not set")
return {'CANCELLED'}
self._last_prop = [self.property]
item = eval("context.%s" % self.data_path)
@ -836,6 +859,38 @@ class WM_OT_keyconfig_activate(bpy.types.Operator):
bpy.utils.keyconfig_set(self.filepath)
return {'FINISHED'}
class WM_OT_appconfig_default(bpy.types.Operator):
bl_idname = "wm.appconfig_default"
bl_label = "Default Application Configuration"
def execute(self, context):
import os
context.window_manager.keyconfigs.active = context.window_manager.keyconfigs.default
filepath = os.path.join(bpy.utils.preset_paths("interaction")[0], "blender.py")
if os.path.exists(filepath):
bpy.ops.script.execute_preset(filepath=filepath, menu_idname="USERPREF_MT_interaction_presets")
return {'FINISHED'}
class WM_OT_appconfig_activate(bpy.types.Operator):
bl_idname = "wm.appconfig_activate"
bl_label = "Activate Application Configuration"
filepath = StringProperty(name="File Path", maxlen=1024)
def execute(self, context):
import os
bpy.utils.keyconfig_set(self.filepath)
filepath = self.filepath.replace("keyconfig", "interaction")
if os.path.exists(filepath):
bpy.ops.script.execute_preset(filepath=filepath, menu_idname="USERPREF_MT_interaction_presets")
return {'FINISHED'}
class WM_OT_sysinfo(bpy.types.Operator):
'''Generate System Info'''
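A hedged usage sketch for the two appconfig operators added above; the preset file name "maya.py" is a placeholder for whichever keyconfig preset is actually installed:

import os
import bpy

# restore the default key configuration and interaction preset
bpy.ops.wm.appconfig_default()

# or activate a specific configuration from disk
paths = bpy.utils.preset_paths("keyconfig")
if paths:
    filepath = os.path.join(paths[0], "maya.py")
    if os.path.exists(filepath):
        bpy.ops.wm.appconfig_activate(filepath=filepath)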
@ -848,12 +903,10 @@ class WM_OT_sysinfo(bpy.types.Operator):
return {'FINISHED'}
def register():
bpy.utils.register_module(__name__)
def _webbrowser_bug_fix():
# test for X11
import os
# test for X11
if os.environ.get("DISPLAY"):
# BSD licenced code copied from python, temp fix for bug
@ -910,10 +963,3 @@ def register():
import webbrowser
webbrowser.UnixBrowser._invoke = _invoke
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -0,0 +1,120 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# note, properties_animviz is a helper module only.
if "bpy" in locals():
from imp import reload as _reload
for val in _modules_loaded.values():
_reload(val)
_modules = (
"properties_animviz",
"properties_data_armature",
"properties_data_bone",
"properties_data_camera",
"properties_data_curve",
"properties_data_empty",
"properties_data_lamp",
"properties_data_lattice",
"properties_data_mesh",
"properties_data_metaball",
"properties_data_modifier",
"properties_game",
"properties_material",
"properties_object_constraint",
"properties_object",
"properties_particle",
"properties_physics_cloth",
"properties_physics_common",
"properties_physics_field",
"properties_physics_fluid",
"properties_physics_smoke",
"properties_physics_softbody",
"properties_render",
"properties_scene",
"properties_texture",
"properties_world",
"space_console",
"space_dopesheet",
"space_filebrowser",
"space_graph",
"space_image",
"space_info",
"space_logic",
"space_nla",
"space_node",
"space_outliner",
"space_sequencer",
"space_text",
"space_time",
"space_userpref_keymap",
"space_userpref",
"space_view3d",
"space_view3d_toolbar",
)
__import__(name=__name__, fromlist=_modules)
_namespace = globals()
_modules_loaded = {name: _namespace[name] for name in _modules}
del _namespace
import bpy
def register():
bpy.utils.register_module(__name__)
# space_userprefs.py
from bpy.props import StringProperty, EnumProperty
WindowManager = bpy.types.WindowManager
WindowManager.addon_search = StringProperty(name="Search", description="Search within the selected filter")
WindowManager.addon_filter = EnumProperty(
items=[('All', "All", ""),
('Enabled', "Enabled", ""),
('Disabled', "Disabled", ""),
('3D View', "3D View", ""),
('Add Curve', "Add Curve", ""),
('Add Mesh', "Add Mesh", ""),
('Animation', "Animation", ""),
('Development', "Development", ""),
('Game Engine', "Game Engine", ""),
('Import-Export', "Import-Export", ""),
('Mesh', "Mesh", ""),
('Object', "Object", ""),
('Render', "Render", ""),
('Rigging', "Rigging", ""),
('System', "System", "")
],
name="Category",
description="Filter add-ons by category",
)
WindowManager.addon_support = EnumProperty(
items=[('OFFICIAL', "Official", ""),
('COMMUNITY', 'Community', ""),
],
name="Support",
description="Display support level", default={'OFFICIAL', 'COMMUNITY'}, options={'ENUM_FLAG'})
# done...
def unregister():
bpy.utils.unregister_module(__name__)
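A throwaway sketch of reading the WindowManager properties registered above from a panel; the panel class, its space and region are placeholders, only addon_search, addon_filter and addon_support come from the code above:

import bpy

class EXAMPLE_PT_addon_filter(bpy.types.Panel):
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_label = "Add-on Filter (example)"

    def draw(self, context):
        wm = context.window_manager
        layout = self.layout
        layout.prop(wm, "addon_search", text="", icon='VIEWZOOM')
        layout.prop(wm, "addon_filter")
        layout.prop(wm, "addon_support", expand=True)

bpy.utils.register_class(EXAMPLE_PT_addon_filter)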

@ -22,6 +22,10 @@ import bpy
# Generic Panels (Independent of DataType)
# NOTE:
# The specialised panel types are derived in their respective UI modules
# don't register these classes since they are only helpers.
class MotionPathButtonsPanel():
bl_space_type = 'PROPERTIES'
@ -90,17 +94,3 @@ class OnionSkinButtonsPanel():
col = split.column()
col.label(text="Display:")
col.prop(arm, "show_only_ghost_selected", text="Selected Only")
# NOTE:
# The specialised panel types are derived in their respective UI modules
# dont register these classes since they are only helpers.
def register():
pass # bpy.utils.register_module(__name__)
def unregister():
pass # bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -260,7 +260,10 @@ class DATA_PT_iksolver_itasc(ArmatureButtonsPanel, bpy.types.Panel):
row.prop(itasc, "damping_max", text="Damp", slider=True)
row.prop(itasc, "damping_epsilon", text="Eps", slider=True)
from properties_animviz import MotionPathButtonsPanel, OnionSkinButtonsPanel
from bl_ui.properties_animviz import (
MotionPathButtonsPanel,
OnionSkinButtonsPanel,
)
class DATA_PT_motion_paths(MotionPathButtonsPanel, bpy.types.Panel):
@ -307,14 +310,3 @@ class DATA_PT_custom_props_arm(ArmatureButtonsPanel, PropertyPanel, bpy.types.Pa
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
_context_path = "object.data"
_property_type = bpy.types.Armature
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -17,6 +17,7 @@
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy
from rna_prop_ui import PropertyPanel
@ -367,14 +368,3 @@ class BONE_PT_custom_props(BoneButtonsPanel, PropertyPanel, bpy.types.Panel):
return "active_pose_bone"
else:
return "active_bone"
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -134,14 +134,3 @@ class DATA_PT_custom_props_camera(CameraButtonsPanel, PropertyPanel, bpy.types.P
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
_context_path = "object.data"
_property_type = bpy.types.Camera
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -106,12 +106,12 @@ class DATA_PT_shape_curve(CurveButtonsPanel, bpy.types.Panel):
sub.prop(curve, "render_resolution_v", text="Render V")
if (is_curve or is_text):
col.label(text="Fill:")
sub = col.column()
sub.active = (curve.bevel_object is None)
sub.label(text="Fill:")
sub.prop(curve, "use_fill_front")
sub.prop(curve, "use_fill_back")
sub.prop(curve, "use_fill_deform", text="Fill Deformed")
col.prop(curve, "use_fill_deform", text="Fill Deformed")
col.label(text="Textures:")
col.prop(curve, "use_uv_as_generated")
@ -390,14 +390,3 @@ class DATA_PT_custom_props_curve(CurveButtonsPanel, PropertyPanel, bpy.types.Pan
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
_context_path = "object.data"
_property_type = bpy.types.Curve
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -40,14 +40,3 @@ class DATA_PT_empty(DataButtonsPanel, bpy.types.Panel):
layout.prop(ob, "empty_draw_type", text="Display")
layout.prop(ob, "empty_draw_size", text="Size")
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -384,14 +384,3 @@ class DATA_PT_custom_props_lamp(DataButtonsPanel, PropertyPanel, bpy.types.Panel
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
_context_path = "object.data"
_property_type = bpy.types.Lamp
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -80,14 +80,3 @@ class DATA_PT_custom_props_lattice(DataButtonsPanel, PropertyPanel, bpy.types.Pa
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
_context_path = "object.data"
_property_type = bpy.types.Lattice
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -188,7 +188,7 @@ class DATA_PT_shape_keys(MeshButtonsPanel, bpy.types.Panel):
rows = 2
if kb:
rows = 5
row.template_list(key, "keys", ob, "active_shape_key_index", rows=rows)
row.template_list(key, "key_blocks", ob, "active_shape_key_index", rows=rows)
col = row.column()
@ -243,7 +243,7 @@ class DATA_PT_shape_keys(MeshButtonsPanel, bpy.types.Panel):
col.active = enable_edit_value
col.label(text="Blend:")
col.prop_search(kb, "vertex_group", ob, "vertex_groups", text="")
col.prop_search(kb, "relative_key", key, "keys", text="")
col.prop_search(kb, "relative_key", key, "key_blocks", text="")
else:
row = layout.row()
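The same renamed collection in a standalone draw helper, assuming a mesh object that already has shape keys (the function name is a placeholder):

def draw_shape_key_list(layout, ob):
    key = ob.data.shape_keys
    if key:
        row = layout.row()
        row.template_list(key, "key_blocks", ob, "active_shape_key_index", rows=2)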
@ -349,14 +349,3 @@ class DATA_PT_custom_props_mesh(MeshButtonsPanel, PropertyPanel, bpy.types.Panel
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
_context_path = "object.data"
_property_type = bpy.types.Mesh
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -116,14 +116,3 @@ class DATA_PT_custom_props_metaball(DataButtonsPanel, PropertyPanel, bpy.types.P
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
_context_path = "object.data"
_property_type = bpy.types.MetaBall
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -699,14 +699,3 @@ class DATA_PT_modifiers(ModifierButtonsPanel, bpy.types.Panel):
col = split.column()
col.prop(md, "width", slider=True)
col.prop(md, "narrowness", slider=True)
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -496,14 +496,3 @@ class WORLD_PT_game_physics(WorldButtonsPanel, bpy.types.Panel):
col = split.column()
col.label(text="Logic Steps:")
col.prop(gs, "logic_step_max", text="Max")
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -967,14 +967,3 @@ class MATERIAL_PT_custom_props(MaterialButtonsPanel, PropertyPanel, bpy.types.Pa
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
_context_path = "material"
_property_type = bpy.types.Material
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -288,7 +288,11 @@ class OBJECT_PT_animation(ObjectButtonsPanel, bpy.types.Panel):
col.prop(ob, "track_axis", text="Axis")
col.prop(ob, "up_axis", text="Up Axis")
from properties_animviz import MotionPathButtonsPanel, OnionSkinButtonsPanel
from bl_ui.properties_animviz import (
MotionPathButtonsPanel,
OnionSkinButtonsPanel,
)
class OBJECT_PT_motion_paths(MotionPathButtonsPanel, bpy.types.Panel):
@ -331,14 +335,3 @@ class OBJECT_PT_custom_props(ObjectButtonsPanel, PropertyPanel, bpy.types.Panel)
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
_context_path = "object"
_property_type = bpy.types.Object
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -779,14 +779,3 @@ class BONE_PT_constraints(ConstraintButtonsPanel, bpy.types.Panel):
for con in context.pose_bone.constraints:
self.draw_constraint(context, con)
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()

@ -20,10 +20,12 @@
import bpy
from rna_prop_ui import PropertyPanel
from properties_physics_common import point_cache_ui
from properties_physics_common import effector_weights_ui
from properties_physics_common import basic_force_field_settings_ui
from properties_physics_common import basic_force_field_falloff_ui
from bl_ui.properties_physics_common import (
point_cache_ui,
effector_weights_ui,
basic_force_field_settings_ui,
basic_force_field_falloff_ui,
)
def particle_panel_enabled(context, psys):
@ -474,7 +476,7 @@ class PARTICLE_PT_physics(ParticleButtonsPanel, bpy.types.Panel):
col = split.column()
col.label(text="Integration:")
col.prop(part, "integrator", text="")
col.prop(part, "time_tweak")
col.prop(part, "timestep")
col.prop(part, "subframes")
row = layout.row()
@ -1209,14 +1211,3 @@ class PARTICLE_PT_custom_props(ParticleButtonsPanel, PropertyPanel, bpy.types.Pa
COMPAT_ENGINES = {'BLENDER_RENDER'}
_context_path = "particle_system.settings"
_property_type = bpy.types.ParticleSettings
def register():
bpy.utils.register_module(__name__)
def unregister():
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()
