synched with trunk at revision 34793
209
release/Makefile
@ -1,209 +0,0 @@
|
||||
# -*- mode: gnumakefile; tab-width: 8; indent-tabs-mode: t; -*-
|
||||
# vim: tabstop=8
|
||||
# $Id$
|
||||
#
|
||||
# ***** BEGIN GPL LICENSE BLOCK *****
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# The Original Code is Copyright (C) 2001-2002 by NaN Holding BV.
|
||||
# All rights reserved.
|
||||
#
|
||||
# The Original Code is: all of this file.
|
||||
#
|
||||
# Contributor(s): none yet.
|
||||
#
|
||||
# ***** END GPL LICENSE BLOCK *****
|
||||
|
||||
include nan_definitions.mk
|
||||
|
||||
export VERSION := $(shell ./getversion.py)
|
||||
|
||||
BLENDNAME=blender-$(VERSION)-$(CONFIG_GUESS)-py$(NAN_PYTHON_VERSION)$(TYPE)
|
||||
export DISTDIR=$(NAN_OBJDIR)/$(BLENDNAME)
|
||||
export CONFDIR=$(DISTDIR)/.blender
|
||||
|
||||
ifeq ($(OS),$(findstring $(OS), "freebsd irix linux openbsd solaris"))
|
||||
TAR="tar"
|
||||
TARFLAGS="cf"
|
||||
EXT0=""
|
||||
EXT1=".tar"
|
||||
COMPRESS="bzip2"
|
||||
COMPRESSFLAGS="-f"
|
||||
EXT2=".bz2"
|
||||
ifeq ($(OS), solaris)
|
||||
ifeq ($(CPU), i386)
|
||||
NOPLUGINS?=true
|
||||
endif
|
||||
endif
|
||||
# don"t build plugins on irix if using gcc
|
||||
ifeq ($(OS), irix)
|
||||
ifeq ($(IRIX_USE_GCC), true)
|
||||
NOPLUGINS?=true
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
|
||||
ifeq ($(OS),windows)
|
||||
TAR="zip"
|
||||
TARFLAGS="-r9"
|
||||
EXT0=".exe"
|
||||
EXT1=".zip"
|
||||
NOPLUGINS?=true
|
||||
NOSTRIP?=true
|
||||
endif
|
||||
|
||||
ifeq ($(OS),darwin)
|
||||
TAR="tar"
|
||||
TARFLAGS="cf"
|
||||
EXT0=".app"
|
||||
EXT1=".tar"
|
||||
COMPRESS="bzip2"
|
||||
COMPRESSFLAGS="-f"
|
||||
EXT2=".bz2"
|
||||
endif
|
||||
|
||||
release: all
|
||||
|
||||
all:
|
||||
@$(MAKE) pkg TYPE=""
|
||||
ifeq ($(WITH_BF_STATICOPENGL), true)
|
||||
@$(MAKE) pkg TYPE="-static"
|
||||
endif
|
||||
|
||||
# OS independent targets below:
|
||||
|
||||
dist: all
|
||||
|
||||
package: version makedirs
|
||||
|
||||
install: package
|
||||
@#echo "****> Install text"
|
||||
@cp text/readme.html $(DISTDIR)
|
||||
@cp text/*.txt $(DISTDIR)
|
||||
ifeq ($(FREEDESKTOP), true)
|
||||
@#echo "****> Install freedesktop icons"
|
||||
@mkdir $(DISTDIR)/icons
|
||||
@mkdir $(DISTDIR)/icons/16x16
|
||||
@cp freedesktop/icons/16x16/blender.png $(DISTDIR)/icons/16x16
|
||||
@mkdir $(DISTDIR)/icons/22x22
|
||||
@cp freedesktop/icons/22x22/blender.png $(DISTDIR)/icons/22x22
|
||||
@mkdir $(DISTDIR)/icons/32x32
|
||||
@cp freedesktop/icons/32x32/blender.png $(DISTDIR)/icons/32x32
|
||||
@mkdir $(DISTDIR)/icons/scalable
|
||||
@cp freedesktop/icons/scalable/blender.svg $(DISTDIR)/icons/scalable
|
||||
endif
|
||||
@echo "----> Make Config dir .blender"
|
||||
@mkdir -p $(CONFDIR)
|
||||
@# possible overruling .txt text documents
|
||||
@[ ! -d $(CONFIG_GUESS)/text ] || \
|
||||
cp -f $(CONFIG_GUESS)/text/*.txt $(DISTDIR)
|
||||
#on OS X the contents of the .blender dir is already inside the bundle
|
||||
ifneq ($(OS), darwin)
|
||||
@[ ! -d $(OCGDIR)/bin/.blender ] || \
|
||||
cp -r $(OCGDIR)/bin/.blender $(DISTDIR)
|
||||
@cp $(NANBLENDERHOME)/release/bin/.blender/.Blanguages $(CONFDIR)
|
||||
@cp $(NANBLENDERHOME)/release/bin/.blender/.bfont.ttf $(CONFDIR)
|
||||
endif
|
||||
@echo "----> Copy blender$(EXT0) executable"
|
||||
ifeq ($(TYPE),-static)
|
||||
@cp $(OCGDIR)/bin/blenderstatic$(EXT0) $(DISTDIR)/blender$(EXT0)
|
||||
else
|
||||
ifeq ($(OS),darwin)
|
||||
@cp -r $(OCGDIR)/bin/blender$(EXT0) $(DISTDIR)/Blender$(EXT0)
|
||||
else
|
||||
@cp $(OCGDIR)/bin/blender$(EXT0) $(DISTDIR)/blender$(EXT0)
|
||||
endif
|
||||
@if [ -f $(OCGDIR)/bin/blenderplayer$(EXTO) ]; then \
|
||||
cp $(OCGDIR)/bin/blenderplayer$(EXTO) \
|
||||
$(DISTDIR)/blenderplayer$(EXTO) ; \
|
||||
fi
|
||||
endif
|
||||
|
||||
ifneq ($(NOPLUGINS),true)
|
||||
@echo "----> Copy and compile plugins"
|
||||
@cp -r plugins $(DISTDIR)/plugins
|
||||
@mkdir -p $(DISTDIR)/plugins/include
|
||||
@cp ../source/blender/blenpluginapi/*.h $(DISTDIR)/plugins/include/
|
||||
@chmod 755 $(DISTDIR)/plugins/bmake
|
||||
@$(MAKE) -C $(DISTDIR)/plugins all > /dev/null || exit 1;
|
||||
@rm -f $(DISTDIR)/plugins/*/*.o
|
||||
|
||||
#on OS X the plugins move to the installation directory
|
||||
ifneq ($(OS),darwin)
|
||||
@mkdir -p $(CONFDIR)/plugins/sequence
|
||||
@mkdir -p $(CONFDIR)/plugins/texture
|
||||
@mv $(DISTDIR)/plugins/sequence/*.so $(CONFDIR)/plugins/sequence
|
||||
@mv $(DISTDIR)/plugins/texture/*.so $(CONFDIR)/plugins/texture
|
||||
endif
|
||||
endif
|
||||
|
||||
@echo "----> Copy python infrastructure"
|
||||
@[ ! -d scripts ] || cp -r scripts $(CONFDIR)/scripts
|
||||
|
||||
ifeq ($(OS),darwin)
|
||||
@echo "----> Copy python modules"
|
||||
@mkdir $(CONFDIR)/python/
|
||||
@unzip -q $(LCGDIR)/release/python.zip -d $(CONFDIR)/python/
|
||||
endif
|
||||
|
||||
ifeq ($(OS),darwin)
|
||||
@echo "----> Move .blender to .app/Contents/MacOS/"
|
||||
@rm -fr $(DISTDIR)/blender$(EXT0)/Contents/MacOS/.blender
|
||||
@mv $(DISTDIR)/.blender $(DISTDIR)/blender$(EXT0)/Contents/MacOS/
|
||||
endif
|
||||
|
||||
ifneq ($(NOSTRIP),true)
|
||||
@echo "----> Strip blender executable"
|
||||
ifeq ($(OS),darwin)
|
||||
@strip -x $(DISTDIR)/blender$(EXT0)/Contents/MacOS/blender
|
||||
else
|
||||
@strip -x $(DISTDIR)/blender$(EXT0)
|
||||
@if [ -f $(DISTDIR)/blenderplayer$(EXTO) ]; then \
|
||||
strip -x $(DISTDIR)/blenderplayer$(EXT0) ; \
|
||||
fi
|
||||
endif
|
||||
endif
|
||||
@[ ! -x $(CONFIG_GUESS)/specific.sh ] || (\
|
||||
echo "**--> Execute specific.sh in $(CONFIG_GUESS)/" && \
|
||||
cd $(CONFIG_GUESS) && ./specific.sh )
|
||||
@echo "----> Cleaning .svn metadata directories"
|
||||
@find $(DISTDIR) -type d -name ".svn" | xargs rm -fr
|
||||
|
||||
pkg: install
|
||||
@echo "----> Create distribution file $(BLENDNAME)$(EXT1)"
|
||||
@#enable the next sleep if you get 'tar file changed while reading'
|
||||
@#sleep 10
|
||||
rm -f $(NAN_OBJDIR)/$(VERSION)/$(BLENDNAME)$(EXT1)*
|
||||
@cd $(NAN_OBJDIR) && $(TAR) $(TARFLAGS) $(VERSION)/$(BLENDNAME)$(EXT1) $(BLENDNAME)
|
||||
ifdef COMPRESS
|
||||
@echo "----> Compressing distribution to $(BLENDNAME)$(EXT1)$(EXT2)"
|
||||
@$(COMPRESS) $(COMPRESSFLAGS) $(NAN_OBJDIR)/$(VERSION)/$(BLENDNAME)$(EXT1)
|
||||
endif
|
||||
@#echo "****> Clean up temporary distribution directory"
|
||||
@#rm -fr $(DISTDIR)
|
||||
@echo "****> $(NAN_OBJDIR)/$(VERSION)/$(BLENDNAME)$(EXT1)$(EXT2) is ready"
|
||||
|
||||
version: FORCE
|
||||
@echo "*---> Create $(BLENDNAME) package"
|
||||
|
||||
makedirs: FORCE
|
||||
@echo "****> Create package directory $(VERSION) if necessary"
|
||||
@[ -d $(NAN_OBJDIR)/$(VERSION) ] || mkdir $(NAN_OBJDIR)/$(VERSION)
|
||||
@echo "****> Remove and recreate temporary distribution directory"
|
||||
@rm -fr $(DISTDIR)
|
||||
@mkdir $(DISTDIR)
|
||||
|
||||
FORCE:
|
@ -1 +1 @@
|
||||
2.54-beta
|
||||
2.56a-beta
|
||||
|
@ -18,40 +18,65 @@
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
"""
|
||||
Thumbnailer runs with python 2.6 and 3.x.
|
||||
To run automatically with nautilus:
|
||||
gconftool --type boolean --set /desktop/gnome/thumbnailers/application@x-blender/enable true
|
||||
gconftool --type string --set /desktop/gnome/thumbnailers/application@x-blender/command "blender-thumbnailer.py %i %o"
|
||||
gconftool --type string --set /desktop/gnome/thumbnailers/application@x-blender/command "blender-thumbnailer.py %u %o"
|
||||
"""
|
||||
|
||||
import struct
|
||||
|
||||
|
||||
def open_wrapper_get():
|
||||
""" wrap OS spesific read functionality here, fallback to 'open()'
|
||||
"""
|
||||
|
||||
def open_gio(path, mode):
|
||||
g_file = gio.File(path).read()
|
||||
g_file.orig_seek = g_file.seek
|
||||
|
||||
def new_seek(offset, whence=0):
|
||||
return g_file.orig_seek(offset, [1, 0, 2][whence])
|
||||
|
||||
g_file.seek = new_seek
|
||||
return g_file
|
||||
|
||||
try:
|
||||
import gio
|
||||
return open_gio
|
||||
except ImportError:
|
||||
return open
|
||||
|
||||
|
||||
def blend_extract_thumb(path):
|
||||
import os
|
||||
open_wrapper = open_wrapper_get()
|
||||
|
||||
# def MAKE_ID(tag): ord(tag[0])<<24 | ord(tag[1])<<16 | ord(tag[2])<<8 | ord(tag[3])
|
||||
REND = 1145980242 # MAKE_ID(b'REND')
|
||||
TEST = 1414743380 # MAKE_ID(b'TEST')
|
||||
REND = 1145980242 # MAKE_ID(b'REND')
|
||||
TEST = 1414743380 # MAKE_ID(b'TEST')
|
||||
|
||||
blendfile = open(path, 'rb')
|
||||
blendfile = open_wrapper(path, 'rb')
|
||||
|
||||
head = blendfile.read(12)
|
||||
|
||||
if head[0:2] == b'\x1f\x8b': # gzip magic
|
||||
if head[0:2] == b'\x1f\x8b': # gzip magic
|
||||
import gzip
|
||||
blendfile.close()
|
||||
blendfile = gzip.open(path, 'rb')
|
||||
blendfile = gzip.GzipFile('', 'rb', 0, open_wrapper(path, 'rb'))
|
||||
head = blendfile.read(12)
|
||||
|
||||
if not head.startswith(b'BLENDER'):
|
||||
blendfile.close()
|
||||
return None, 0, 0
|
||||
|
||||
is_64_bit = (head[7] == b'-')
|
||||
is_64_bit = (head[7] == b'-'[0])
|
||||
|
||||
# true for PPC, false for X86
|
||||
is_big_endian = (head[8] == b'V')
|
||||
is_big_endian = (head[8] == b'V'[0])
|
||||
|
||||
# blender pre 2.5 had no thumbs
|
||||
if head[9:11] <= b'24':
|
||||
@ -66,23 +91,22 @@ def blend_extract_thumb(path):
|
||||
if len(bhead) < sizeof_bhead:
|
||||
return None, 0, 0
|
||||
|
||||
code, length = struct.unpack(int_endian_pair, bhead[0:8]) # 8 == sizeof(int) * 2
|
||||
code, length = struct.unpack(int_endian_pair, bhead[0:8]) # 8 == sizeof(int) * 2
|
||||
|
||||
if code == REND:
|
||||
blendfile.seek(length, os.SEEK_CUR)
|
||||
else:
|
||||
break
|
||||
|
||||
|
||||
|
||||
if code != TEST:
|
||||
return None, 0, 0
|
||||
|
||||
try:
|
||||
x, y = struct.unpack(int_endian_pair, blendfile.read(8)) # 8 == sizeof(int) * 2
|
||||
x, y = struct.unpack(int_endian_pair, blendfile.read(8)) # 8 == sizeof(int) * 2
|
||||
except struct.error:
|
||||
return None, 0, 0
|
||||
|
||||
length -= 8 # sizeof(int) * 2
|
||||
length -= 8 # sizeof(int) * 2
|
||||
|
||||
if length != x * y * 4:
|
||||
return None, 0, 0
|
||||
@ -116,13 +140,13 @@ def write_png(buf, width, height):
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
|
||||
if len(sys.argv) < 2:
|
||||
if len(sys.argv) < 3:
|
||||
print("Expected 2 arguments <input.blend> <output.png>")
|
||||
else:
|
||||
file_in = sys.argv[-2]
|
||||
|
||||
buf, width, height = blend_extract_thumb(file_in)
|
||||
|
||||
|
||||
if buf:
|
||||
file_out = sys.argv[-1]
|
||||
|
||||
|
Before Width: | Height: | Size: 205 KiB After Width: | Height: | Size: 205 KiB |
58
release/datafiles/ctodata.py
Normal file
@ -0,0 +1,58 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# ***** BEGIN GPL LICENSE BLOCK *****
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# The Original Code is Copyright (C) 2009 Blender Foundation.
|
||||
# All rights reserved.
|
||||
#
|
||||
# Contributor(s): Campbell Barton
|
||||
#
|
||||
# ***** END GPL LICENCE BLOCK *****
|
||||
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
if len(sys.argv) < 2:
|
||||
sys.stdout.write("Usage: ctodata <c_file>\n")
|
||||
sys.exit(1)
|
||||
|
||||
filename = sys.argv[1]
|
||||
|
||||
try:
|
||||
fpin = open(filename, "r")
|
||||
except:
|
||||
sys.stdout.write("Unable to open input %s\n" % sys.argv[1])
|
||||
sys.exit(1)
|
||||
|
||||
data = fpin.read().rsplit("{")[-1].split("}")[0]
|
||||
data = data.replace(",", " ")
|
||||
data = data.split()
|
||||
data = bytes([int(v) for v in data])
|
||||
|
||||
dname = filename + ".ctodata"
|
||||
|
||||
try:
|
||||
fpout = open(dname, "wb")
|
||||
except:
|
||||
sys.stdout.write("Unable to open output %s\n" % dname)
|
||||
sys.exit(1)
|
||||
|
||||
fpout.write(data)
|
@ -45,8 +45,8 @@ fpin.seek(0, os.SEEK_END)
|
||||
size = fpin.tell()
|
||||
fpin.seek(0)
|
||||
|
||||
if filename[0] == ".":
|
||||
filename = filename[1:]
|
||||
if filename[0:2] == "." + os.sep:
|
||||
filename = filename[2:]
|
||||
|
||||
cname = filename + ".c"
|
||||
sys.stdout.write("Making C file <%s>\n" % cname)
|
||||
|
Before Width: | Height: | Size: 13 KiB After Width: | Height: | Size: 12 KiB |
Before Width: | Height: | Size: 213 KiB After Width: | Height: | Size: 171 KiB |
Before Width: | Height: | Size: 746 B After Width: | Height: | Size: 845 B |
250
release/freedesktop/icons/16x16/blender.svg
Normal file
@ -0,0 +1,250 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
width="16"
|
||||
height="16"
|
||||
id="svg7854"
|
||||
sodipodi:version="0.32"
|
||||
inkscape:version="0.48.0 r9654"
|
||||
version="1.0"
|
||||
sodipodi:docname="blender.svg"
|
||||
inkscape:output_extension="org.inkscape.output.svg.inkscape"
|
||||
sodipodi:modified="true"
|
||||
inkscape:export-filename="/home/user/my/blender/builds/blender/release/freedesktop/icons/16x16/blender.png"
|
||||
inkscape:export-xdpi="90"
|
||||
inkscape:export-ydpi="90">
|
||||
<defs
|
||||
id="defs7856">
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
id="linearGradient39171">
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop39173" />
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:0;"
|
||||
offset="1"
|
||||
id="stop39175" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="linearGradient39155">
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop39157" />
|
||||
<stop
|
||||
style="stop-color:#dadada;stop-opacity:1;"
|
||||
offset="1"
|
||||
id="stop39159" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
id="linearGradient35500">
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop35502" />
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:0;"
|
||||
offset="1"
|
||||
id="stop35504" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
id="linearGradient35488">
|
||||
<stop
|
||||
style="stop-color:black;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop35490" />
|
||||
<stop
|
||||
style="stop-color:black;stop-opacity:0;"
|
||||
offset="1"
|
||||
id="stop35492" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
id="linearGradient3564">
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop3566" />
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:0;"
|
||||
offset="1"
|
||||
id="stop3568" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient3564"
|
||||
id="linearGradient34576"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
x1="185.9903"
|
||||
y1="193.33229"
|
||||
x2="190.46461"
|
||||
y2="-458.05771"
|
||||
gradientTransform="matrix(0.06818845,0,0,0.06818845,22.51112,27.02885)" />
|
||||
<radialGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient35488"
|
||||
id="radialGradient35494"
|
||||
cx="28.019106"
|
||||
cy="38.98439"
|
||||
fx="28.019106"
|
||||
fy="38.98439"
|
||||
r="15.467961"
|
||||
gradientTransform="matrix(1,0,0,0.342857,0,25.61831)"
|
||||
gradientUnits="userSpaceOnUse" />
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient35500"
|
||||
id="linearGradient35506"
|
||||
x1="21.204315"
|
||||
y1="21.699249"
|
||||
x2="20.155914"
|
||||
y2="-26.908371"
|
||||
gradientUnits="userSpaceOnUse" />
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient39155"
|
||||
id="linearGradient39161"
|
||||
x1="31.1875"
|
||||
y1="18.875"
|
||||
x2="29.875"
|
||||
y2="34.375"
|
||||
gradientUnits="userSpaceOnUse" />
|
||||
<radialGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient39171"
|
||||
id="radialGradient39177"
|
||||
cx="26.109201"
|
||||
cy="19.668886"
|
||||
fx="26.109201"
|
||||
fy="19.668886"
|
||||
r="20.278975"
|
||||
gradientTransform="matrix(1.647222,0,0,1.26792,-15.47413,-5.79794)"
|
||||
gradientUnits="userSpaceOnUse" />
|
||||
</defs>
|
||||
<sodipodi:namedview
|
||||
id="base"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#e0e0e0"
|
||||
borderopacity="1"
|
||||
gridtolerance="10000"
|
||||
guidetolerance="10"
|
||||
objecttolerance="10"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:zoom="16.245778"
|
||||
inkscape:cx="10.326105"
|
||||
inkscape:cy="15.440713"
|
||||
inkscape:document-units="px"
|
||||
inkscape:current-layer="layer1"
|
||||
width="48px"
|
||||
height="48px"
|
||||
inkscape:showpageshadow="false"
|
||||
inkscape:window-width="1392"
|
||||
inkscape:window-height="976"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="0"
|
||||
showgrid="false"
|
||||
inkscape:window-maximized="1" />
|
||||
<metadata
|
||||
id="metadata7859">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:creator>
|
||||
<cc:Agent>
|
||||
<dc:title>Jakub Steiner</dc:title>
|
||||
</cc:Agent>
|
||||
</dc:creator>
|
||||
<dc:source>http://jimmac.musichall.cz</dc:source>
|
||||
<cc:license
|
||||
rdf:resource="http://creativecommons.org/licenses/GPL/2.0/" />
|
||||
<dc:title></dc:title>
|
||||
</cc:Work>
|
||||
<cc:License
|
||||
rdf:about="http://creativecommons.org/licenses/GPL/2.0/">
|
||||
<cc:permits
|
||||
rdf:resource="http://web.resource.org/cc/Reproduction" />
|
||||
<cc:permits
|
||||
rdf:resource="http://web.resource.org/cc/Distribution" />
|
||||
<cc:requires
|
||||
rdf:resource="http://web.resource.org/cc/Notice" />
|
||||
<cc:permits
|
||||
rdf:resource="http://web.resource.org/cc/DerivativeWorks" />
|
||||
<cc:requires
|
||||
rdf:resource="http://web.resource.org/cc/ShareAlike" />
|
||||
<cc:requires
|
||||
rdf:resource="http://web.resource.org/cc/SourceCode" />
|
||||
</cc:License>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g
|
||||
inkscape:label="Layer 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1"
|
||||
transform="translate(0,-32)">
|
||||
<g
|
||||
id="blender"
|
||||
transform="matrix(0.32150786,0,0,0.32150786,0.0378132,31.723202)"
|
||||
inkscape:label="blender">
|
||||
<path
|
||||
transform="matrix(1.274286,0,0,1.377124,-7.569123,-16.70193)"
|
||||
d="m 43.487067,38.98439 c 0,2.928932 -6.925242,5.303301 -15.467961,5.303301 -8.542719,0 -15.467961,-2.374369 -15.467961,-5.303301 0,-2.928932 6.925242,-5.303301 15.467961,-5.303301 8.542719,0 15.467961,2.374369 15.467961,5.303301 z"
|
||||
sodipodi:ry="5.3033009"
|
||||
sodipodi:rx="15.467961"
|
||||
sodipodi:cy="38.98439"
|
||||
sodipodi:cx="28.019106"
|
||||
id="path35486"
|
||||
style="opacity:0.54857142;color:#000000;fill:url(#radialGradient35494);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate"
|
||||
sodipodi:type="arc" />
|
||||
<path
|
||||
sodipodi:nodetypes="csssssssssscccsscccscccssccc"
|
||||
d="m 16.048489,28.093447 c 0.0098,0.576682 0.196474,1.697902 0.471116,2.577425 0.581566,1.854137 1.56684,3.572658 2.939126,5.086496 1.407488,1.553118 3.138519,2.803227 5.139315,3.68976 2.105357,0.931573 4.384795,1.407488 6.750134,1.403741 2.365339,-0.005 4.644601,-0.488686 6.74896,-1.427017 2.00002,-0.895288 3.731043,-2.148391 5.13754,-3.705517 1.369207,-1.519844 2.352576,-3.241114 2.934089,-5.096258 0.294262,-0.938353 0.476921,-1.889392 0.553238,-2.845308 0.07331,-0.939306 0.04204,-1.883511 -0.09183,-2.823792 -0.259981,-1.835599 -0.896294,-3.556847 -1.872652,-5.12758 -0.895541,-1.441699 -2.047808,-2.70454 -3.417268,-3.766975 0,0 0.002,-0.002 0.002,-0.002 0,0 -13.828458,-10.6197195 -13.828458,-10.6197195 -0.01176,-0.00978 -0.02252,-0.019551 -0.03529,-0.028344 -0.909003,-0.6959264 -2.434775,-0.6939758 -3.431728,0.00488 -1.01067,0.7057021 -1.091821,1.8092613 -0.195527,2.5482146 1.899775,1.4997633 3.792068,3.0680399 5.702368,4.5676189 0,0 -17.551681,-0.01171 -17.551681,-0.01171 -1.994685,0 -3.1682604,0.947915 -3.4153942,2.333683 -0.2180771,1.222836 0.7479213,2.738129 2.4800212,2.738129 2.956573,0.0039 5.942111,-0.0069 8.909215,-0.01272 0,0 -15.901723,11.764162 -15.901723,11.764162 -0.020527,0.01564 -0.041053,0.02933 -0.06158,0.04497 -1.4974197,1.148389 -1.9831951,3.059322 -1.0399808,4.268393 0.9598323,1.22959 2.9977653,1.230588 4.5147288,0.006 0,0 8.677593,-7.102098 8.677593,-7.102098 0,0 -0.12511,0.959824 -0.116333,1.535532 z"
|
||||
id="path2482"
|
||||
style="fill:#f57900;fill-rule:evenodd;stroke:#ce5c00;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
<path
|
||||
transform="matrix(0.821621,0,0,0.839506,5.875686,3.882724)"
|
||||
d="m 42.75,25.75 c 0,5.591883 -5.176708,10.125 -11.5625,10.125 -6.385792,0 -11.5625,-4.533117 -11.5625,-10.125 0,-5.591883 5.176708,-10.125 11.5625,-10.125 6.385792,0 11.5625,4.533117 11.5625,10.125 z"
|
||||
sodipodi:ry="10.125"
|
||||
sodipodi:rx="11.5625"
|
||||
sodipodi:cy="25.75"
|
||||
sodipodi:cx="31.1875"
|
||||
id="path39153"
|
||||
style="color:#000000;fill:url(#linearGradient39161);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate"
|
||||
sodipodi:type="arc" />
|
||||
<path
|
||||
sodipodi:nodetypes="cssssscscczccsccssssccscccccscssc"
|
||||
id="path3562"
|
||||
d="m 25.796988,6.0267804 c -0.404852,5.53e-4 -0.818619,0.1256944 -1.095272,0.3196267 -7.14e-4,7.142e-4 -0.0014,0.00143 -0.0021,0.00213 -0.280209,0.1956525 -0.336859,0.3680061 -0.345206,0.4602725 -0.0083,0.092266 -0.01324,0.1672776 0.189655,0.3345475 0.01899,0.015735 0.03747,0.032076 0.0554,0.049009 0.124258,0.1010285 5.704394,4.6389489 5.704394,4.6389489 0.373658,0.304091 0.51584,0.810232 0.355197,1.264415 -0.160635,0.454191 -0.589422,0.382732 -1.071174,0.384283 -5.634142,0.05114 -17.60967,0.01918 -17.60967,0.01918 -0.952967,6.38e-4 -2.3472795,0.516793 -2.4135719,1.585761 -0.063562,1.024947 0.9093059,1.457499 1.5782589,1.457499 0,0 8.830403,-0.01705 8.830403,-0.01705 0.488364,-5.91e-4 0.922857,0.221532 1.080466,0.683755 0.15761,0.462231 0.0033,0.53156 -0.383664,0.829439 0,0 -15.9006939,12.205735 -15.9006939,12.205735 -0.00142,0.0014 -0.00284,0.0028 -0.00426,0.0043 -0.064038,0.04879 -0.084772,0.06226 -0.061795,0.04476 -0.5536756,0.424618 -0.8961097,0.98072 -1.0185711,1.476701 -0.1224537,0.495981 -0.04659,0.882548 0.1875202,1.182646 0.4788333,0.613413 1.7693735,0.732111 2.8980115,-0.178996 0,0 8.6727243,-7.09799 8.6727243,-7.09799 0.361955,-0.295752 0.867758,-0.340606 1.276111,-0.113169 0.408345,0.227437 0.636512,0.681082 0.575631,1.144518 0,0 -0.112502,0.980045 -0.10655,1.370159 0.192357,2.636407 1.448328,4.914995 3.115366,6.91474 2.877746,3.172809 6.84939,4.556285 11.042271,4.719919 4.20342,-0.04394 8.185784,-1.662428 11.042264,-4.758277 5.218918,-6.385867 3.941737,-13.3639 -1.747326,-17.993227 C 36.14442,13.301598 31.42752,9.8792062 26.81986,6.3400589 c -0.0043,-0.00352 -0.0086,-0.00707 -0.01279,-0.010651 -0.0072,-0.00489 -0.01427,-0.00987 -0.02131,-0.014921 -0.210578,-0.1612288 -0.584681,-0.288267 -0.988772,-0.2877065 z"
|
||||
style="opacity:0.4857143;fill:none;stroke:url(#linearGradient34576);stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
<path
|
||||
d="m 26.007076,24.754048 c 0.07447,-1.361157 0.739293,-2.562655 1.738705,-3.413271 0.983518,-0.836183 2.304215,-1.346747 3.746876,-1.346747 1.441743,0 2.762441,0.510564 3.745729,1.346747 1.000515,0.850616 1.664539,2.051213 1.739875,3.41237 0.07718,1.400852 -0.4828,2.701576 -1.46425,3.66495 -1.000516,0.981409 -2.427099,1.597503 -4.021354,1.597503 -1.595172,0 -3.021756,-0.616094 -4.022225,-1.597503 -0.982461,-0.963374 -1.540507,-2.264098 -1.463356,-3.664049 z"
|
||||
id="path2478"
|
||||
style="fill:#3465a4;fill-rule:evenodd;stroke:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
<path
|
||||
sodipodi:nodetypes="csssscsccsscsccssssscsscccsssc"
|
||||
id="path39166"
|
||||
d="m 25.8125,6.03125 c -0.404852,5.528e-4 -0.848347,0.1185677 -1.125,0.3125 -0.280209,0.1956523 -0.335403,0.3764836 -0.34375,0.46875 -0.0083,0.092267 -0.01539,0.1764801 0.1875,0.34375 0.01899,0.015735 0.04457,0.014317 0.0625,0.03125 0.124258,0.1010283 5.71875,4.65625 5.71875,4.65625 0.373658,0.304091 0.504393,0.795817 0.34375,1.25 -0.160635,0.454191 -0.580748,0.373449 -1.0625,0.375 -5.634142,0.05114 -17.625,0.03125 -17.625,0.03125 -0.952967,6.38e-4 -2.3399576,0.524782 -2.40625,1.59375 -0.063562,1.024947 0.924797,1.4375 1.59375,1.4375 0,-1e-6 8.8125,0 8.8125,0 0.488364,-5.92e-4 0.936141,0.225277 1.09375,0.6875 0.157609,0.462231 -0.01926,0.514621 -0.40625,0.8125 0,0 -15.875,12.21875 -15.875,12.21875 -0.00142,0.0014 -0.029829,-0.0014 -0.03125,0 -0.064037,0.04879 -0.054226,0.04875 -0.03125,0.03125 -0.5536758,0.424619 -0.9087886,1.004019 -1.03125,1.5 -0.1224536,0.495981 -0.04661,0.856152 0.1875,1.15625 0.4788333,0.613413 1.777612,0.754857 2.90625,-0.15625 1e-7,10e-7 8.65625,-7.09375 8.65625,-7.09375 0.361955,-0.295753 0.872897,-0.352437 1.28125,-0.125 0.408345,0.227436 0.623381,0.692814 0.5625,1.15625 0,-1e-6 -0.0997,0.953636 -0.09375,1.34375 0.09498,1.301756 0.451616,2.521825 0.989039,3.664234 C 20.799917,36.321089 27.770982,19.392853 44.1875,21.03125 43.339652,19.54368 42.151282,18.185293 40.65625,16.96875 36.159865,13.309932 31.42016,9.8828973 26.8125,6.34375 26.805335,6.3388584 26.788292,6.317553 26.78125,6.3125 26.570707,6.1513121 26.216591,6.0306895 25.8125,6.03125 z"
|
||||
style="opacity:0.51999996;fill:url(#radialGradient39177);fill-opacity:1;fill-rule:evenodd;stroke:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 14 KiB |
Before Width: | Height: | Size: 1.2 KiB After Width: | Height: | Size: 1.2 KiB |
250
release/freedesktop/icons/22x22/blender.svg
Normal file
@ -0,0 +1,250 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
width="22"
|
||||
height="22"
|
||||
id="svg7854"
|
||||
sodipodi:version="0.32"
|
||||
inkscape:version="0.48.0 r9654"
|
||||
version="1.0"
|
||||
sodipodi:docname="blender.svg"
|
||||
inkscape:output_extension="org.inkscape.output.svg.inkscape"
|
||||
sodipodi:modified="true"
|
||||
inkscape:export-filename="/home/user/my/blender/builds/blender/release/freedesktop/icons/22x22/blender.png"
|
||||
inkscape:export-xdpi="90"
|
||||
inkscape:export-ydpi="90">
|
||||
<defs
|
||||
id="defs7856">
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
id="linearGradient39171">
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop39173" />
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:0;"
|
||||
offset="1"
|
||||
id="stop39175" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
id="linearGradient39155">
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop39157" />
|
||||
<stop
|
||||
style="stop-color:#dadada;stop-opacity:1;"
|
||||
offset="1"
|
||||
id="stop39159" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
id="linearGradient35500">
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop35502" />
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:0;"
|
||||
offset="1"
|
||||
id="stop35504" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
id="linearGradient35488">
|
||||
<stop
|
||||
style="stop-color:black;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop35490" />
|
||||
<stop
|
||||
style="stop-color:black;stop-opacity:0;"
|
||||
offset="1"
|
||||
id="stop35492" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
id="linearGradient3564">
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:1;"
|
||||
offset="0"
|
||||
id="stop3566" />
|
||||
<stop
|
||||
style="stop-color:white;stop-opacity:0;"
|
||||
offset="1"
|
||||
id="stop3568" />
|
||||
</linearGradient>
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient3564"
|
||||
id="linearGradient34576"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
x1="185.9903"
|
||||
y1="193.33229"
|
||||
x2="190.46461"
|
||||
y2="-458.05771"
|
||||
gradientTransform="matrix(0.06818845,0,0,0.06818845,22.51112,27.02885)" />
|
||||
<radialGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient35488"
|
||||
id="radialGradient35494"
|
||||
cx="28.019106"
|
||||
cy="38.98439"
|
||||
fx="28.019106"
|
||||
fy="38.98439"
|
||||
r="15.467961"
|
||||
gradientTransform="matrix(1,0,0,0.342857,0,25.61831)"
|
||||
gradientUnits="userSpaceOnUse" />
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient35500"
|
||||
id="linearGradient35506"
|
||||
x1="21.204315"
|
||||
y1="21.699249"
|
||||
x2="20.155914"
|
||||
y2="-26.908371"
|
||||
gradientUnits="userSpaceOnUse" />
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient39155"
|
||||
id="linearGradient39161"
|
||||
x1="31.1875"
|
||||
y1="18.875"
|
||||
x2="29.875"
|
||||
y2="34.375"
|
||||
gradientUnits="userSpaceOnUse" />
|
||||
<radialGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient39171"
|
||||
id="radialGradient39177"
|
||||
cx="26.109201"
|
||||
cy="19.668886"
|
||||
fx="26.109201"
|
||||
fy="19.668886"
|
||||
r="20.278975"
|
||||
gradientTransform="matrix(1.647222,0,0,1.26792,-15.47413,-5.79794)"
|
||||
gradientUnits="userSpaceOnUse" />
|
||||
</defs>
|
||||
<sodipodi:namedview
|
||||
id="base"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#e0e0e0"
|
||||
borderopacity="1"
|
||||
gridtolerance="10000"
|
||||
guidetolerance="10"
|
||||
objecttolerance="10"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:zoom="16.245778"
|
||||
inkscape:cx="10.326105"
|
||||
inkscape:cy="15.440713"
|
||||
inkscape:document-units="px"
|
||||
inkscape:current-layer="layer1"
|
||||
width="48px"
|
||||
height="48px"
|
||||
inkscape:showpageshadow="false"
|
||||
inkscape:window-width="1392"
|
||||
inkscape:window-height="976"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="0"
|
||||
showgrid="false"
|
||||
inkscape:window-maximized="1" />
|
||||
<metadata
|
||||
id="metadata7859">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:creator>
|
||||
<cc:Agent>
|
||||
<dc:title>Jakub Steiner</dc:title>
|
||||
</cc:Agent>
|
||||
</dc:creator>
|
||||
<dc:source>http://jimmac.musichall.cz</dc:source>
|
||||
<cc:license
|
||||
rdf:resource="http://creativecommons.org/licenses/GPL/2.0/" />
|
||||
<dc:title></dc:title>
|
||||
</cc:Work>
|
||||
<cc:License
|
||||
rdf:about="http://creativecommons.org/licenses/GPL/2.0/">
|
||||
<cc:permits
|
||||
rdf:resource="http://web.resource.org/cc/Reproduction" />
|
||||
<cc:permits
|
||||
rdf:resource="http://web.resource.org/cc/Distribution" />
|
||||
<cc:requires
|
||||
rdf:resource="http://web.resource.org/cc/Notice" />
|
||||
<cc:permits
|
||||
rdf:resource="http://web.resource.org/cc/DerivativeWorks" />
|
||||
<cc:requires
|
||||
rdf:resource="http://web.resource.org/cc/ShareAlike" />
|
||||
<cc:requires
|
||||
rdf:resource="http://web.resource.org/cc/SourceCode" />
|
||||
</cc:License>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g
|
||||
inkscape:label="Layer 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1"
|
||||
transform="translate(0,-26)">
|
||||
<g
|
||||
id="blender"
|
||||
transform="matrix(0.45418175,0,0,0.45418175,-0.2261234,25.847379)"
|
||||
inkscape:label="blender">
|
||||
<path
|
||||
transform="matrix(1.274286,0,0,1.377124,-7.569123,-16.70193)"
|
||||
d="m 43.487067,38.98439 c 0,2.928932 -6.925242,5.303301 -15.467961,5.303301 -8.542719,0 -15.467961,-2.374369 -15.467961,-5.303301 0,-2.928932 6.925242,-5.303301 15.467961,-5.303301 8.542719,0 15.467961,2.374369 15.467961,5.303301 z"
|
||||
sodipodi:ry="5.3033009"
|
||||
sodipodi:rx="15.467961"
|
||||
sodipodi:cy="38.98439"
|
||||
sodipodi:cx="28.019106"
|
||||
id="path35486"
|
||||
style="opacity:0.54857142;color:#000000;fill:url(#radialGradient35494);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate"
|
||||
sodipodi:type="arc" />
|
||||
<path
|
||||
sodipodi:nodetypes="csssssssssscccsscccscccssccc"
|
||||
d="m 16.048489,28.093447 c 0.0098,0.576682 0.196474,1.697902 0.471116,2.577425 0.581566,1.854137 1.56684,3.572658 2.939126,5.086496 1.407488,1.553118 3.138519,2.803227 5.139315,3.68976 2.105357,0.931573 4.384795,1.407488 6.750134,1.403741 2.365339,-0.005 4.644601,-0.488686 6.74896,-1.427017 2.00002,-0.895288 3.731043,-2.148391 5.13754,-3.705517 1.369207,-1.519844 2.352576,-3.241114 2.934089,-5.096258 0.294262,-0.938353 0.476921,-1.889392 0.553238,-2.845308 0.07331,-0.939306 0.04204,-1.883511 -0.09183,-2.823792 -0.259981,-1.835599 -0.896294,-3.556847 -1.872652,-5.12758 -0.895541,-1.441699 -2.047808,-2.70454 -3.417268,-3.766975 0,0 0.002,-0.002 0.002,-0.002 0,0 -13.828458,-10.6197195 -13.828458,-10.6197195 -0.01176,-0.00978 -0.02252,-0.019551 -0.03529,-0.028344 -0.909003,-0.6959264 -2.434775,-0.6939758 -3.431728,0.00488 -1.01067,0.7057021 -1.091821,1.8092613 -0.195527,2.5482146 1.899775,1.4997633 3.792068,3.0680399 5.702368,4.5676189 0,0 -17.551681,-0.01171 -17.551681,-0.01171 -1.994685,0 -3.1682604,0.947915 -3.4153942,2.333683 -0.2180771,1.222836 0.7479213,2.738129 2.4800212,2.738129 2.956573,0.0039 5.942111,-0.0069 8.909215,-0.01272 0,0 -15.901723,11.764162 -15.901723,11.764162 -0.020527,0.01564 -0.041053,0.02933 -0.06158,0.04497 -1.4974197,1.148389 -1.9831951,3.059322 -1.0399808,4.268393 0.9598323,1.22959 2.9977653,1.230588 4.5147288,0.006 0,0 8.677593,-7.102098 8.677593,-7.102098 0,0 -0.12511,0.959824 -0.116333,1.535532 z"
|
||||
id="path2482"
|
||||
style="fill:#f57900;fill-rule:evenodd;stroke:#ce5c00;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
<path
|
||||
transform="matrix(0.821621,0,0,0.839506,5.875686,3.882724)"
|
||||
d="m 42.75,25.75 c 0,5.591883 -5.176708,10.125 -11.5625,10.125 -6.385792,0 -11.5625,-4.533117 -11.5625,-10.125 0,-5.591883 5.176708,-10.125 11.5625,-10.125 6.385792,0 11.5625,4.533117 11.5625,10.125 z"
|
||||
sodipodi:ry="10.125"
|
||||
sodipodi:rx="11.5625"
|
||||
sodipodi:cy="25.75"
|
||||
sodipodi:cx="31.1875"
|
||||
id="path39153"
|
||||
style="color:#000000;fill:url(#linearGradient39161);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate"
|
||||
sodipodi:type="arc" />
|
||||
<path
|
||||
sodipodi:nodetypes="cssssscscczccsccssssccscccccscssc"
|
||||
id="path3562"
|
||||
d="m 25.796988,6.0267804 c -0.404852,5.53e-4 -0.818619,0.1256944 -1.095272,0.3196267 -7.14e-4,7.142e-4 -0.0014,0.00143 -0.0021,0.00213 -0.280209,0.1956525 -0.336859,0.3680061 -0.345206,0.4602725 -0.0083,0.092266 -0.01324,0.1672776 0.189655,0.3345475 0.01899,0.015735 0.03747,0.032076 0.0554,0.049009 0.124258,0.1010285 5.704394,4.6389489 5.704394,4.6389489 0.373658,0.304091 0.51584,0.810232 0.355197,1.264415 -0.160635,0.454191 -0.589422,0.382732 -1.071174,0.384283 -5.634142,0.05114 -17.60967,0.01918 -17.60967,0.01918 -0.952967,6.38e-4 -2.3472795,0.516793 -2.4135719,1.585761 -0.063562,1.024947 0.9093059,1.457499 1.5782589,1.457499 0,0 8.830403,-0.01705 8.830403,-0.01705 0.488364,-5.91e-4 0.922857,0.221532 1.080466,0.683755 0.15761,0.462231 0.0033,0.53156 -0.383664,0.829439 0,0 -15.9006939,12.205735 -15.9006939,12.205735 -0.00142,0.0014 -0.00284,0.0028 -0.00426,0.0043 -0.064038,0.04879 -0.084772,0.06226 -0.061795,0.04476 -0.5536756,0.424618 -0.8961097,0.98072 -1.0185711,1.476701 -0.1224537,0.495981 -0.04659,0.882548 0.1875202,1.182646 0.4788333,0.613413 1.7693735,0.732111 2.8980115,-0.178996 0,0 8.6727243,-7.09799 8.6727243,-7.09799 0.361955,-0.295752 0.867758,-0.340606 1.276111,-0.113169 0.408345,0.227437 0.636512,0.681082 0.575631,1.144518 0,0 -0.112502,0.980045 -0.10655,1.370159 0.192357,2.636407 1.448328,4.914995 3.115366,6.91474 2.877746,3.172809 6.84939,4.556285 11.042271,4.719919 4.20342,-0.04394 8.185784,-1.662428 11.042264,-4.758277 5.218918,-6.385867 3.941737,-13.3639 -1.747326,-17.993227 C 36.14442,13.301598 31.42752,9.8792062 26.81986,6.3400589 c -0.0043,-0.00352 -0.0086,-0.00707 -0.01279,-0.010651 -0.0072,-0.00489 -0.01427,-0.00987 -0.02131,-0.014921 -0.210578,-0.1612288 -0.584681,-0.288267 -0.988772,-0.2877065 z"
|
||||
style="opacity:0.4857143;fill:none;stroke:url(#linearGradient34576);stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
<path
|
||||
d="m 26.007076,24.754048 c 0.07447,-1.361157 0.739293,-2.562655 1.738705,-3.413271 0.983518,-0.836183 2.304215,-1.346747 3.746876,-1.346747 1.441743,0 2.762441,0.510564 3.745729,1.346747 1.000515,0.850616 1.664539,2.051213 1.739875,3.41237 0.07718,1.400852 -0.4828,2.701576 -1.46425,3.66495 -1.000516,0.981409 -2.427099,1.597503 -4.021354,1.597503 -1.595172,0 -3.021756,-0.616094 -4.022225,-1.597503 -0.982461,-0.963374 -1.540507,-2.264098 -1.463356,-3.664049 z"
|
||||
id="path2478"
|
||||
style="fill:#3465a4;fill-rule:evenodd;stroke:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
<path
|
||||
sodipodi:nodetypes="csssscsccsscsccssssscsscccsssc"
|
||||
id="path39166"
|
||||
d="m 25.8125,6.03125 c -0.404852,5.528e-4 -0.848347,0.1185677 -1.125,0.3125 -0.280209,0.1956523 -0.335403,0.3764836 -0.34375,0.46875 -0.0083,0.092267 -0.01539,0.1764801 0.1875,0.34375 0.01899,0.015735 0.04457,0.014317 0.0625,0.03125 0.124258,0.1010283 5.71875,4.65625 5.71875,4.65625 0.373658,0.304091 0.504393,0.795817 0.34375,1.25 -0.160635,0.454191 -0.580748,0.373449 -1.0625,0.375 -5.634142,0.05114 -17.625,0.03125 -17.625,0.03125 -0.952967,6.38e-4 -2.3399576,0.524782 -2.40625,1.59375 -0.063562,1.024947 0.924797,1.4375 1.59375,1.4375 0,-1e-6 8.8125,0 8.8125,0 0.488364,-5.92e-4 0.936141,0.225277 1.09375,0.6875 0.157609,0.462231 -0.01926,0.514621 -0.40625,0.8125 0,0 -15.875,12.21875 -15.875,12.21875 -0.00142,0.0014 -0.029829,-0.0014 -0.03125,0 -0.064037,0.04879 -0.054226,0.04875 -0.03125,0.03125 -0.5536758,0.424619 -0.9087886,1.004019 -1.03125,1.5 -0.1224536,0.495981 -0.04661,0.856152 0.1875,1.15625 0.4788333,0.613413 1.777612,0.754857 2.90625,-0.15625 1e-7,10e-7 8.65625,-7.09375 8.65625,-7.09375 0.361955,-0.295753 0.872897,-0.352437 1.28125,-0.125 0.408345,0.227436 0.623381,0.692814 0.5625,1.15625 0,-1e-6 -0.0997,0.953636 -0.09375,1.34375 0.09498,1.301756 0.451616,2.521825 0.989039,3.664234 C 20.799917,36.321089 27.770982,19.392853 44.1875,21.03125 43.339652,19.54368 42.151282,18.185293 40.65625,16.96875 36.159865,13.309932 31.42016,9.8828973 26.8125,6.34375 26.805335,6.3388584 26.788292,6.317553 26.78125,6.3125 26.570707,6.1513121 26.216591,6.0306895 25.8125,6.03125 z"
|
||||
style="opacity:0.51999996;fill:url(#radialGradient39177);fill-opacity:1;fill-rule:evenodd;stroke:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 14 KiB |
Before Width: | Height: | Size: 1.9 KiB After Width: | Height: | Size: 1.9 KiB |
@ -1,8 +1,9 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://web.resource.org/cc/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
@ -13,15 +14,14 @@
|
||||
height="32"
|
||||
id="svg7854"
|
||||
sodipodi:version="0.32"
|
||||
inkscape:version="0.44+devel"
|
||||
inkscape:version="0.48.0 r9654"
|
||||
version="1.0"
|
||||
sodipodi:docbase="/home/jimmac/gfx/ximian/art/icons/application-icons/blender/32x32"
|
||||
sodipodi:docname="blender.svg"
|
||||
inkscape:output_extension="org.inkscape.output.svg.inkscape"
|
||||
inkscape:export-filename="/home/jimmac/gfx/ximian/art/icons/application-icons/blender/32x32/blender.png"
|
||||
sodipodi:modified="true"
|
||||
inkscape:export-filename="/home/user/my/blender/builds/blender/release/freedesktop/icons/32x32/blender.png"
|
||||
inkscape:export-xdpi="90"
|
||||
inkscape:export-ydpi="90"
|
||||
sodipodi:modified="true">
|
||||
inkscape:export-ydpi="90">
|
||||
<defs
|
||||
id="defs7856">
|
||||
<linearGradient
|
||||
@ -88,11 +88,11 @@
|
||||
xlink:href="#linearGradient3564"
|
||||
id="linearGradient34576"
|
||||
gradientUnits="userSpaceOnUse"
|
||||
x1="213.58719"
|
||||
y1="195.85153"
|
||||
x2="183.16304"
|
||||
y2="-404.09323"
|
||||
gradientTransform="matrix(4.454064e-2,7.407134e-4,-7.396764e-4,4.445763e-2,14.51614,17.23777)" />
|
||||
x1="185.9903"
|
||||
y1="193.33229"
|
||||
x2="190.46461"
|
||||
y2="-458.05771"
|
||||
gradientTransform="matrix(0.06818845,0,0,0.06818845,22.51112,27.02885)" />
|
||||
<radialGradient
|
||||
inkscape:collect="always"
|
||||
xlink:href="#linearGradient35488"
|
||||
@ -111,7 +111,7 @@
|
||||
x1="21.204315"
|
||||
y1="21.699249"
|
||||
x2="20.155914"
|
||||
y2="-26.279823"
|
||||
y2="-26.908371"
|
||||
gradientUnits="userSpaceOnUse" />
|
||||
<linearGradient
|
||||
inkscape:collect="always"
|
||||
@ -131,7 +131,7 @@
|
||||
fx="26.109201"
|
||||
fy="19.668886"
|
||||
r="20.278975"
|
||||
gradientTransform="matrix(1.034353,0,0,0.794876,-9.267532,-3.559595)"
|
||||
gradientTransform="matrix(1.647222,0,0,1.26792,-15.47413,-5.79794)"
|
||||
gradientUnits="userSpaceOnUse" />
|
||||
</defs>
|
||||
<sodipodi:namedview
|
||||
@ -144,19 +144,20 @@
|
||||
objecttolerance="10"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:zoom="1"
|
||||
inkscape:cx="43.462736"
|
||||
inkscape:cy="4.2521067"
|
||||
inkscape:zoom="8.1228891"
|
||||
inkscape:cx="39.360762"
|
||||
inkscape:cy="28.503541"
|
||||
inkscape:document-units="px"
|
||||
inkscape:current-layer="layer1"
|
||||
width="32px"
|
||||
height="32px"
|
||||
width="48px"
|
||||
height="48px"
|
||||
inkscape:showpageshadow="false"
|
||||
inkscape:window-width="1046"
|
||||
inkscape:window-height="975"
|
||||
inkscape:window-x="66"
|
||||
inkscape:window-y="135"
|
||||
showgrid="true" />
|
||||
inkscape:window-width="1392"
|
||||
inkscape:window-height="976"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="0"
|
||||
showgrid="false"
|
||||
inkscape:window-maximized="1" />
|
||||
<metadata
|
||||
id="metadata7859">
|
||||
<rdf:RDF>
|
||||
@ -173,6 +174,7 @@
|
||||
<dc:source>http://jimmac.musichall.cz</dc:source>
|
||||
<cc:license
|
||||
rdf:resource="http://creativecommons.org/licenses/GPL/2.0/" />
|
||||
<dc:title></dc:title>
|
||||
</cc:Work>
|
||||
<cc:License
|
||||
rdf:about="http://creativecommons.org/licenses/GPL/2.0/">
|
||||
@ -194,45 +196,55 @@
|
||||
<g
|
||||
inkscape:label="Layer 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1">
|
||||
<path
|
||||
sodipodi:type="arc"
|
||||
style="opacity:0.54857142;color:black;fill:url(#radialGradient35494);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1;stroke-linecap:square;stroke-linejoin:miter;marker:none;marker-start:none;marker-mid:none;marker-end:none;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;visibility:visible;display:inline;overflow:visible;enable-background:accumulate"
|
||||
id="path35486"
|
||||
sodipodi:cx="28.019106"
|
||||
sodipodi:cy="38.98439"
|
||||
sodipodi:rx="15.467961"
|
||||
sodipodi:ry="5.3033009"
|
||||
d="M 43.487067 38.98439 A 15.467961 5.3033009 0 1 1 12.551145,38.98439 A 15.467961 5.3033009 0 1 1 43.487067 38.98439 z"
|
||||
transform="matrix(0.855157,0,0,0.922661,-5.661873,-11.9649)" />
|
||||
<path
|
||||
style="fill:#f57900;fill-rule:evenodd;stroke:#ce5c00;stroke-width:0.99999976;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none"
|
||||
id="path2482"
|
||||
d="M 10.187586,18.047598 C 10.194151,18.43397 10.319437,19.185177 10.503745,19.77445 C 10.894027,21.016706 11.555231,22.168099 12.476153,23.182358 C 13.420699,24.222933 14.58237,25.060495 15.925076,25.654464 C 17.337953,26.27861 18.867652,26.597469 20.454999,26.594958 C 22.042346,26.591604 23.571927,26.267543 24.984133,25.638869 C 26.326319,25.039035 27.487985,24.199467 28.431866,23.156206 C 29.350723,22.137924 30.010647,20.984688 30.400893,19.741759 C 30.598368,19.11307 30.720948,18.475882 30.772163,17.835427 C 30.821363,17.2061 30.800373,16.573491 30.710538,15.943511 C 30.536069,14.713677 30.109048,13.560455 29.453827,12.508078 C 28.852842,11.542152 28.079572,10.69606 27.160546,9.9842375 C 27.160546,9.9842375 27.161885,9.9829095 27.161885,9.9829095 C 27.161885,9.9829095 17.881797,2.8677916 17.881797,2.8677916 C 17.873902,2.861242 17.866682,2.8546924 17.858117,2.8488013 C 17.248098,2.3825367 16.224172,2.1628727 15.555131,2.6311026 C 14.876884,3.1039168 14.822425,4.0642627 15.423916,4.5593548 C 16.698828,5.564183 17.880332,6.5265263 19.162309,7.531231 C 19.162309,7.531231 6.4113444,7.545485 6.4113444,7.545485 C 5.0727388,7.545485 4.4964734,8.5678876 4.4728736,9.5288768 C 4.4498234,10.467487 5.2620561,11.472929 6.424444,11.473885 C 6.424444,11.473885 10.390015,11.491386 12.381198,11.487462 C 8.9576952,13.962892 5.5336567,16.437594 2.1103869,18.913339 C 1.1054891,19.68275 0.96699194,20.963058 1.5999701,21.773125 C 2.2441003,22.596939 3.4242293,22.597608 4.4422427,21.777148 C 4.4422427,21.777148 10.265656,17.018805 10.265656,17.018805 C 10.265656,17.018805 10.181697,17.661879 10.187586,18.047598 z "
|
||||
sodipodi:nodetypes="csssssssssscccssccczsccsccc" />
|
||||
<path
|
||||
sodipodi:type="arc"
|
||||
style="opacity:1;color:black;fill:url(#linearGradient39161);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1;stroke-linecap:square;stroke-linejoin:miter;marker:none;marker-start:none;marker-mid:none;marker-end:none;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;visibility:visible;display:inline;overflow:visible;enable-background:accumulate"
|
||||
id="path39153"
|
||||
sodipodi:cx="31.1875"
|
||||
sodipodi:cy="25.75"
|
||||
sodipodi:rx="11.5625"
|
||||
sodipodi:ry="10.125"
|
||||
d="M 42.75 25.75 A 11.5625 10.125 0 1 1 19.625,25.75 A 11.5625 10.125 0 1 1 42.75 25.75 z"
|
||||
transform="matrix(0.551379,0,0,0.562462,3.360761,1.826627)" />
|
||||
<path
|
||||
style="opacity:0.4857143;fill:none;fill-opacity:1;fill-rule:evenodd;stroke:url(#linearGradient34576);stroke-width:0.99999857;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="M 16.183788,3.9296777 C 16.183788,3.9296777 22.025089,8.4966596 22.025089,8.4966596 C 22.025089,8.4966596 6.9390429,8.5415355 6.9390429,8.5415355 C 6.3165584,8.5315997 5.5564454,8.6449512 5.5327974,9.4661791 C 5.5073593,10.349574 6.2671953,10.457569 6.7979051,10.464836 C 6.7979051,10.464836 15.347353,10.518394 15.347353,10.518394 C 15.347353,10.518394 3.079864,19.266484 3.079864,19.266484 C 1.47422,20.468758 1.9792489,22.058029 3.8906658,20.968571 C 3.8906658,20.968571 11.342463,15.000098 11.342463,15.000098 C 11.056594,17.35207 10.954659,19.925404 12.987785,22.280595 C 14.812944,24.394875 17.412379,25.450616 20.14939,25.602847 C 22.895537,25.619861 25.602761,24.674185 27.502194,22.686776 C 30.98046,18.579998 30.310289,13.928183 26.644414,10.848146 C 23.747065,8.4138175 20.584277,6.070626 17.555006,3.7958257 C 16.683886,3.1416665 15.680806,3.4366807 16.183788,3.9296777 z "
|
||||
id="path3562"
|
||||
sodipodi:nodetypes="ccczcccscsccsss" />
|
||||
<path
|
||||
style="fill:#3465a4;fill-rule:evenodd;stroke:none;stroke-width:1.0074476;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none"
|
||||
id="path2478"
|
||||
d="M 16.870671,15.810225 C 16.920643,14.898262 17.366801,14.093269 18.037494,13.523364 C 18.697519,12.963129 19.583821,12.621056 20.551972,12.621056 C 21.519507,12.621056 22.40581,12.963129 23.065682,13.523364 C 23.737114,14.093269 24.182732,14.897658 24.233288,15.809622 C 24.285081,16.74818 23.909289,17.619653 23.250649,18.265104 C 22.579217,18.92264 21.621855,19.335417 20.551972,19.335417 C 19.481474,19.335417 18.524111,18.92264 17.85271,18.265104 C 17.193393,17.619653 16.818896,16.74818 16.870671,15.810225 z " />
|
||||
<path
|
||||
style="opacity:0.51999996;fill:url(#radialGradient39177);fill-opacity:1;fill-rule:evenodd;stroke:none;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="M 15.941783,4.3847816 C 17.160224,5.348799 19.447413,7.1153436 20.65481,8.0968354 C 20.65481,8.0968354 9.073948,8.076863 6.1653597,8.0745003 C 5.5669549,8.0749003 5.063419,8.5382068 5.0653433,9.4713907 C 5.0672676,10.404575 5.8023091,10.947103 6.2223706,10.947103 C 6.2223706,10.947103 13.891753,10.97234 13.891753,10.97234 C 13.891753,10.97234 3.0882332,18.695586 3.0882332,18.695586 C 1.1062913,20.339688 3.0177499,22.143676 4.8637336,20.782461 C 4.8637337,20.782461 10.861827,15.991544 10.861827,15.991544 C 10.861827,15.991544 10.271137,18.152894 11.144276,20.090121 C 12.499624,23.097221 18.793729,11.835099 29.102295,12.862231 C 28.569898,11.929654 27.713189,10.967578 26.774401,10.204911 C 26.774401,10.204911 20.820024,5.6743082 17.926696,3.45557 C 16.156642,2.1700393 14.822071,3.3579146 15.941783,4.3847816 z "
|
||||
id="path39166"
|
||||
sodipodi:nodetypes="ccczcccccscscc" />
|
||||
id="layer1"
|
||||
transform="translate(0,-16)">
|
||||
<g
|
||||
id="blender"
|
||||
transform="matrix(0.65782075,0,0,0.65782075,-0.38501735,15.782256)"
|
||||
inkscape:label="blender">
|
||||
<path
|
||||
transform="matrix(1.274286,0,0,1.377124,-7.569123,-16.70193)"
|
||||
d="m 43.487067,38.98439 c 0,2.928932 -6.925242,5.303301 -15.467961,5.303301 -8.542719,0 -15.467961,-2.374369 -15.467961,-5.303301 0,-2.928932 6.925242,-5.303301 15.467961,-5.303301 8.542719,0 15.467961,2.374369 15.467961,5.303301 z"
|
||||
sodipodi:ry="5.3033009"
|
||||
sodipodi:rx="15.467961"
|
||||
sodipodi:cy="38.98439"
|
||||
sodipodi:cx="28.019106"
|
||||
id="path35486"
|
||||
style="opacity:0.54857142;color:#000000;fill:url(#radialGradient35494);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate"
|
||||
sodipodi:type="arc" />
|
||||
<path
|
||||
sodipodi:nodetypes="csssssssssscccsscccscccssccc"
|
||||
d="m 16.048489,28.093447 c 0.0098,0.576682 0.196474,1.697902 0.471116,2.577425 0.581566,1.854137 1.56684,3.572658 2.939126,5.086496 1.407488,1.553118 3.138519,2.803227 5.139315,3.68976 2.105357,0.931573 4.384795,1.407488 6.750134,1.403741 2.365339,-0.005 4.644601,-0.488686 6.74896,-1.427017 2.00002,-0.895288 3.731043,-2.148391 5.13754,-3.705517 1.369207,-1.519844 2.352576,-3.241114 2.934089,-5.096258 0.294262,-0.938353 0.476921,-1.889392 0.553238,-2.845308 0.07331,-0.939306 0.04204,-1.883511 -0.09183,-2.823792 -0.259981,-1.835599 -0.896294,-3.556847 -1.872652,-5.12758 -0.895541,-1.441699 -2.047808,-2.70454 -3.417268,-3.766975 0,0 0.002,-0.002 0.002,-0.002 0,0 -13.828458,-10.6197195 -13.828458,-10.6197195 -0.01176,-0.00978 -0.02252,-0.019551 -0.03529,-0.028344 -0.909003,-0.6959264 -2.434775,-0.6939758 -3.431728,0.00488 -1.01067,0.7057021 -1.091821,1.8092613 -0.195527,2.5482146 1.899775,1.4997633 3.792068,3.0680399 5.702368,4.5676189 0,0 -17.551681,-0.01171 -17.551681,-0.01171 -1.994685,0 -3.1682604,0.947915 -3.4153942,2.333683 -0.2180771,1.222836 0.7479213,2.738129 2.4800212,2.738129 2.956573,0.0039 5.942111,-0.0069 8.909215,-0.01272 0,0 -15.901723,11.764162 -15.901723,11.764162 -0.020527,0.01564 -0.041053,0.02933 -0.06158,0.04497 -1.4974197,1.148389 -1.9831951,3.059322 -1.0399808,4.268393 0.9598323,1.22959 2.9977653,1.230588 4.5147288,0.006 0,0 8.677593,-7.102098 8.677593,-7.102098 0,0 -0.12511,0.959824 -0.116333,1.535532 z"
|
||||
id="path2482"
|
||||
style="fill:#f57900;fill-rule:evenodd;stroke:#ce5c00;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
<path
|
||||
transform="matrix(0.821621,0,0,0.839506,5.875686,3.882724)"
|
||||
d="m 42.75,25.75 c 0,5.591883 -5.176708,10.125 -11.5625,10.125 -6.385792,0 -11.5625,-4.533117 -11.5625,-10.125 0,-5.591883 5.176708,-10.125 11.5625,-10.125 6.385792,0 11.5625,4.533117 11.5625,10.125 z"
|
||||
sodipodi:ry="10.125"
|
||||
sodipodi:rx="11.5625"
|
||||
sodipodi:cy="25.75"
|
||||
sodipodi:cx="31.1875"
|
||||
id="path39153"
|
||||
style="color:#000000;fill:url(#linearGradient39161);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:1;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate"
|
||||
sodipodi:type="arc" />
|
||||
<path
|
||||
sodipodi:nodetypes="cssssscscczccsccssssccscccccscssc"
|
||||
id="path3562"
|
||||
d="m 25.796988,6.0267804 c -0.404852,5.53e-4 -0.818619,0.1256944 -1.095272,0.3196267 -7.14e-4,7.142e-4 -0.0014,0.00143 -0.0021,0.00213 -0.280209,0.1956525 -0.336859,0.3680061 -0.345206,0.4602725 -0.0083,0.092266 -0.01324,0.1672776 0.189655,0.3345475 0.01899,0.015735 0.03747,0.032076 0.0554,0.049009 0.124258,0.1010285 5.704394,4.6389489 5.704394,4.6389489 0.373658,0.304091 0.51584,0.810232 0.355197,1.264415 -0.160635,0.454191 -0.589422,0.382732 -1.071174,0.384283 -5.634142,0.05114 -17.60967,0.01918 -17.60967,0.01918 -0.952967,6.38e-4 -2.3472795,0.516793 -2.4135719,1.585761 -0.063562,1.024947 0.9093059,1.457499 1.5782589,1.457499 0,0 8.830403,-0.01705 8.830403,-0.01705 0.488364,-5.91e-4 0.922857,0.221532 1.080466,0.683755 0.15761,0.462231 0.0033,0.53156 -0.383664,0.829439 0,0 -15.9006939,12.205735 -15.9006939,12.205735 -0.00142,0.0014 -0.00284,0.0028 -0.00426,0.0043 -0.064038,0.04879 -0.084772,0.06226 -0.061795,0.04476 -0.5536756,0.424618 -0.8961097,0.98072 -1.0185711,1.476701 -0.1224537,0.495981 -0.04659,0.882548 0.1875202,1.182646 0.4788333,0.613413 1.7693735,0.732111 2.8980115,-0.178996 0,0 8.6727243,-7.09799 8.6727243,-7.09799 0.361955,-0.295752 0.867758,-0.340606 1.276111,-0.113169 0.408345,0.227437 0.636512,0.681082 0.575631,1.144518 0,0 -0.112502,0.980045 -0.10655,1.370159 0.192357,2.636407 1.448328,4.914995 3.115366,6.91474 2.877746,3.172809 6.84939,4.556285 11.042271,4.719919 4.20342,-0.04394 8.185784,-1.662428 11.042264,-4.758277 5.218918,-6.385867 3.941737,-13.3639 -1.747326,-17.993227 C 36.14442,13.301598 31.42752,9.8792062 26.81986,6.3400589 c -0.0043,-0.00352 -0.0086,-0.00707 -0.01279,-0.010651 -0.0072,-0.00489 -0.01427,-0.00987 -0.02131,-0.014921 -0.210578,-0.1612288 -0.584681,-0.288267 -0.988772,-0.2877065 z"
|
||||
style="opacity:0.4857143;fill:none;stroke:url(#linearGradient34576);stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
<path
|
||||
d="m 26.007076,24.754048 c 0.07447,-1.361157 0.739293,-2.562655 1.738705,-3.413271 0.983518,-0.836183 2.304215,-1.346747 3.746876,-1.346747 1.441743,0 2.762441,0.510564 3.745729,1.346747 1.000515,0.850616 1.664539,2.051213 1.739875,3.41237 0.07718,1.400852 -0.4828,2.701576 -1.46425,3.66495 -1.000516,0.981409 -2.427099,1.597503 -4.021354,1.597503 -1.595172,0 -3.021756,-0.616094 -4.022225,-1.597503 -0.982461,-0.963374 -1.540507,-2.264098 -1.463356,-3.664049 z"
|
||||
id="path2478"
|
||||
style="fill:#3465a4;fill-rule:evenodd;stroke:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
<path
|
||||
sodipodi:nodetypes="csssscsccsscsccssssscsscccsssc"
|
||||
id="path39166"
|
||||
d="m 25.8125,6.03125 c -0.404852,5.528e-4 -0.848347,0.1185677 -1.125,0.3125 -0.280209,0.1956523 -0.335403,0.3764836 -0.34375,0.46875 -0.0083,0.092267 -0.01539,0.1764801 0.1875,0.34375 0.01899,0.015735 0.04457,0.014317 0.0625,0.03125 0.124258,0.1010283 5.71875,4.65625 5.71875,4.65625 0.373658,0.304091 0.504393,0.795817 0.34375,1.25 -0.160635,0.454191 -0.580748,0.373449 -1.0625,0.375 -5.634142,0.05114 -17.625,0.03125 -17.625,0.03125 -0.952967,6.38e-4 -2.3399576,0.524782 -2.40625,1.59375 -0.063562,1.024947 0.924797,1.4375 1.59375,1.4375 0,-1e-6 8.8125,0 8.8125,0 0.488364,-5.92e-4 0.936141,0.225277 1.09375,0.6875 0.157609,0.462231 -0.01926,0.514621 -0.40625,0.8125 0,0 -15.875,12.21875 -15.875,12.21875 -0.00142,0.0014 -0.029829,-0.0014 -0.03125,0 -0.064037,0.04879 -0.054226,0.04875 -0.03125,0.03125 -0.5536758,0.424619 -0.9087886,1.004019 -1.03125,1.5 -0.1224536,0.495981 -0.04661,0.856152 0.1875,1.15625 0.4788333,0.613413 1.777612,0.754857 2.90625,-0.15625 1e-7,10e-7 8.65625,-7.09375 8.65625,-7.09375 0.361955,-0.295753 0.872897,-0.352437 1.28125,-0.125 0.408345,0.227436 0.623381,0.692814 0.5625,1.15625 0,-1e-6 -0.0997,0.953636 -0.09375,1.34375 0.09498,1.301756 0.451616,2.521825 0.989039,3.664234 C 20.799917,36.321089 27.770982,19.392853 44.1875,21.03125 43.339652,19.54368 42.151282,18.185293 40.65625,16.96875 36.159865,13.309932 31.42016,9.8828973 26.8125,6.34375 26.805335,6.3388584 26.788292,6.317553 26.78125,6.3125 26.570707,6.1513121 26.216591,6.0306895 25.8125,6.03125 z"
|
||||
style="opacity:0.51999996;fill:url(#radialGradient39177);fill-opacity:1;fill-rule:evenodd;stroke:none"
|
||||
inkscape:connector-curvature="0" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 12 KiB After Width: | Height: | Size: 14 KiB |
@ -1,66 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- mode: python; tab-width: 4; indent-tabs-mode: t; -*-
|
||||
# vim: tabstop=4
|
||||
# $Id$
|
||||
# ***** BEGIN GPL LICENSE BLOCK *****
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# The Original Code is Copyright (C) 2008 by the Blender Foundation
|
||||
# All rights reserved.
|
||||
#
|
||||
# The Original Code is: see repository.
|
||||
#
|
||||
# Contributor(s): see repository.
|
||||
|
||||
# <pep8-80 compliant>
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
|
||||
nanblenderhome = os.getenv("NANBLENDERHOME")
|
||||
|
||||
if nanblenderhome is None:
|
||||
nanblenderhome = os.path.dirname(os.path.abspath(sys.argv[0])) + "/.."
|
||||
|
||||
config = nanblenderhome + "/source/blender/blenkernel/BKE_blender.h"
|
||||
|
||||
infile = open(config)
|
||||
|
||||
major = None
|
||||
minor = None
|
||||
|
||||
for line in infile.readlines():
|
||||
m = re.search("#define BLENDER_VERSION\s+(\d+)", line)
|
||||
if m:
|
||||
major = m.group(1)
|
||||
m = re.search("#define BLENDER_SUBVERSION\s+(\d+)", line)
|
||||
if m:
|
||||
minor = m.group(1)
|
||||
if minor and major:
|
||||
major = float(major) / 100.0
|
||||
break
|
||||
|
||||
infile.close()
|
||||
|
||||
# Major was changed to float, but minor is still a string
|
||||
if minor and major:
|
||||
if minor == "0":
|
||||
print "%.2f" % major
|
||||
else:
|
||||
print "%.2f.%s" % (major, minor)
|
||||
else:
|
||||
print "unknownversion"
|
@ -187,12 +187,13 @@ void plugin_seq_doit(Cast *cast, float facf0, float facf1, int width,
|
||||
double gamma_table[256];
|
||||
double uv_table[256];
|
||||
float *destf = out->rect_float;
|
||||
float *src1f = ibuf1->rect_float;
|
||||
float *src1f;
|
||||
|
||||
if (!ibuf1) return;
|
||||
|
||||
dest= (char *) out->rect;
|
||||
src1= (char *) ibuf1->rect;
|
||||
src1f= ibuf1->rect_float;
|
||||
|
||||
for (y = 0; y < 256; y++) {
|
||||
float v = 1.0 * y / 255;
|
||||
|
@ -120,12 +120,13 @@ void plugin_seq_doit(Cast *cast, float facf0, float facf1, int width,
|
||||
float gamma_table[256];
|
||||
float uv_table[256];
|
||||
float *destf = out->rect_float;
|
||||
float *src1f = ibuf1->rect_float;
|
||||
float *src1f;
|
||||
|
||||
if (!ibuf1) return;
|
||||
|
||||
dest= (char *) out->rect;
|
||||
src1= (char *) ibuf1->rect;
|
||||
src1f= ibuf1->rect_float;
|
||||
|
||||
for (y = 0; y < 256; y++) {
|
||||
float v = 1.0 * y / 255;
|
||||
|
@ -20,16 +20,18 @@
|
||||
|
||||
# To support reload properly, try to access a package var, if it's there, reload everything
|
||||
if "init_data" in locals():
|
||||
reload(model)
|
||||
reload(operators)
|
||||
reload(client)
|
||||
reload(slave)
|
||||
reload(master)
|
||||
reload(master_html)
|
||||
reload(utils)
|
||||
reload(balancing)
|
||||
reload(ui)
|
||||
reload(repath)
|
||||
import imp
|
||||
imp.reload(model)
|
||||
imp.reload(operators)
|
||||
imp.reload(client)
|
||||
imp.reload(slave)
|
||||
imp.reload(master)
|
||||
imp.reload(master_html)
|
||||
imp.reload(utils)
|
||||
imp.reload(balancing)
|
||||
imp.reload(ui)
|
||||
imp.reload(repath)
|
||||
imp.reload(versioning)
|
||||
else:
|
||||
from netrender import model
|
||||
from netrender import operators
|
||||
@ -41,18 +43,25 @@ else:
|
||||
from netrender import balancing
|
||||
from netrender import ui
|
||||
from netrender import repath
|
||||
from netrender import versioning
|
||||
|
||||
jobs = []
|
||||
slaves = []
|
||||
blacklist = []
|
||||
|
||||
init_file = ""
|
||||
init_address = True
|
||||
valid_address = False
|
||||
init_data = True
|
||||
|
||||
|
||||
def register():
|
||||
ui.addProperties()
|
||||
|
||||
import bpy
|
||||
scene = bpy.context.scene
|
||||
if scene:
|
||||
netsettings = scene.network_render
|
||||
ui.init_data(netsettings)
|
||||
|
||||
|
||||
def unregister():
|
||||
|
@ -25,6 +25,9 @@ class RatingRule:
|
||||
def __init__(self):
|
||||
self.enabled = True
|
||||
|
||||
def id(self):
|
||||
return str(id(self))
|
||||
|
||||
def rate(self, job):
|
||||
return 0
|
||||
|
||||
@ -32,6 +35,9 @@ class ExclusionRule:
|
||||
def __init__(self):
|
||||
self.enabled = True
|
||||
|
||||
def id(self):
|
||||
return str(id(self))
|
||||
|
||||
def test(self, job):
|
||||
return False
|
||||
|
||||
@ -39,6 +45,9 @@ class PriorityRule:
|
||||
def __init__(self):
|
||||
self.enabled = True
|
||||
|
||||
def id(self):
|
||||
return str(id(self))
|
||||
|
||||
def test(self, job):
|
||||
return False
|
||||
|
||||
@ -50,13 +59,13 @@ class Balancer:
|
||||
|
||||
def ruleByID(self, rule_id):
|
||||
for rule in self.rules:
|
||||
if id(rule) == rule_id:
|
||||
if rule.id() == rule_id:
|
||||
return rule
|
||||
for rule in self.priorities:
|
||||
if id(rule) == rule_id:
|
||||
if rule.id() == rule_id:
|
||||
return rule
|
||||
for rule in self.exceptions:
|
||||
if id(rule) == rule_id:
|
||||
if rule.id() == rule_id:
|
||||
return rule
|
||||
|
||||
return None
|
||||
|
@ -92,7 +92,82 @@ def addPointCache(job, ob, point_cache, default_path):
|
||||
previous_frame = previous_item[0]
|
||||
job.addFile(cache_path + current_file, previous_frame + 1, next_frame - 1)
|
||||
|
||||
def fillCommonJobSettings(job, job_name, netsettings):
|
||||
job.name = job_name
|
||||
job.category = netsettings.job_category
|
||||
|
||||
for slave in netrender.blacklist:
|
||||
job.blacklist.append(slave.id)
|
||||
|
||||
job.chunks = netsettings.chunks
|
||||
job.priority = netsettings.priority
|
||||
|
||||
if netsettings.job_type == "JOB_BLENDER":
|
||||
job.type = netrender.model.JOB_BLENDER
|
||||
elif netsettings.job_type == "JOB_PROCESS":
|
||||
job.type = netrender.model.JOB_PROCESS
|
||||
elif netsettings.job_type == "JOB_VCS":
|
||||
job.type = netrender.model.JOB_VCS
|
||||
|
||||
def clientSendJob(conn, scene, anim = False):
|
||||
netsettings = scene.network_render
|
||||
if netsettings.job_type == "JOB_BLENDER":
|
||||
return clientSendJobBlender(conn, scene, anim)
|
||||
elif netsettings.job_type == "JOB_VCS":
|
||||
return clientSendJobVCS(conn, scene, anim)
|
||||
|
||||
def clientSendJobVCS(conn, scene, anim = False):
|
||||
netsettings = scene.network_render
|
||||
job = netrender.model.RenderJob()
|
||||
|
||||
if anim:
|
||||
for f in range(scene.frame_start, scene.frame_end + 1):
|
||||
job.addFrame(f)
|
||||
else:
|
||||
job.addFrame(scene.frame_current)
|
||||
|
||||
filename = bpy.data.filepath
|
||||
|
||||
if not filename.startswith(netsettings.vcs_wpath):
|
||||
# this is an error, need better way to handle this
|
||||
return
|
||||
|
||||
filename = filename[len(netsettings.vcs_wpath):]
|
||||
|
||||
if filename[0] in (os.sep, os.altsep):
|
||||
filename = filename[1:]
|
||||
|
||||
print("CREATING VCS JOB", filename)
|
||||
|
||||
job.addFile(filename, signed=False)
|
||||
|
||||
job_name = netsettings.job_name
|
||||
path, name = os.path.split(filename)
|
||||
if job_name == "[default]":
|
||||
job_name = name
|
||||
|
||||
|
||||
fillCommonJobSettings(job, job_name, netsettings)
|
||||
|
||||
# VCS Specific code
|
||||
job.version_info = netrender.model.VersioningInfo()
|
||||
job.version_info.system = netsettings.vcs_system
|
||||
job.version_info.wpath = netsettings.vcs_wpath
|
||||
job.version_info.rpath = netsettings.vcs_rpath
|
||||
job.version_info.revision = netsettings.vcs_revision
|
||||
|
||||
# try to send path first
|
||||
conn.request("POST", "/job", json.dumps(job.serialize()))
|
||||
response = conn.getresponse()
|
||||
response.read()
|
||||
|
||||
job_id = response.getheader("job-id")
|
||||
|
||||
# a VCS job is always good right now, need error handling
|
||||
|
||||
return job_id
|
||||
|
||||
def clientSendJobBlender(conn, scene, anim = False):
|
||||
netsettings = scene.network_render
|
||||
job = netrender.model.RenderJob()
|
||||
|
||||
@ -140,15 +215,13 @@ def clientSendJob(conn, scene, anim = False):
|
||||
for object in bpy.data.objects:
|
||||
for modifier in object.modifiers:
|
||||
if modifier.type == 'FLUID_SIMULATION' and modifier.settings.type == "DOMAIN":
|
||||
addFluidFiles(job, bpy.path.abspath(modifier.settings.path))
|
||||
addFluidFiles(job, bpy.path.abspath(modifier.settings.filepath))
|
||||
elif modifier.type == "CLOTH":
|
||||
addPointCache(job, object, modifier.point_cache, default_path)
|
||||
elif modifier.type == "SOFT_BODY":
|
||||
addPointCache(job, object, modifier.point_cache, default_path)
|
||||
elif modifier.type == "SMOKE" and modifier.smoke_type == "TYPE_DOMAIN":
|
||||
addPointCache(job, object, modifier.domain_settings.point_cache_low, default_path)
|
||||
if modifier.domain_settings.use_high_resolution:
|
||||
addPointCache(job, object, modifier.domain_settings.point_cache_high, default_path)
|
||||
addPointCache(job, object, modifier.domain_settings.point_cache, default_path)
|
||||
elif modifier.type == "MULTIRES" and modifier.is_external:
|
||||
file_path = bpy.path.abspath(modifier.filepath)
|
||||
job.addFile(file_path)
|
||||
@ -160,14 +233,7 @@ def clientSendJob(conn, scene, anim = False):
|
||||
|
||||
#print(job.files)
|
||||
|
||||
job.name = job_name
|
||||
job.category = netsettings.job_category
|
||||
|
||||
for slave in netrender.blacklist:
|
||||
job.blacklist.append(slave.id)
|
||||
|
||||
job.chunks = netsettings.chunks
|
||||
job.priority = netsettings.priority
|
||||
fillCommonJobSettings(job, job_name, netsettings)
|
||||
|
||||
# try to send path first
|
||||
conn.request("POST", "/job", json.dumps(job.serialize()))
|
||||
@ -195,7 +261,7 @@ def requestResult(conn, job_id, frame):
|
||||
class NetworkRenderEngine(bpy.types.RenderEngine):
|
||||
bl_idname = 'NET_RENDER'
|
||||
bl_label = "Network Render"
|
||||
bl_postprocess = False
|
||||
bl_use_postprocess = False
|
||||
def render(self, scene):
|
||||
if scene.network_render.mode == "RENDER_CLIENT":
|
||||
self.render_client(scene)
|
||||
@ -211,7 +277,7 @@ class NetworkRenderEngine(bpy.types.RenderEngine):
|
||||
|
||||
address = "" if netsettings.server_address == "[default]" else netsettings.server_address
|
||||
|
||||
master.runMaster((address, netsettings.server_port), netsettings.use_master_broadcast, netsettings.use_master_clear, netsettings.path, self.update_stats, self.test_break)
|
||||
master.runMaster((address, netsettings.server_port), netsettings.use_master_broadcast, netsettings.use_master_clear, bpy.path.abspath(netsettings.path), self.update_stats, self.test_break)
|
||||
|
||||
|
||||
def render_slave(self, scene):
|
||||
@ -236,10 +302,11 @@ class NetworkRenderEngine(bpy.types.RenderEngine):
|
||||
# reading back result
|
||||
|
||||
self.update_stats("", "Network render waiting for results")
|
||||
|
||||
|
||||
|
||||
requestResult(conn, job_id, scene.frame_current)
|
||||
response = conn.getresponse()
|
||||
response.read()
|
||||
buf = response.read()
|
||||
|
||||
if response.status == http.client.NO_CONTENT:
|
||||
new_job = True
|
||||
@ -248,13 +315,13 @@ class NetworkRenderEngine(bpy.types.RenderEngine):
|
||||
|
||||
requestResult(conn, job_id, scene.frame_current)
|
||||
response = conn.getresponse()
|
||||
response.read()
|
||||
|
||||
buf = response.read()
|
||||
|
||||
while response.status == http.client.ACCEPTED and not self.test_break():
|
||||
time.sleep(1)
|
||||
requestResult(conn, job_id, scene.frame_current)
|
||||
response = conn.getresponse()
|
||||
response.read()
|
||||
buf = response.read()
|
||||
|
||||
# cancel new jobs (animate on network) on break
|
||||
if self.test_break() and new_job:
|
||||
@ -271,18 +338,22 @@ class NetworkRenderEngine(bpy.types.RenderEngine):
|
||||
r = scene.render
|
||||
x= int(r.resolution_x*r.resolution_percentage*0.01)
|
||||
y= int(r.resolution_y*r.resolution_percentage*0.01)
|
||||
|
||||
result_path = os.path.join(bpy.path.abspath(netsettings.path), "output.exr")
|
||||
|
||||
folder = os.path.split(result_path)[0]
|
||||
|
||||
if not os.path.exists(folder):
|
||||
os.mkdir(folder)
|
||||
|
||||
f = open(os.path.join(netsettings.path, "output.exr"), "wb")
|
||||
buf = response.read(1024)
|
||||
f = open(result_path, "wb")
|
||||
|
||||
while buf:
|
||||
f.write(buf)
|
||||
buf = response.read(1024)
|
||||
f.write(buf)
|
||||
|
||||
f.close()
|
||||
|
||||
result = self.begin_result(0, 0, x, y)
|
||||
result.load_from_file(os.path.join(netsettings.path, "output.exr"))
|
||||
result.load_from_file(result_path)
|
||||
self.end_result(result)
|
||||
|
||||
conn.close()
|
||||
@ -294,7 +365,6 @@ def compatible(module):
|
||||
except: pass
|
||||
del module
|
||||
|
||||
#compatible("properties_render")
|
||||
compatible("properties_world")
|
||||
compatible("properties_material")
|
||||
compatible("properties_data_mesh")
|
||||
|
@ -27,6 +27,7 @@ from netrender.utils import *
|
||||
import netrender.model
|
||||
import netrender.balancing
|
||||
import netrender.master_html
|
||||
import netrender.thumbnail as thumbnail
|
||||
|
||||
class MRenderFile(netrender.model.RenderFile):
|
||||
def __init__(self, filepath, index, start, end, signature):
|
||||
@ -35,7 +36,7 @@ class MRenderFile(netrender.model.RenderFile):
|
||||
|
||||
def test(self):
|
||||
self.found = os.path.exists(self.filepath)
|
||||
if self.found:
|
||||
if self.found and self.signature != None:
|
||||
found_signature = hashFile(self.filepath)
|
||||
self.found = self.signature == found_signature
|
||||
|
||||
@ -82,8 +83,6 @@ class MRenderJob(netrender.model.RenderJob):
|
||||
self.save_path = ""
|
||||
self.files = [MRenderFile(rfile.filepath, rfile.index, rfile.start, rfile.end, rfile.signature) for rfile in job_info.files]
|
||||
|
||||
self.resolution = None
|
||||
|
||||
def initInfo(self):
|
||||
if not self.resolution:
|
||||
self.resolution = tuple(getFileInfo(self.files[0].filepath, ["bpy.context.scene.render.resolution_x", "bpy.context.scene.render.resolution_y", "bpy.context.scene.render.resolution_percentage"]))
|
||||
@ -105,9 +104,11 @@ class MRenderJob(netrender.model.RenderJob):
|
||||
self.chunks = info_map["chunks"]
|
||||
|
||||
def testStart(self):
|
||||
for f in self.files:
|
||||
if not f.test():
|
||||
return False
|
||||
# Don't test files for versionned jobs
|
||||
if not self.version_info:
|
||||
for f in self.files:
|
||||
if not f.test():
|
||||
return False
|
||||
|
||||
self.start()
|
||||
self.initInfo()
|
||||
@ -201,6 +202,15 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
# is extremely slow due to some timeout..
|
||||
sys.stderr.write("[%s] %s\n" % (self.log_date_time_string(), format%args))
|
||||
|
||||
def getInfoMap(self):
|
||||
length = int(self.headers['content-length'])
|
||||
|
||||
if length > 0:
|
||||
msg = str(self.rfile.read(length), encoding='utf8')
|
||||
return json.loads(msg)
|
||||
else:
|
||||
return {}
|
||||
|
||||
def send_head(self, code = http.client.OK, headers = {}, content = "application/octet-stream"):
|
||||
self.send_response(code)
|
||||
self.send_header("Content-type", content)
|
||||
@ -297,7 +307,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
elif frame.status == DONE:
|
||||
filename = os.path.join(job.save_path, "%06d.exr" % frame_number)
|
||||
|
||||
thumbname = thumbnail(filename)
|
||||
thumbname = thumbnail.generate(filename)
|
||||
|
||||
if thumbname:
|
||||
f = open(thumbname, 'rb')
|
||||
@ -516,8 +526,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
job = self.server.getJobID(job_id)
|
||||
|
||||
if job:
|
||||
length = int(self.headers['content-length'])
|
||||
info_map = eval(str(self.rfile.read(length), encoding='utf8'))
|
||||
info_map = self.getInfoMap()
|
||||
|
||||
job.edit(info_map)
|
||||
self.send_head()
|
||||
@ -529,8 +538,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
self.send_head(http.client.NO_CONTENT)
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/balance_limit":
|
||||
length = int(self.headers['content-length'])
|
||||
info_map = eval(str(self.rfile.read(length), encoding='utf8'))
|
||||
info_map = self.getInfoMap()
|
||||
for rule_id, limit in info_map.items():
|
||||
try:
|
||||
rule = self.server.balancer.ruleByID(rule_id)
|
||||
@ -542,8 +550,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
self.send_head()
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/balance_enable":
|
||||
length = int(self.headers['content-length'])
|
||||
info_map = eval(str(self.rfile.read(length), encoding='utf8'))
|
||||
info_map = self.getInfoMap()
|
||||
for rule_id, enabled in info_map.items():
|
||||
rule = self.server.balancer.ruleByID(rule_id)
|
||||
if rule:
|
||||
@ -555,13 +562,8 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
match = cancel_pattern.match(self.path)
|
||||
|
||||
if match:
|
||||
length = int(self.headers['content-length'])
|
||||
|
||||
if length > 0:
|
||||
info_map = eval(str(self.rfile.read(length), encoding='utf8'))
|
||||
clear = info_map.get("clear", False)
|
||||
else:
|
||||
clear = False
|
||||
info_map = self.getInfoMap()
|
||||
clear = info_map.get("clear", False)
|
||||
|
||||
job_id = match.groups()[0]
|
||||
|
||||
@ -582,13 +584,8 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
match = pause_pattern.match(self.path)
|
||||
|
||||
if match:
|
||||
length = int(self.headers['content-length'])
|
||||
|
||||
if length > 0:
|
||||
info_map = eval(str(self.rfile.read(length), encoding='utf8'))
|
||||
status = info_map.get("status", None)
|
||||
else:
|
||||
status = None
|
||||
info_map = self.getInfoMap()
|
||||
status = info_map.get("status", None)
|
||||
|
||||
job_id = match.groups()[0]
|
||||
|
||||
@ -607,13 +604,8 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
elif self.path == "/clear":
|
||||
# cancel all jobs
|
||||
length = int(self.headers['content-length'])
|
||||
|
||||
if length > 0:
|
||||
info_map = eval(str(self.rfile.read(length), encoding='utf8'))
|
||||
clear = info_map.get("clear", False)
|
||||
else:
|
||||
clear = False
|
||||
info_map = self.getInfoMap()
|
||||
clear = info_map.get("clear", False)
|
||||
|
||||
self.server.stats("", "Clearing jobs")
|
||||
self.server.clear(clear)
|
||||
@ -769,7 +761,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
frame = job[job_frame]
|
||||
|
||||
if frame:
|
||||
if job.type == netrender.model.JOB_BLENDER:
|
||||
if job.hasRenderResult():
|
||||
if job_result == DONE:
|
||||
length = int(self.headers['content-length'])
|
||||
buf = self.rfile.read(length)
|
||||
@ -820,7 +812,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
|
||||
frame = job[job_frame]
|
||||
|
||||
if frame:
|
||||
if job.type == netrender.model.JOB_BLENDER:
|
||||
if job.hasRenderResult():
|
||||
length = int(self.headers['content-length'])
|
||||
buf = self.rfile.read(length)
|
||||
f = open(os.path.join(job.save_path, "%06d.jpg" % job_frame), 'wb')
|
||||
|
@ -20,6 +20,7 @@ import os
|
||||
import re
|
||||
import shutil
|
||||
from netrender.utils import *
|
||||
import netrender.model
|
||||
|
||||
src_folder = os.path.split(__file__)[0]
|
||||
|
||||
@ -115,6 +116,7 @@ def get(handler):
|
||||
"id",
|
||||
"name",
|
||||
"category",
|
||||
"type",
|
||||
"chunks",
|
||||
"priority",
|
||||
"usage",
|
||||
@ -139,6 +141,7 @@ def get(handler):
|
||||
job.id,
|
||||
link(job.name, "/html/job" + job.id),
|
||||
job.category if job.category else "<i>None</i>",
|
||||
netrender.model.JOB_TYPES[job.type],
|
||||
str(job.chunks) +
|
||||
"""<button title="increase chunks size" onclick="request('/edit_%s', "{'chunks': %i}");">+</button>""" % (job.id, job.chunks + 1) +
|
||||
"""<button title="decrease chunks size" onclick="request('/edit_%s', "{'chunks': %i}");" %s>-</button>""" % (job.id, job.chunks - 1, "disabled=True" if job.chunks == 1 else ""),
|
||||
@ -180,28 +183,28 @@ def get(handler):
|
||||
for rule in handler.server.balancer.rules:
|
||||
rowTable(
|
||||
"rating",
|
||||
checkbox("", rule.enabled, "balance_enable('%i', '%s')" % (id(rule), str(not rule.enabled))),
|
||||
checkbox("", rule.enabled, "balance_enable('%s', '%s')" % (rule.id(), str(not rule.enabled).lower())),
|
||||
rule,
|
||||
rule.str_limit() +
|
||||
"""<button title="edit limit" onclick="balance_edit('%i', '%s');">edit</button>""" % (id(rule), str(rule.limit)) if hasattr(rule, "limit") else " "
|
||||
"""<button title="edit limit" onclick="balance_edit('%s', '%s');">edit</button>""" % (rule.id(), str(rule.limit)) if hasattr(rule, "limit") else " "
|
||||
)
|
||||
|
||||
for rule in handler.server.balancer.priorities:
|
||||
rowTable(
|
||||
"priority",
|
||||
checkbox("", rule.enabled, "balance_enable('%i', '%s')" % (id(rule), str(not rule.enabled))),
|
||||
checkbox("", rule.enabled, "balance_enable('%s', '%s')" % (rule.id(), str(not rule.enabled).lower())),
|
||||
rule,
|
||||
rule.str_limit() +
|
||||
"""<button title="edit limit" onclick="balance_edit('%i', '%s');">edit</button>""" % (id(rule), str(rule.limit)) if hasattr(rule, "limit") else " "
|
||||
"""<button title="edit limit" onclick="balance_edit('%s', '%s');">edit</button>""" % (rule.id(), str(rule.limit)) if hasattr(rule, "limit") else " "
|
||||
)
|
||||
|
||||
for rule in handler.server.balancer.exceptions:
|
||||
rowTable(
|
||||
"exception",
|
||||
checkbox("", rule.enabled, "balance_enable('%i', '%s')" % (id(rule), str(not rule.enabled))),
|
||||
checkbox("", rule.enabled, "balance_enable('%s', '%s')" % (rule.id(), str(not rule.enabled).lower())),
|
||||
rule,
|
||||
rule.str_limit() +
|
||||
"""<button title="edit limit" onclick="balance_edit('%i', '%s');">edit</button>""" % (id(rule), str(rule.limit)) if hasattr(rule, "limit") else " "
|
||||
"""<button title="edit limit" onclick="balance_edit('%s', '%s');">edit</button>""" % (rule.id(), str(rule.limit)) if hasattr(rule, "limit") else " "
|
||||
)
|
||||
|
||||
endTable()
|
||||
@ -228,39 +231,52 @@ def get(handler):
|
||||
endTable()
|
||||
|
||||
|
||||
output("<h2>Files</h2>")
|
||||
|
||||
startTable()
|
||||
headerTable("path")
|
||||
|
||||
tot_cache = 0
|
||||
tot_fluid = 0
|
||||
|
||||
rowTable(job.files[0].filepath)
|
||||
rowTable("Other Files", class_style = "toggle", extra = "onclick='toggleDisplay(".other", "none", "table-row")'")
|
||||
|
||||
for file in job.files:
|
||||
if file.filepath.endswith(".bphys"):
|
||||
tot_cache += 1
|
||||
elif file.filepath.endswith(".bobj.gz") or file.filepath.endswith(".bvel.gz"):
|
||||
tot_fluid += 1
|
||||
else:
|
||||
if file != job.files[0]:
|
||||
rowTable(file.filepath, class_style = "other")
|
||||
|
||||
if tot_cache > 0:
|
||||
rowTable("%i physic cache files" % tot_cache, class_style = "toggle", extra = "onclick='toggleDisplay(".cache", "none", "table-row")'")
|
||||
if job.type == netrender.model.JOB_BLENDER:
|
||||
output("<h2>Files</h2>")
|
||||
|
||||
startTable()
|
||||
headerTable("path")
|
||||
|
||||
tot_cache = 0
|
||||
tot_fluid = 0
|
||||
|
||||
rowTable(job.files[0].filepath)
|
||||
rowTable("Other Files", class_style = "toggle", extra = "onclick='toggleDisplay(".other", "none", "table-row")'")
|
||||
|
||||
for file in job.files:
|
||||
if file.filepath.endswith(".bphys"):
|
||||
rowTable(os.path.split(file.filepath)[1], class_style = "cache")
|
||||
|
||||
if tot_fluid > 0:
|
||||
rowTable("%i fluid bake files" % tot_fluid, class_style = "toggle", extra = "onclick='toggleDisplay(".fluid", "none", "table-row")'")
|
||||
for file in job.files:
|
||||
if file.filepath.endswith(".bobj.gz") or file.filepath.endswith(".bvel.gz"):
|
||||
rowTable(os.path.split(file.filepath)[1], class_style = "fluid")
|
||||
|
||||
endTable()
|
||||
tot_cache += 1
|
||||
elif file.filepath.endswith(".bobj.gz") or file.filepath.endswith(".bvel.gz"):
|
||||
tot_fluid += 1
|
||||
else:
|
||||
if file != job.files[0]:
|
||||
rowTable(file.filepath, class_style = "other")
|
||||
|
||||
if tot_cache > 0:
|
||||
rowTable("%i physic cache files" % tot_cache, class_style = "toggle", extra = "onclick='toggleDisplay(".cache", "none", "table-row")'")
|
||||
for file in job.files:
|
||||
if file.filepath.endswith(".bphys"):
|
||||
rowTable(os.path.split(file.filepath)[1], class_style = "cache")
|
||||
|
||||
if tot_fluid > 0:
|
||||
rowTable("%i fluid bake files" % tot_fluid, class_style = "toggle", extra = "onclick='toggleDisplay(".fluid", "none", "table-row")'")
|
||||
for file in job.files:
|
||||
if file.filepath.endswith(".bobj.gz") or file.filepath.endswith(".bvel.gz"):
|
||||
rowTable(os.path.split(file.filepath)[1], class_style = "fluid")
|
||||
|
||||
endTable()
|
||||
elif job.type == netrender.model.JOB_VCS:
|
||||
output("<h2>Versioning</h2>")
|
||||
|
||||
startTable()
|
||||
|
||||
rowTable("System", job.version_info.system.name)
|
||||
rowTable("Remote Path", job.version_info.rpath)
|
||||
rowTable("Working Path", job.version_info.wpath)
|
||||
rowTable("Revision", job.version_info.revision)
|
||||
rowTable("Render File", job.files[0].filepath)
|
||||
|
||||
endTable()
|
||||
|
||||
if job.blacklist:
|
||||
output("<h2>Blacklist</h2>")
|
||||
|
@ -20,6 +20,7 @@ import sys, os
|
||||
import http, http.client, http.server, urllib
|
||||
import subprocess, shutil, time, hashlib
|
||||
|
||||
import netrender.versioning as versioning
|
||||
from netrender.utils import *
|
||||
|
||||
class LogFile:
|
||||
@ -96,11 +97,65 @@ class RenderSlave:
|
||||
|
||||
JOB_BLENDER = 1
|
||||
JOB_PROCESS = 2
|
||||
JOB_VCS = 3
|
||||
|
||||
JOB_TYPES = {
|
||||
JOB_BLENDER: "Blender",
|
||||
JOB_PROCESS: "Process"
|
||||
}
|
||||
JOB_BLENDER: "Blender",
|
||||
JOB_PROCESS: "Process",
|
||||
JOB_VCS: "Versioned",
|
||||
}
|
||||
|
||||
class VersioningInfo:
|
||||
def __init__(self, info = None):
|
||||
self._system = None
|
||||
self.wpath = ""
|
||||
self.rpath = ""
|
||||
self.revision = ""
|
||||
|
||||
@property
|
||||
def system(self):
|
||||
return self._system
|
||||
|
||||
@system.setter
|
||||
def system(self, value):
|
||||
self._system = versioning.SYSTEMS[value]
|
||||
|
||||
def update(self):
|
||||
self.system.update(self)
|
||||
|
||||
def serialize(self):
|
||||
return {
|
||||
"wpath": self.wpath,
|
||||
"rpath": self.rpath,
|
||||
"revision": self.revision,
|
||||
"system": self.system.name
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def generate(system, path):
|
||||
vs = VersioningInfo()
|
||||
vs.wpath = path
|
||||
vs.system = system
|
||||
|
||||
vs.rpath = vs.system.path(path)
|
||||
vs.revision = vs.system.revision(path)
|
||||
|
||||
return vs
|
||||
|
||||
|
||||
@staticmethod
|
||||
def materialize(data):
|
||||
if not data:
|
||||
return None
|
||||
|
||||
vs = VersioningInfo()
|
||||
vs.wpath = data["wpath"]
|
||||
vs.rpath = data["rpath"]
|
||||
vs.revision = data["revision"]
|
||||
vs.system = data["system"]
|
||||
|
||||
return vs
|
||||
|
||||
|
||||
class RenderFile:
|
||||
def __init__(self, filepath = "", index = 0, start = -1, end = -1, signature=0):
|
||||
@ -142,6 +197,10 @@ class RenderJob:
|
||||
self.chunks = 0
|
||||
self.priority = 0
|
||||
self.blacklist = []
|
||||
|
||||
self.version_info = None
|
||||
|
||||
self.resolution = None
|
||||
|
||||
self.usage = 0.0
|
||||
self.last_dispatched = 0.0
|
||||
@ -156,9 +215,19 @@ class RenderJob:
|
||||
self.chunks = job_info.chunks
|
||||
self.priority = job_info.priority
|
||||
self.blacklist = job_info.blacklist
|
||||
self.version_info = job_info.version_info
|
||||
|
||||
def addFile(self, file_path, start=-1, end=-1):
|
||||
signature = hashFile(file_path)
|
||||
def hasRenderResult(self):
|
||||
return self.type in (JOB_BLENDER, JOB_VCS)
|
||||
|
||||
def rendersWithBlender(self):
|
||||
return self.type in (JOB_BLENDER, JOB_VCS)
|
||||
|
||||
def addFile(self, file_path, start=-1, end=-1, signed=True):
|
||||
if signed:
|
||||
signature = hashFile(file_path)
|
||||
else:
|
||||
signature = None
|
||||
self.files.append(RenderFile(file_path, len(self.files), start, end, signature))
|
||||
|
||||
def addFrame(self, frame_number, command = ""):
|
||||
@ -225,7 +294,9 @@ class RenderJob:
|
||||
"priority": self.priority,
|
||||
"usage": self.usage,
|
||||
"blacklist": self.blacklist,
|
||||
"last_dispatched": self.last_dispatched
|
||||
"last_dispatched": self.last_dispatched,
|
||||
"version_info": self.version_info.serialize() if self.version_info else None,
|
||||
"resolution": self.resolution
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
@ -246,6 +317,11 @@ class RenderJob:
|
||||
job.usage = data["usage"]
|
||||
job.blacklist = data["blacklist"]
|
||||
job.last_dispatched = data["last_dispatched"]
|
||||
job.resolution = data["resolution"]
|
||||
|
||||
version_info = data.get("version_info", None)
|
||||
if version_info:
|
||||
job.version_info = VersioningInfo.materialize(version_info)
|
||||
|
||||
return job
|
||||
|
||||
|
@ -20,9 +20,9 @@ function clear_jobs()
|
||||
var r=confirm("Also delete files on master?");
|
||||
|
||||
if (r==true) {
|
||||
request('/clear', "{'clear':True}");
|
||||
request('/clear', '{"clear":true}');
|
||||
} else {
|
||||
request('/clear', "{'clear':False}");
|
||||
request('/clear', '{"clear":false}');
|
||||
}
|
||||
}
|
||||
|
||||
@ -31,9 +31,9 @@ function cancel_job(id)
|
||||
var r=confirm("Also delete files on master?");
|
||||
|
||||
if (r==true) {
|
||||
request('/cancel_' + id, "{'clear':True}");
|
||||
request('/cancel_' + id, '{"clear":true}');
|
||||
} else {
|
||||
request('/cancel_' + id, "{'clear':False}");
|
||||
request('/cancel_' + id, '{"clear":false}');
|
||||
}
|
||||
}
|
||||
|
||||
@ -41,13 +41,13 @@ function balance_edit(id, old_value)
|
||||
{
|
||||
var new_value = prompt("New limit", old_value);
|
||||
if (new_value != null && new_value != "") {
|
||||
request("/balance_limit", "{" + id + ":'" + new_value + "'}");
|
||||
request("/balance_limit", '{"' + id + '":"' + new_value + '"}');
|
||||
}
|
||||
}
|
||||
|
||||
function balance_enable(id, value)
|
||||
{
|
||||
request("/balance_enable", "{" + id + ":" + value + "}");
|
||||
request("/balance_enable", '{"' + id + '":' + value + "}");
|
||||
}
|
||||
|
||||
function showThumb(job, frame)
|
||||
|
@ -26,6 +26,7 @@ import netrender
|
||||
from netrender.utils import *
|
||||
import netrender.client as client
|
||||
import netrender.model
|
||||
import netrender.versioning as versioning
|
||||
|
||||
class RENDER_OT_netslave_bake(bpy.types.Operator):
|
||||
'''NEED DESCRIPTION'''
|
||||
@ -61,12 +62,9 @@ class RENDER_OT_netslave_bake(bpy.types.Operator):
|
||||
modifier.point_cache.use_disk_cache = True
|
||||
modifier.point_cache.use_external = False
|
||||
elif modifier.type == "SMOKE" and modifier.smoke_type == "TYPE_DOMAIN":
|
||||
modifier.domain_settings.point_cache_low.use_step = 1
|
||||
modifier.domain_settings.point_cache_low.use_disk_cache = True
|
||||
modifier.domain_settings.point_cache_low.use_external = False
|
||||
modifier.domain_settings.point_cache_high.use_step = 1
|
||||
modifier.domain_settings.point_cache_high.use_disk_cache = True
|
||||
modifier.domain_settings.point_cache_high.use_external = False
|
||||
modifier.domain_settings.point_cache.use_step = 1
|
||||
modifier.domain_settings.point_cache.use_disk_cache = True
|
||||
modifier.domain_settings.point_cache.use_external = False
|
||||
|
||||
# particles modifier are stupid and don't contain data
|
||||
# we have to go through the object property
|
||||
@ -354,7 +352,7 @@ class RENDER_OT_netclientcancel(bpy.types.Operator):
|
||||
if conn:
|
||||
job = netrender.jobs[netsettings.active_job_index]
|
||||
|
||||
conn.request("POST", cancelURL(job.id))
|
||||
conn.request("POST", cancelURL(job.id), json.dumps({'clear':False}))
|
||||
|
||||
response = conn.getresponse()
|
||||
response.read()
|
||||
@ -381,7 +379,7 @@ class RENDER_OT_netclientcancelall(bpy.types.Operator):
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
|
||||
|
||||
if conn:
|
||||
conn.request("POST", "/clear")
|
||||
conn.request("POST", "/clear", json.dumps({'clear':False}))
|
||||
|
||||
response = conn.getresponse()
|
||||
response.read()
|
||||
@ -403,7 +401,7 @@ class netclientdownload(bpy.types.Operator):
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
netsettings = context.scene.network_render
|
||||
return netsettings.active_job_index >= 0 and len(netsettings.jobs) > 0
|
||||
return netsettings.active_job_index >= 0 and len(netsettings.jobs) > netsettings.active_job_index
|
||||
|
||||
def execute(self, context):
|
||||
netsettings = context.scene.network_render
|
||||
@ -412,29 +410,71 @@ class netclientdownload(bpy.types.Operator):
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, self.report)
|
||||
|
||||
if conn:
|
||||
job = netrender.jobs[netsettings.active_job_index]
|
||||
|
||||
job_id = netrender.jobs[netsettings.active_job_index].id
|
||||
|
||||
conn.request("GET", "/status", headers={"job-id":job_id})
|
||||
|
||||
response = conn.getresponse()
|
||||
|
||||
if response.status != http.client.OK:
|
||||
self.report('ERROR', "Job ID %i not defined on master" % job_id)
|
||||
return {'ERROR'}
|
||||
|
||||
content = response.read()
|
||||
|
||||
job = netrender.model.RenderJob.materialize(json.loads(str(content, encoding='utf8')))
|
||||
|
||||
conn.close()
|
||||
|
||||
finished_frames = []
|
||||
|
||||
nb_error = 0
|
||||
nb_missing = 0
|
||||
|
||||
for frame in job.frames:
|
||||
client.requestResult(conn, job.id, frame.number)
|
||||
response = conn.getresponse()
|
||||
response.read()
|
||||
|
||||
if response.status != http.client.OK:
|
||||
print("missing", frame.number)
|
||||
continue
|
||||
|
||||
print("got back", frame.number)
|
||||
|
||||
f = open(os.path.join(netsettings.path, "%06d.exr" % frame.number), "wb")
|
||||
buf = response.read(1024)
|
||||
|
||||
while buf:
|
||||
f.write(buf)
|
||||
buf = response.read(1024)
|
||||
|
||||
f.close()
|
||||
|
||||
conn.close()
|
||||
if frame.status == DONE:
|
||||
finished_frames.append(frame.number)
|
||||
elif frame.status == ERROR:
|
||||
nb_error += 1
|
||||
else:
|
||||
nb_missing += 1
|
||||
|
||||
if not finished_frames:
|
||||
return
|
||||
|
||||
frame_ranges = []
|
||||
|
||||
first = None
|
||||
last = None
|
||||
|
||||
for i in range(len(finished_frames)):
|
||||
current = finished_frames[i]
|
||||
|
||||
if not first:
|
||||
first = current
|
||||
last = current
|
||||
elif last + 1 == current:
|
||||
last = current
|
||||
|
||||
if last + 1 < current or i + 1 == len(finished_frames):
|
||||
if first < last:
|
||||
frame_ranges.append((first, last))
|
||||
else:
|
||||
frame_ranges.append((first,))
|
||||
|
||||
first = current
|
||||
last = current
|
||||
|
||||
getResults(netsettings.server_address, netsettings.server_port, job_id, job.resolution[0], job.resolution[1], job.resolution[2], frame_ranges)
|
||||
|
||||
if nb_error and nb_missing:
|
||||
self.report('ERROR', "Results downloaded but skipped %i frames with errors and %i unfinished frames" % (nb_error, nb_missing))
|
||||
elif nb_error:
|
||||
self.report('ERROR', "Results downloaded but skipped %i frames with errors" % nb_error)
|
||||
elif nb_missing:
|
||||
self.report('WARNING', "Results downloaded but skipped %i unfinished frames" % nb_missing)
|
||||
else:
|
||||
self.report('INFO', "All results downloaded")
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
@ -464,6 +504,38 @@ class netclientscan(bpy.types.Operator):
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
|
||||
class netclientvcsguess(bpy.types.Operator):
|
||||
'''Guess VCS setting for the current file'''
|
||||
bl_idname = "render.netclientvcsguess"
|
||||
bl_label = "VCS Guess"
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return True
|
||||
|
||||
def execute(self, context):
|
||||
netsettings = context.scene.network_render
|
||||
|
||||
system = versioning.SYSTEMS.get(netsettings.vcs_system, None)
|
||||
|
||||
if system:
|
||||
wpath, name = os.path.split(os.path.abspath(bpy.data.filepath))
|
||||
|
||||
rpath = system.path(wpath)
|
||||
revision = system.revision(wpath)
|
||||
|
||||
netsettings.vcs_wpath = wpath
|
||||
netsettings.vcs_rpath = rpath
|
||||
netsettings.vcs_revision = revision
|
||||
|
||||
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
def invoke(self, context, event):
|
||||
return self.execute(context)
|
||||
|
||||
|
||||
class netclientweb(bpy.types.Operator):
|
||||
'''Open new window with information about running rendering jobs'''
|
||||
bl_idname = "render.netclientweb"
|
||||
|
@ -21,17 +21,26 @@ import http, http.client, http.server, urllib
|
||||
import subprocess, time
|
||||
import json
|
||||
|
||||
import bpy
|
||||
|
||||
from netrender.utils import *
|
||||
import netrender.model
|
||||
import netrender.repath
|
||||
import netrender.thumbnail as thumbnail
|
||||
|
||||
BLENDER_PATH = sys.argv[0]
|
||||
|
||||
CANCEL_POLL_SPEED = 2
|
||||
MAX_TIMEOUT = 10
|
||||
INCREMENT_TIMEOUT = 1
|
||||
MAX_CONNECT_TRY = 10
|
||||
try:
|
||||
system = platform.system()
|
||||
except UnicodeDecodeError:
|
||||
import sys
|
||||
system = sys.platform
|
||||
|
||||
if platform.system() == 'Windows' and platform.version() >= '5': # Error mode is only available on Win2k or higher, that's version 5
|
||||
if system in ('Windows', 'win32') and platform.version() >= '5': # Error mode is only available on Win2k or higher, that's version 5
|
||||
import ctypes
|
||||
def SetErrorMode():
|
||||
val = ctypes.windll.kernel32.SetErrorMode(0x0002)
|
||||
@ -71,7 +80,7 @@ def testFile(conn, job_id, slave_id, rfile, JOB_PREFIX, main_path = None):
|
||||
|
||||
found = os.path.exists(job_full_path)
|
||||
|
||||
if found:
|
||||
if found and rfile.signature != None:
|
||||
found_signature = hashFile(job_full_path)
|
||||
found = found_signature == rfile.signature
|
||||
|
||||
@ -104,13 +113,36 @@ def testFile(conn, job_id, slave_id, rfile, JOB_PREFIX, main_path = None):
|
||||
|
||||
return job_full_path
|
||||
|
||||
def breakable_timeout(timeout):
|
||||
for i in range(timeout):
|
||||
time.sleep(1)
|
||||
if engine.test_break():
|
||||
break
|
||||
|
||||
def render_slave(engine, netsettings, threads):
|
||||
timeout = 1
|
||||
|
||||
bisleep = BreakableIncrementedSleep(INCREMENT_TIMEOUT, 1, MAX_TIMEOUT, engine.test_break)
|
||||
|
||||
engine.update_stats("", "Network render node initiation")
|
||||
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port)
|
||||
|
||||
|
||||
if not conn:
|
||||
timeout = 1
|
||||
print("Connection failed, will try connecting again at most %i times" % MAX_CONNECT_TRY)
|
||||
bisleep.reset()
|
||||
|
||||
for i in range(MAX_CONNECT_TRY):
|
||||
bisleep.sleep()
|
||||
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port)
|
||||
|
||||
if conn or engine.test_break():
|
||||
break
|
||||
|
||||
print("Retry %i failed, waiting %is before retrying" % (i + 1, bisleep.current))
|
||||
|
||||
if conn:
|
||||
conn.request("POST", "/slave", json.dumps(slave_Info().serialize()))
|
||||
response = conn.getresponse()
|
||||
@ -118,7 +150,7 @@ def render_slave(engine, netsettings, threads):
|
||||
|
||||
slave_id = response.getheader("slave-id")
|
||||
|
||||
NODE_PREFIX = os.path.join(netsettings.path, "slave_" + slave_id)
|
||||
NODE_PREFIX = os.path.join(bpy.path.abspath(netsettings.path), "slave_" + slave_id)
|
||||
if not os.path.exists(NODE_PREFIX):
|
||||
os.mkdir(NODE_PREFIX)
|
||||
|
||||
@ -129,7 +161,7 @@ def render_slave(engine, netsettings, threads):
|
||||
response = conn.getresponse()
|
||||
|
||||
if response.status == http.client.OK:
|
||||
timeout = 1 # reset timeout on new job
|
||||
bisleep.reset()
|
||||
|
||||
job = netrender.model.RenderJob.materialize(json.loads(str(response.read(), encoding='utf8')))
|
||||
engine.update_stats("", "Network render processing job from master")
|
||||
@ -138,6 +170,10 @@ def render_slave(engine, netsettings, threads):
|
||||
if not os.path.exists(JOB_PREFIX):
|
||||
os.mkdir(JOB_PREFIX)
|
||||
|
||||
# set tempdir for fsaa temp files
|
||||
# have to set environ var because render is done in a subprocess and that's the easiest way to propagate the setting
|
||||
os.environ["TMP"] = JOB_PREFIX
|
||||
|
||||
|
||||
if job.type == netrender.model.JOB_BLENDER:
|
||||
job_path = job.files[0].filepath # path of main file
|
||||
@ -153,6 +189,20 @@ def render_slave(engine, netsettings, threads):
|
||||
|
||||
netrender.repath.update(job)
|
||||
|
||||
engine.update_stats("", "Render File "+ main_file+ " for job "+ job.id)
|
||||
elif job.type == netrender.model.JOB_VCS:
|
||||
if not job.version_info:
|
||||
# Need to return an error to server, incorrect job type
|
||||
pass
|
||||
|
||||
job_path = job.files[0].filepath # path of main file
|
||||
main_path, main_file = os.path.split(job_path)
|
||||
|
||||
job.version_info.update()
|
||||
|
||||
# For VCS jobs, file path is relative to the working copy path
|
||||
job_full_path = os.path.join(job.version_info.wpath, job_path)
|
||||
|
||||
engine.update_stats("", "Render File "+ main_file+ " for job "+ job.id)
|
||||
|
||||
# announce log to master
|
||||
@ -167,7 +217,7 @@ def render_slave(engine, netsettings, threads):
|
||||
# start render
|
||||
start_t = time.time()
|
||||
|
||||
if job.type == netrender.model.JOB_BLENDER:
|
||||
if job.rendersWithBlender():
|
||||
frame_args = []
|
||||
|
||||
for frame in job.frames:
|
||||
@ -252,20 +302,20 @@ def render_slave(engine, netsettings, threads):
|
||||
headers["job-result"] = str(DONE)
|
||||
for frame in job.frames:
|
||||
headers["job-frame"] = str(frame.number)
|
||||
if job.type == netrender.model.JOB_BLENDER:
|
||||
if job.hasRenderResult():
|
||||
# send image back to server
|
||||
|
||||
filename = os.path.join(JOB_PREFIX, "%06d.exr" % frame.number)
|
||||
|
||||
# thumbnail first
|
||||
if netsettings.use_slave_thumb:
|
||||
thumbname = thumbnail(filename)
|
||||
|
||||
f = open(thumbname, 'rb')
|
||||
conn.request("PUT", "/thumb", f, headers=headers)
|
||||
f.close()
|
||||
responseStatus(conn)
|
||||
thumbname = thumbnail.generate(filename)
|
||||
|
||||
if thumbname:
|
||||
f = open(thumbname, 'rb')
|
||||
conn.request("PUT", "/thumb", f, headers=headers)
|
||||
f.close()
|
||||
responseStatus(conn)
|
||||
|
||||
f = open(filename, 'rb')
|
||||
conn.request("PUT", "/render", f, headers=headers)
|
||||
@ -288,13 +338,7 @@ def render_slave(engine, netsettings, threads):
|
||||
|
||||
engine.update_stats("", "Network render connected to master, waiting for jobs")
|
||||
else:
|
||||
if timeout < MAX_TIMEOUT:
|
||||
timeout += INCREMENT_TIMEOUT
|
||||
|
||||
for i in range(timeout):
|
||||
time.sleep(1)
|
||||
if engine.test_break():
|
||||
break
|
||||
bisleep.sleep()
|
||||
|
||||
conn.close()
|
||||
|
||||
|
81
release/scripts/io/netrender/thumbnail.py
Normal file
@ -0,0 +1,81 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
import sys, os
|
||||
import subprocess
|
||||
|
||||
import bpy
|
||||
|
||||
def generate(filename, external=True):
|
||||
if external:
|
||||
process = subprocess.Popen([sys.argv[0], "-b", "-noaudio", "-P", __file__, "--", filename], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
while process.poll() is None:
|
||||
process.stdout.read(1024) # empty buffer to be sure
|
||||
process.stdout.read()
|
||||
|
||||
return _thumbname(filename)
|
||||
else:
|
||||
return _internal(filename)
|
||||
|
||||
def _thumbname(filename):
|
||||
root = os.path.splitext(filename)[0]
|
||||
return root + ".jpg"
|
||||
|
||||
def _internal(filename):
|
||||
imagename = os.path.split(filename)[1]
|
||||
thumbname = _thumbname(filename)
|
||||
|
||||
if os.path.exists(thumbname):
|
||||
return thumbname
|
||||
|
||||
if bpy:
|
||||
scene = bpy.data.scenes[0] # FIXME, this is dodgy!
|
||||
scene.render.file_format = "JPEG"
|
||||
scene.render.file_quality = 90
|
||||
|
||||
# remove existing image, if there's a leftover (otherwise open changes the name)
|
||||
if imagename in bpy.data.images:
|
||||
img = bpy.data.images[imagename]
|
||||
bpy.data.images.remove(img)
|
||||
|
||||
bpy.ops.image.open(filepath=filename)
|
||||
img = bpy.data.images[imagename]
|
||||
|
||||
img.save_render(thumbname, scene=scene)
|
||||
|
||||
img.user_clear()
|
||||
bpy.data.images.remove(img)
|
||||
|
||||
try:
|
||||
process = subprocess.Popen(["convert", thumbname, "-resize", "300x300", thumbname])
|
||||
process.wait()
|
||||
return thumbname
|
||||
except Exception as exp:
|
||||
print("Error while generating thumbnail")
|
||||
print(exp)
|
||||
|
||||
return None
|
||||
|
||||
if __name__ == "__main__":
|
||||
import bpy
|
||||
try:
|
||||
start = sys.argv.index("--") + 1
|
||||
except ValueError:
|
||||
start = 0
|
||||
for filename in sys.argv[start:]:
|
||||
generate(filename, external=False)
|
@ -36,6 +36,8 @@ DISPATCHED = 1
|
||||
DONE = 2
|
||||
ERROR = 3
|
||||
|
||||
LAST_ADDRESS_TEST = 0
|
||||
|
||||
def base_poll(cls, context):
|
||||
rd = context.scene.render
|
||||
return (rd.use_game_engine==False) and (rd.engine in cls.COMPAT_ENGINES)
|
||||
@ -45,7 +47,7 @@ def init_file():
|
||||
if netrender.init_file != bpy.data.filepath:
|
||||
netrender.init_file = bpy.data.filepath
|
||||
netrender.init_data = True
|
||||
netrender.init_address = True
|
||||
netrender.valid_address = False
|
||||
|
||||
def init_data(netsettings):
|
||||
init_file()
|
||||
@ -66,22 +68,31 @@ def init_data(netsettings):
|
||||
netsettings.jobs.remove(0)
|
||||
|
||||
def verify_address(netsettings):
|
||||
global LAST_ADDRESS_TEST
|
||||
init_file()
|
||||
|
||||
if netrender.init_address:
|
||||
netrender.init_address = False
|
||||
if LAST_ADDRESS_TEST + 30 < time.time():
|
||||
LAST_ADDRESS_TEST = time.time()
|
||||
|
||||
try:
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, scan = False)
|
||||
conn = clientConnection(netsettings.server_address, netsettings.server_port, scan = False, timeout = 1)
|
||||
except:
|
||||
conn = None
|
||||
|
||||
if conn:
|
||||
netrender.valid_address = True
|
||||
conn.close()
|
||||
else:
|
||||
netsettings.server_address = "[default]"
|
||||
netrender.valid_address = False
|
||||
|
||||
return netrender.valid_address
|
||||
|
||||
class RenderButtonsPanel():
|
||||
class NeedValidAddress():
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return super().poll(context) and verify_address(context.scene.network_render)
|
||||
|
||||
class NetRenderButtonsPanel():
|
||||
bl_space_type = "PROPERTIES"
|
||||
bl_region_type = "WINDOW"
|
||||
bl_context = "render"
|
||||
@ -90,16 +101,16 @@ class RenderButtonsPanel():
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
rd = context.scene.render
|
||||
return (rd.use_game_engine==False) and (rd.engine in cls.COMPAT_ENGINES)
|
||||
return rd.engine == 'NET_RENDER' and rd.use_game_engine == False
|
||||
|
||||
# Setting panel, use in the scene for now.
|
||||
class RENDER_PT_network_settings(bpy.types.Panel, RenderButtonsPanel):
|
||||
class RENDER_PT_network_settings(NetRenderButtonsPanel, bpy.types.Panel):
|
||||
bl_label = "Network Settings"
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return super(RENDER_PT_network_settings, cls).poll(context)
|
||||
return super().poll(context)
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@ -128,17 +139,20 @@ class RENDER_PT_network_settings(bpy.types.Panel, RenderButtonsPanel):
|
||||
|
||||
if netsettings.mode != "RENDER_MASTER":
|
||||
layout.operator("render.netclientscan", icon='FILE_REFRESH', text="")
|
||||
|
||||
if not netrender.valid_address:
|
||||
layout.label(text="No master at specified address")
|
||||
|
||||
layout.operator("render.netclientweb", icon='QUESTION')
|
||||
|
||||
class RENDER_PT_network_slave_settings(bpy.types.Panel, RenderButtonsPanel):
|
||||
class RENDER_PT_network_slave_settings(NetRenderButtonsPanel, bpy.types.Panel):
|
||||
bl_label = "Slave Settings"
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
scene = context.scene
|
||||
return super(RENDER_PT_network_slave_settings, cls).poll(context) and scene.network_render.mode == "RENDER_SLAVE"
|
||||
return super().poll(context) and scene.network_render.mode == "RENDER_SLAVE"
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@ -156,14 +170,14 @@ class RENDER_PT_network_slave_settings(bpy.types.Panel, RenderButtonsPanel):
|
||||
sub.enabled = rd.threads_mode == 'FIXED'
|
||||
sub.prop(rd, "threads")
|
||||
|
||||
class RENDER_PT_network_master_settings(bpy.types.Panel, RenderButtonsPanel):
|
||||
class RENDER_PT_network_master_settings(NetRenderButtonsPanel, bpy.types.Panel):
|
||||
bl_label = "Master Settings"
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
scene = context.scene
|
||||
return super(RENDER_PT_network_master_settings, cls).poll(context) and scene.network_render.mode == "RENDER_MASTER"
|
||||
return super().poll(context) and scene.network_render.mode == "RENDER_MASTER"
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@ -174,14 +188,14 @@ class RENDER_PT_network_master_settings(bpy.types.Panel, RenderButtonsPanel):
|
||||
layout.prop(netsettings, "use_master_broadcast")
|
||||
layout.prop(netsettings, "use_master_clear")
|
||||
|
||||
class RENDER_PT_network_job(bpy.types.Panel, RenderButtonsPanel):
|
||||
class RENDER_PT_network_job(NetRenderButtonsPanel, bpy.types.Panel):
|
||||
bl_label = "Job Settings"
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
scene = context.scene
|
||||
return super(RENDER_PT_network_job, cls).poll(context) and scene.network_render.mode == "RENDER_CLIENT"
|
||||
return super().poll(context) and scene.network_render.mode == "RENDER_CLIENT"
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@ -203,10 +217,12 @@ class RENDER_PT_network_job(bpy.types.Panel, RenderButtonsPanel):
|
||||
split = layout.split(percentage=0.3)
|
||||
|
||||
col = split.column()
|
||||
col.label(text="Type:")
|
||||
col.label(text="Name:")
|
||||
col.label(text="Category:")
|
||||
|
||||
col = split.column()
|
||||
col.prop(netsettings, "job_type", text="")
|
||||
col.prop(netsettings, "job_name", text="")
|
||||
col.prop(netsettings, "job_category", text="")
|
||||
|
||||
@ -214,18 +230,38 @@ class RENDER_PT_network_job(bpy.types.Panel, RenderButtonsPanel):
|
||||
row.prop(netsettings, "priority")
|
||||
row.prop(netsettings, "chunks")
|
||||
|
||||
class RENDER_PT_network_slaves(bpy.types.Panel, RenderButtonsPanel):
|
||||
bl_label = "Slaves Status"
|
||||
class RENDER_PT_network_job_vcs(NetRenderButtonsPanel, bpy.types.Panel):
|
||||
bl_label = "VCS Job Settings"
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
scene = context.scene
|
||||
return (super().poll(context)
|
||||
and scene.network_render.mode == "RENDER_CLIENT"
|
||||
and scene.network_render.job_type == "JOB_VCS")
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
|
||||
scene = context.scene
|
||||
netsettings = scene.network_render
|
||||
if netsettings.mode != "RENDER_CLIENT":
|
||||
return False
|
||||
verify_address(netsettings)
|
||||
return super(RENDER_PT_network_slaves, cls).poll(context) and netsettings.server_address != "[default]"
|
||||
|
||||
layout.operator("render.netclientvcsguess", icon='FILE_REFRESH', text="")
|
||||
|
||||
layout.prop(netsettings, "vcs_system")
|
||||
layout.prop(netsettings, "vcs_revision")
|
||||
layout.prop(netsettings, "vcs_rpath")
|
||||
layout.prop(netsettings, "vcs_wpath")
|
||||
|
||||
class RENDER_PT_network_slaves(NeedValidAddress, NetRenderButtonsPanel, bpy.types.Panel):
|
||||
bl_label = "Slaves Status"
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
netsettings = context.scene.network_render
|
||||
return super().poll(context) and netsettings.mode == "RENDER_CLIENT"
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@ -240,9 +276,7 @@ class RENDER_PT_network_slaves(bpy.types.Panel, RenderButtonsPanel):
|
||||
sub.operator("render.netclientslaves", icon='FILE_REFRESH', text="")
|
||||
sub.operator("render.netclientblacklistslave", icon='ZOOMOUT', text="")
|
||||
|
||||
init_data(netsettings)
|
||||
|
||||
if netsettings.active_slave_index >= 0 and len(netsettings.slaves) > 0:
|
||||
if len(netrender.slaves) > netsettings.active_slave_index >= 0:
|
||||
layout.separator()
|
||||
|
||||
slave = netrender.slaves[netsettings.active_slave_index]
|
||||
@ -252,18 +286,14 @@ class RENDER_PT_network_slaves(bpy.types.Panel, RenderButtonsPanel):
|
||||
layout.label(text="Seen: " + time.ctime(slave.last_seen))
|
||||
layout.label(text="Stats: " + slave.stats)
|
||||
|
||||
class RENDER_PT_network_slaves_blacklist(bpy.types.Panel, RenderButtonsPanel):
|
||||
class RENDER_PT_network_slaves_blacklist(NeedValidAddress, NetRenderButtonsPanel, bpy.types.Panel):
|
||||
bl_label = "Slaves Blacklist"
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
scene = context.scene
|
||||
netsettings = scene.network_render
|
||||
if netsettings.mode != "RENDER_CLIENT":
|
||||
return False
|
||||
verify_address(netsettings)
|
||||
return super(RENDER_PT_network_slaves_blacklist, cls).poll(context) and netsettings.server_address != "[default]"
|
||||
netsettings = context.scene.network_render
|
||||
return super().poll(context) and netsettings.mode == "RENDER_CLIENT"
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@ -277,9 +307,7 @@ class RENDER_PT_network_slaves_blacklist(bpy.types.Panel, RenderButtonsPanel):
|
||||
sub = row.column(align=True)
|
||||
sub.operator("render.netclientwhitelistslave", icon='ZOOMOUT', text="")
|
||||
|
||||
init_data(netsettings)
|
||||
|
||||
if netsettings.active_blacklisted_slave_index >= 0 and len(netsettings.slaves_blacklist) > 0:
|
||||
if len(netrender.blacklist) > netsettings.active_blacklisted_slave_index >= 0:
|
||||
layout.separator()
|
||||
|
||||
slave = netrender.blacklist[netsettings.active_blacklisted_slave_index]
|
||||
@ -289,18 +317,14 @@ class RENDER_PT_network_slaves_blacklist(bpy.types.Panel, RenderButtonsPanel):
|
||||
layout.label(text="Seen: " + time.ctime(slave.last_seen))
|
||||
layout.label(text="Stats: " + slave.stats)
|
||||
|
||||
class RENDER_PT_network_jobs(bpy.types.Panel, RenderButtonsPanel):
|
||||
class RENDER_PT_network_jobs(NeedValidAddress, NetRenderButtonsPanel, bpy.types.Panel):
|
||||
bl_label = "Jobs"
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
scene = context.scene
|
||||
netsettings = scene.network_render
|
||||
if netsettings.mode != "RENDER_CLIENT":
|
||||
return False
|
||||
verify_address(netsettings)
|
||||
return super(RENDER_PT_network_jobs, cls).poll(context) and netsettings.server_address != "[default]"
|
||||
netsettings = context.scene.network_render
|
||||
return super().poll(context) and netsettings.mode == "RENDER_CLIENT"
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
@ -317,9 +341,7 @@ class RENDER_PT_network_jobs(bpy.types.Panel, RenderButtonsPanel):
|
||||
sub.operator("render.netclientcancelall", icon='PANEL_CLOSE', text="")
|
||||
sub.operator("render.netclientdownload", icon='RENDER_ANIMATION', text="")
|
||||
|
||||
init_data(netsettings)
|
||||
|
||||
if netsettings.active_job_index >= 0 and len(netsettings.jobs) > 0:
|
||||
if len(netrender.jobs) > netsettings.active_job_index >= 0:
|
||||
layout.separator()
|
||||
|
||||
job = netrender.jobs[netsettings.active_job_index]
|
||||
@ -329,16 +351,33 @@ class RENDER_PT_network_jobs(bpy.types.Panel, RenderButtonsPanel):
|
||||
layout.label(text="Done: %04i" % job.results[DONE])
|
||||
layout.label(text="Error: %04i" % job.results[ERROR])
|
||||
|
||||
class NetRenderSettings(bpy.types.IDPropertyGroup):
|
||||
pass
|
||||
import properties_render
|
||||
class RENDER_PT_network_output(NeedValidAddress, NetRenderButtonsPanel, bpy.types.Panel):
|
||||
bl_label = "Output"
|
||||
COMPAT_ENGINES = {'NET_RENDER'}
|
||||
|
||||
class NetRenderSlave(bpy.types.IDPropertyGroup):
|
||||
pass
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
netsettings = context.scene.network_render
|
||||
return super().poll(context) and netsettings.mode == "RENDER_CLIENT"
|
||||
|
||||
draw = properties_render.RENDER_PT_output.draw
|
||||
|
||||
class NetRenderJob(bpy.types.IDPropertyGroup):
|
||||
pass
|
||||
|
||||
def addProperties():
|
||||
class NetRenderSettings(bpy.types.IDPropertyGroup):
|
||||
pass
|
||||
|
||||
class NetRenderSlave(bpy.types.IDPropertyGroup):
|
||||
pass
|
||||
|
||||
class NetRenderJob(bpy.types.IDPropertyGroup):
|
||||
pass
|
||||
|
||||
bpy.utils.register_class(NetRenderSettings)
|
||||
bpy.utils.register_class(NetRenderSlave)
|
||||
bpy.utils.register_class(NetRenderJob)
|
||||
|
||||
from bpy.props import PointerProperty, StringProperty, BoolProperty, EnumProperty, IntProperty, CollectionProperty
|
||||
bpy.types.Scene.network_render = PointerProperty(type=NetRenderSettings, name="Network Render", description="Network Render Settings")
|
||||
|
||||
@ -397,6 +436,16 @@ def addProperties():
|
||||
default = default_path,
|
||||
subtype='FILE_PATH')
|
||||
|
||||
NetRenderSettings.job_type = EnumProperty(
|
||||
items=(
|
||||
("JOB_BLENDER", "Blender", "Standard Blender Job"),
|
||||
("JOB_PROCESS", "Process", "Custom Process Job"),
|
||||
("JOB_VCS", "VCS", "Version Control System Managed Job"),
|
||||
),
|
||||
name="Job Type",
|
||||
description="Type of render job",
|
||||
default="JOB_BLENDER")
|
||||
|
||||
NetRenderSettings.job_name = StringProperty(
|
||||
name="Job name",
|
||||
description="Name of the job",
|
||||
@ -423,6 +472,30 @@ def addProperties():
|
||||
min=1,
|
||||
max=10)
|
||||
|
||||
NetRenderSettings.vcs_wpath = StringProperty(
|
||||
name="Working Copy",
|
||||
description="Path of the local working copy",
|
||||
maxlen = 1024,
|
||||
default = "")
|
||||
|
||||
NetRenderSettings.vcs_rpath = StringProperty(
|
||||
name="Remote Path",
|
||||
description="Path of the server copy (protocol specific)",
|
||||
maxlen = 1024,
|
||||
default = "")
|
||||
|
||||
NetRenderSettings.vcs_revision = StringProperty(
|
||||
name="Revision",
|
||||
description="Revision for this job",
|
||||
maxlen = 256,
|
||||
default = "")
|
||||
|
||||
NetRenderSettings.vcs_system = StringProperty(
|
||||
name="VCS",
|
||||
description="Version Control System",
|
||||
maxlen = 64,
|
||||
default = "Subversion")
|
||||
|
||||
NetRenderSettings.job_id = StringProperty(
|
||||
name="Network job id",
|
||||
description="id of the last sent render job",
|
||||
|
@ -28,7 +28,7 @@ try:
|
||||
except:
|
||||
bpy = None
|
||||
|
||||
VERSION = bytes("0.9", encoding='utf8')
|
||||
VERSION = bytes("1.3", encoding='utf8')
|
||||
|
||||
# Jobs status
|
||||
JOB_WAITING = 0 # before all data has been entered
|
||||
@ -57,6 +57,39 @@ FRAME_STATUS_TEXT = {
|
||||
ERROR: "Error"
|
||||
}
|
||||
|
||||
class DirectoryContext:
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
|
||||
def __enter__(self):
|
||||
self.curdir = os.path.abspath(os.curdir)
|
||||
os.chdir(self.path)
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
os.chdir(self.curdir)
|
||||
|
||||
class BreakableIncrementedSleep:
|
||||
def __init__(self, increment, default_timeout, max_timeout, break_fct):
|
||||
self.increment = increment
|
||||
self.default = default_timeout
|
||||
self.max = max_timeout
|
||||
self.current = self.default
|
||||
self.break_fct = break_fct
|
||||
|
||||
def reset(self):
|
||||
self.current = self.default
|
||||
|
||||
def increase(self):
|
||||
self.current = min(self.current + self.increment, self.max)
|
||||
|
||||
def sleep(self):
|
||||
for i in range(self.current):
|
||||
time.sleep(1)
|
||||
if self.break_fct():
|
||||
break
|
||||
|
||||
self.increase()
|
||||
|
||||
def responseStatus(conn):
|
||||
response = conn.getresponse()
|
||||
response.read()
|
||||
@ -97,7 +130,7 @@ def clientScan(report = None):
|
||||
|
||||
return ("", 8000) # return default values
|
||||
|
||||
def clientConnection(address, port, report = None, scan = True):
|
||||
def clientConnection(address, port, report = None, scan = True, timeout = 5):
|
||||
if address == "[default]":
|
||||
# calling operator from python is fucked, scene isn't in context
|
||||
# if bpy:
|
||||
@ -111,7 +144,7 @@ def clientConnection(address, port, report = None, scan = True):
|
||||
return None
|
||||
|
||||
try:
|
||||
conn = http.client.HTTPConnection(address, port, timeout = 5)
|
||||
conn = http.client.HTTPConnection(address, port, timeout = timeout)
|
||||
|
||||
if conn:
|
||||
if clientVerifyVersion(conn):
|
||||
@ -119,12 +152,13 @@ def clientConnection(address, port, report = None, scan = True):
|
||||
else:
|
||||
conn.close()
|
||||
reporting(report, "Incorrect master version", ValueError)
|
||||
except Exception as err:
|
||||
except BaseException as err:
|
||||
if report:
|
||||
report('ERROR', str(err))
|
||||
return None
|
||||
else:
|
||||
raise
|
||||
print(err)
|
||||
return None
|
||||
|
||||
def clientVerifyVersion(conn):
|
||||
conn.request("GET", "/version")
|
||||
@ -168,7 +202,10 @@ def hashData(data):
|
||||
|
||||
|
||||
def prefixPath(prefix_directory, file_path, prefix_path, force = False):
|
||||
if os.path.isabs(file_path):
|
||||
if (os.path.isabs(file_path) or
|
||||
len(file_path) >= 3 and (file_path[1:3] == ":/" or file_path[1:3] == ":\\") or # Windows absolute path don't count as absolute on unix, have to handle them myself
|
||||
file_path[0] == "/" or file_path[0] == "\\"): # and vice versa
|
||||
|
||||
# if an absolute path, make sure path exists, if it doesn't, use relative local path
|
||||
full_path = file_path
|
||||
if force or not os.path.exists(full_path):
|
||||
@ -185,12 +222,69 @@ def prefixPath(prefix_directory, file_path, prefix_path, force = False):
|
||||
else:
|
||||
full_path = os.path.join(prefix_directory, n)
|
||||
else:
|
||||
full_path = (prefix_directory, file_path)
|
||||
full_path = os.path.join(prefix_directory, file_path)
|
||||
|
||||
return full_path
|
||||
|
||||
def getResults(server_address, server_port, job_id, resolution_x, resolution_y, resolution_percentage, frame_ranges):
|
||||
if bpy.app.debug:
|
||||
print("=============================================")
|
||||
print("============= FETCHING RESULTS ==============")
|
||||
|
||||
frame_arguments = []
|
||||
for r in frame_ranges:
|
||||
if len(r) == 2:
|
||||
frame_arguments.extend(["-s", str(r[0]), "-e", str(r[1]), "-a"])
|
||||
else:
|
||||
frame_arguments.extend(["-f", str(r[0])])
|
||||
|
||||
filepath = os.path.join(bpy.app.tempdir, "netrender_temp.blend")
|
||||
bpy.ops.wm.save_as_mainfile(filepath=filepath, copy=True, check_existing=False)
|
||||
|
||||
arguments = [sys.argv[0], "-b", "-noaudio", filepath, "-o", bpy.path.abspath(bpy.context.scene.render.filepath), "-P", __file__] + frame_arguments + ["--", "GetResults", server_address, str(server_port), job_id, str(resolution_x), str(resolution_y), str(resolution_percentage)]
|
||||
if bpy.app.debug:
|
||||
print("Starting subprocess:")
|
||||
print(" ".join(arguments))
|
||||
|
||||
process = subprocess.Popen(arguments, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
while process.poll() is None:
|
||||
stdout = process.stdout.read(1024)
|
||||
if bpy.app.debug:
|
||||
print(str(stdout, encoding='utf-8'), end="")
|
||||
|
||||
|
||||
# read leftovers if needed
|
||||
stdout = process.stdout.read()
|
||||
if bpy.app.debug:
|
||||
print(str(stdout, encoding='utf-8'))
|
||||
|
||||
os.remove(filepath)
|
||||
|
||||
if bpy.app.debug:
|
||||
print("=============================================")
|
||||
return
|
||||
|
||||
def _getResults(server_address, server_port, job_id, resolution_x, resolution_y, resolution_percentage):
|
||||
render = bpy.context.scene.render
|
||||
|
||||
netsettings = bpy.context.scene.network_render
|
||||
|
||||
netsettings.server_address = server_address
|
||||
netsettings.server_port = int(server_port)
|
||||
netsettings.job_id = job_id
|
||||
|
||||
render.engine = 'NET_RENDER'
|
||||
render.resolution_x = int(resolution_x)
|
||||
render.resolution_y = int(resolution_y)
|
||||
render.resolution_percentage = int(resolution_percentage)
|
||||
|
||||
render.use_full_sample = False
|
||||
render.use_compositing = False
|
||||
render.use_border = False
|
||||
|
||||
|
||||
def getFileInfo(filepath, infos):
|
||||
process = subprocess.Popen([sys.argv[0], "-b", "-noaudio", filepath, "-P", __file__, "--"] + infos, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
process = subprocess.Popen([sys.argv[0], "-b", "-noaudio", filepath, "-P", __file__, "--", "FileInfo"] + infos, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
stdout = bytes()
|
||||
while process.poll() is None:
|
||||
stdout += process.stdout.read(1024)
|
||||
@ -203,33 +297,17 @@ def getFileInfo(filepath, infos):
|
||||
values = [eval(v[1:].strip()) for v in stdout.split("\n") if v.startswith("$")]
|
||||
|
||||
return values
|
||||
|
||||
def thumbnail(filename):
|
||||
root = os.path.splitext(filename)[0]
|
||||
imagename = os.path.split(filename)[1]
|
||||
thumbname = root + ".jpg"
|
||||
|
||||
if os.path.exists(thumbname):
|
||||
return thumbname
|
||||
|
||||
if bpy:
|
||||
scene = bpy.data.scenes[0] # FIXME, this is dodgy!
|
||||
scene.render.file_format = "JPEG"
|
||||
scene.render.file_quality = 90
|
||||
bpy.ops.image.open(filepath=filename)
|
||||
img = bpy.data.images[imagename]
|
||||
img.save_render(thumbname, scene=scene)
|
||||
|
||||
try:
|
||||
process = subprocess.Popen(["convert", thumbname, "-resize", "300x300", thumbname])
|
||||
process.wait()
|
||||
return thumbname
|
||||
except:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import bpy
|
||||
for info in sys.argv[7:]:
|
||||
print("$", eval(info))
|
||||
try:
|
||||
start = sys.argv.index("--") + 1
|
||||
except ValueError:
|
||||
start = 0
|
||||
action, *args = sys.argv[start:]
|
||||
|
||||
if action == "FileInfo":
|
||||
for info in args:
|
||||
print("$", eval(info))
|
||||
elif action == "GetResults":
|
||||
_getResults(args[0], args[1], args[2], args[3], args[4], args[5])
|
||||
|
72
release/scripts/io/netrender/versioning.py
Normal file
@ -0,0 +1,72 @@
|
||||
import sys, os
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
from netrender.utils import *
|
||||
|
||||
class AbstractVCS:
|
||||
name = "ABSTRACT VCS"
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def update(self, info):
|
||||
"""update(info)
|
||||
Update a working copy to the specified revision.
|
||||
If working copy doesn't exist, do a full get from server to create it.
|
||||
[info] model.VersioningInfo instance, specifies the working path, remote path and version number."""
|
||||
pass
|
||||
|
||||
def revision(self, path):
|
||||
"""revision(path)
|
||||
return the current revision of the specified working copy path"""
|
||||
pass
|
||||
|
||||
def path(self, path):
|
||||
"""path(path)
|
||||
return the remote path of the specified working copy path"""
|
||||
pass
|
||||
|
||||
class Subversion(AbstractVCS):
|
||||
name = "Subversion"
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.version_exp = re.compile("([0-9]*)")
|
||||
self.path_exp = re.compile("URL: (.*)")
|
||||
|
||||
def update(self, info):
|
||||
if not os.path.exists(info.wpath):
|
||||
base, folder = os.path.split(info.wpath)
|
||||
|
||||
with DirectoryContext(base):
|
||||
subprocess.call(["svn", "co", "%s@%s" % (info.rpath, str(info.revision)), folder])
|
||||
else:
|
||||
with DirectoryContext(info.wpath):
|
||||
subprocess.call(["svn", "up", "--accept", "theirs-full", "-r", str(info.revision)])
|
||||
|
||||
def revision(self, path):
|
||||
if not os.path.exists(path):
|
||||
return
|
||||
|
||||
with DirectoryContext(path):
|
||||
stdout = subprocess.check_output(["svnversion"])
|
||||
|
||||
match = self.version_exp.match(str(stdout, encoding="utf-8"))
|
||||
|
||||
if match:
|
||||
return match.group(1)
|
||||
|
||||
def path(self, path):
|
||||
if not os.path.exists(path):
|
||||
return
|
||||
|
||||
with DirectoryContext(path):
|
||||
stdout = subprocess.check_output(["svn", "info"])
|
||||
|
||||
match = self.path_exp.search(str(stdout, encoding="utf-8"))
|
||||
|
||||
if match:
|
||||
return match.group(1)
|
||||
|
||||
SYSTEMS = {
|
||||
Subversion.name: Subversion()
|
||||
}
|
@ -1,7 +1,29 @@
|
||||
# Built-In Keying Sets
|
||||
# None of these Keying Sets should be removed, as these
|
||||
# are needed by various parts of Blender in order for them
|
||||
# to work correctly.
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
"""
|
||||
Built-In Keying Sets
|
||||
None of these Keying Sets should be removed, as these
|
||||
are needed by various parts of Blender in order for them
|
||||
to work correctly.
|
||||
"""
|
||||
|
||||
import bpy
|
||||
from keyingsets_utils import *
|
||||
@ -9,6 +31,7 @@ from keyingsets_utils import *
|
||||
###############################
|
||||
# Built-In KeyingSets
|
||||
|
||||
|
||||
# Location
|
||||
class BUILTIN_KSI_Location(bpy.types.KeyingSetInfo):
|
||||
bl_label = "Location"
|
||||
@ -19,9 +42,10 @@ class BUILTIN_KSI_Location(bpy.types.KeyingSetInfo):
|
||||
# iterator - use callback for selected bones/objects
|
||||
iterator = RKS_ITER_selected_item
|
||||
|
||||
# generator - use callback for location
|
||||
# generator - use callback for location
|
||||
generate = RKS_GEN_location
|
||||
|
||||
|
||||
# Rotation
|
||||
class BUILTIN_KSI_Rotation(bpy.types.KeyingSetInfo):
|
||||
bl_label = "Rotation"
|
||||
@ -32,9 +56,10 @@ class BUILTIN_KSI_Rotation(bpy.types.KeyingSetInfo):
|
||||
# iterator - use callback for selected bones/objects
|
||||
iterator = RKS_ITER_selected_item
|
||||
|
||||
# generator - use callback for location
|
||||
# generator - use callback for location
|
||||
generate = RKS_GEN_rotation
|
||||
|
||||
|
||||
# Scale
|
||||
class BUILTIN_KSI_Scaling(bpy.types.KeyingSetInfo):
|
||||
bl_label = "Scaling"
|
||||
@ -45,11 +70,12 @@ class BUILTIN_KSI_Scaling(bpy.types.KeyingSetInfo):
|
||||
# iterator - use callback for selected bones/objects
|
||||
iterator = RKS_ITER_selected_item
|
||||
|
||||
# generator - use callback for location
|
||||
# generator - use callback for location
|
||||
generate = RKS_GEN_scaling
|
||||
|
||||
# ------------
|
||||
|
||||
|
||||
# LocRot
|
||||
class BUILTIN_KSI_LocRot(bpy.types.KeyingSetInfo):
|
||||
bl_label = "LocRot"
|
||||
@ -60,13 +86,14 @@ class BUILTIN_KSI_LocRot(bpy.types.KeyingSetInfo):
|
||||
# iterator - use callback for selected bones/objects
|
||||
iterator = RKS_ITER_selected_item
|
||||
|
||||
# generator - use callback for location
|
||||
# generator
|
||||
def generate(self, context, ks, data):
|
||||
# location
|
||||
RKS_GEN_location(self, context, ks, data)
|
||||
# rotation
|
||||
RKS_GEN_rotation(self, context, ks, data)
|
||||
|
||||
|
||||
# LocScale
|
||||
class BUILTIN_KSI_LocScale(bpy.types.KeyingSetInfo):
|
||||
bl_label = "LocScale"
|
||||
@ -77,13 +104,14 @@ class BUILTIN_KSI_LocScale(bpy.types.KeyingSetInfo):
|
||||
# iterator - use callback for selected bones/objects
|
||||
iterator = RKS_ITER_selected_item
|
||||
|
||||
# generator - use callback for location
|
||||
# generator
|
||||
def generate(self, context, ks, data):
|
||||
# location
|
||||
RKS_GEN_location(self, context, ks, data)
|
||||
# scale
|
||||
RKS_GEN_scaling(self, context, ks, data)
|
||||
|
||||
|
||||
# LocRotScale
|
||||
class BUILTIN_KSI_LocRotScale(bpy.types.KeyingSetInfo):
|
||||
bl_label = "LocRotScale"
|
||||
@ -94,7 +122,7 @@ class BUILTIN_KSI_LocRotScale(bpy.types.KeyingSetInfo):
|
||||
# iterator - use callback for selected bones/objects
|
||||
iterator = RKS_ITER_selected_item
|
||||
|
||||
# generator - use callback for location
|
||||
# generator
|
||||
def generate(self, context, ks, data):
|
||||
# location
|
||||
RKS_GEN_location(self, context, ks, data)
|
||||
@ -103,6 +131,7 @@ class BUILTIN_KSI_LocRotScale(bpy.types.KeyingSetInfo):
|
||||
# scale
|
||||
RKS_GEN_scaling(self, context, ks, data)
|
||||
|
||||
|
||||
# RotScale
|
||||
class BUILTIN_KSI_RotScale(bpy.types.KeyingSetInfo):
|
||||
bl_label = "RotScale"
|
||||
@ -113,20 +142,21 @@ class BUILTIN_KSI_RotScale(bpy.types.KeyingSetInfo):
|
||||
# iterator - use callback for selected bones/objects
|
||||
iterator = RKS_ITER_selected_item
|
||||
|
||||
# generator - use callback for location
|
||||
# generator
|
||||
def generate(self, context, ks, data):
|
||||
# rotation
|
||||
RKS_GEN_rotation(self, context, ks, data)
|
||||
# scaling
|
||||
RKS_GEN_scaling(self, context, ks, data)
|
||||
|
||||
|
||||
# ------------
|
||||
|
||||
|
||||
# Location
|
||||
class BUILTIN_KSI_VisualLoc(bpy.types.KeyingSetInfo):
|
||||
bl_label = "Visual Location"
|
||||
|
||||
insertkey_visual = True
|
||||
bl_options = {'INSERTKEY_VISUAL'}
|
||||
|
||||
# poll - use predefined callback for selected bones/objects
|
||||
poll = RKS_POLL_selected_items
|
||||
@ -134,9 +164,10 @@ class BUILTIN_KSI_VisualLoc(bpy.types.KeyingSetInfo):
|
||||
# iterator - use callback for selected bones/objects
|
||||
iterator = RKS_ITER_selected_item
|
||||
|
||||
# generator - use callback for location
|
||||
# generator - use callback for location
|
||||
generate = RKS_GEN_location
|
||||
|
||||
|
||||
# Rotation
|
||||
class BUILTIN_KSI_VisualRot(bpy.types.KeyingSetInfo):
|
||||
bl_label = "Visual Rotation"
|
||||
@ -149,9 +180,10 @@ class BUILTIN_KSI_VisualRot(bpy.types.KeyingSetInfo):
|
||||
# iterator - use callback for selected bones/objects
|
||||
iterator = RKS_ITER_selected_item
|
||||
|
||||
# generator - use callback for location
|
||||
# generator - use callback for rotation
|
||||
generate = RKS_GEN_rotation
|
||||
|
||||
|
||||
# VisualLocRot
|
||||
class BUILTIN_KSI_VisualLocRot(bpy.types.KeyingSetInfo):
|
||||
bl_label = "Visual LocRot"
|
||||
@ -164,7 +196,7 @@ class BUILTIN_KSI_VisualLocRot(bpy.types.KeyingSetInfo):
|
||||
# iterator - use callback for selected bones/objects
|
||||
iterator = RKS_ITER_selected_item
|
||||
|
||||
# generator - use callback for location
|
||||
# generator
|
||||
def generate(self, context, ks, data):
|
||||
# location
|
||||
RKS_GEN_location(self, context, ks, data)
|
||||
@ -173,53 +205,163 @@ class BUILTIN_KSI_VisualLocRot(bpy.types.KeyingSetInfo):
|
||||
|
||||
# ------------
|
||||
|
||||
|
||||
# Available
|
||||
class BUILTIN_KSI_Available(bpy.types.KeyingSetInfo):
|
||||
bl_label = "Available"
|
||||
|
||||
# poll - use predefined callback for selected objects
|
||||
# TODO: this should really check whether the selected object (or datablock)
|
||||
# TODO: this should really check whether the selected object (or datablock)
|
||||
# has any animation data defined yet
|
||||
poll = RKS_POLL_selected_objects
|
||||
|
||||
# iterator - use callback for selected bones/objects
|
||||
iterator = RKS_ITER_selected_item
|
||||
|
||||
# generator - use callback for location
|
||||
# generator - use callback for doing this
|
||||
generate = RKS_GEN_available
|
||||
|
||||
###############################
|
||||
###############################
|
||||
|
||||
classes = [
|
||||
BUILTIN_KSI_Location,
|
||||
BUILTIN_KSI_Rotation,
|
||||
BUILTIN_KSI_Scaling,
|
||||
|
||||
BUILTIN_KSI_LocRot,
|
||||
BUILTIN_KSI_LocScale,
|
||||
BUILTIN_KSI_LocRotScale,
|
||||
BUILTIN_KSI_RotScale,
|
||||
# All properties that are likely to get animated in a character rig
|
||||
class BUILTIN_KSI_WholeCharacter(bpy.types.KeyingSetInfo):
|
||||
bl_label = "Whole Character"
|
||||
|
||||
BUILTIN_KSI_VisualLoc,
|
||||
BUILTIN_KSI_VisualRot,
|
||||
BUILTIN_KSI_VisualLocRot,
|
||||
# these prefixes should be avoided, as they are not really bones
|
||||
# that animators should be touching (or need to touch)
|
||||
badBonePrefixes = (
|
||||
'DEF',
|
||||
'GEO',
|
||||
'MCH',
|
||||
'ORG',
|
||||
'COR',
|
||||
'VIS',
|
||||
# ... more can be added here as you need in your own rigs ...
|
||||
)
|
||||
|
||||
BUILTIN_KSI_Available,
|
||||
]
|
||||
# poll - pose-mode on active object only
|
||||
def poll(ksi, context):
|
||||
return ((context.active_object) and (context.active_object.pose) and
|
||||
(context.active_object.mode == 'POSE'))
|
||||
|
||||
# iterator - all bones regardless of selection
|
||||
def iterator(ksi, context, ks):
|
||||
for bone in context.active_object.pose.bones:
|
||||
if not bone.name.startswith(BUILTIN_KSI_WholeCharacter.badBonePrefixes):
|
||||
ksi.generate(context, ks, bone)
|
||||
|
||||
# generator - all unlocked bone transforms + custom properties
|
||||
def generate(ksi, context, ks, bone):
|
||||
# loc, rot, scale - only include unlocked ones
|
||||
ksi.doLoc(ks, bone)
|
||||
|
||||
if bone.rotation_mode in ('QUATERNION', 'AXIS_ANGLE'):
|
||||
ksi.doRot4d(ks, bone)
|
||||
else:
|
||||
ksi.doRot3d(ks, bone)
|
||||
ksi.doScale(ks, bone)
|
||||
|
||||
# custom props?
|
||||
ksi.doCustomProps(ks, bone)
|
||||
|
||||
# ----------------
|
||||
|
||||
# helper to add some bone's property to the Keying Set
|
||||
def addProp(ksi, ks, bone, prop, index=-1, use_groups=True):
|
||||
# add the property name to the base path
|
||||
id_path = bone.path_from_id()
|
||||
id_block = bone.id_data
|
||||
|
||||
if prop.startswith('['):
|
||||
# custom properties
|
||||
path = id_path + prop
|
||||
else:
|
||||
# standard transforms/properties
|
||||
path = path_add_property(id_path, prop)
|
||||
|
||||
# add Keying Set entry for this...
|
||||
if use_groups:
|
||||
ks.paths.add(id_block, path, index, group_method='NAMED', group_name=bone.name)
|
||||
else:
|
||||
ks.paths.add(id_block, path, index)
|
||||
|
||||
# ----------------
|
||||
|
||||
# location properties
|
||||
def doLoc(ksi, ks, bone):
|
||||
if bone.lock_location == (False, False, False):
|
||||
ksi.addProp(ks, bone, "location")
|
||||
else:
|
||||
for i in range(3):
|
||||
if not bone.lock_location[i]:
|
||||
ksi.addProp(ks, bone, "location", i)
|
||||
|
||||
# rotation properties
|
||||
def doRot4d(ksi, ks, bone):
|
||||
# rotation mode affects the property used
|
||||
if bone.rotation_mode == 'QUATERNION':
|
||||
prop = "rotation_quaternion"
|
||||
elif bone.rotation_mode == 'AXIS_ANGLE':
|
||||
prop = "rotation_axis_angle"
|
||||
|
||||
# add rotation properties if they will
|
||||
if bone.lock_rotations_4d:
|
||||
# can check individually
|
||||
if (bone.lock_rotation == (False, False, False)) and (bone.lock_rotation_w == False):
|
||||
ksi.addProp(ks, bone, prop)
|
||||
else:
|
||||
if bone.lock_rotation_w == False:
|
||||
ksi.addProp(ks, bone, prop, 0) # w = 0
|
||||
|
||||
for i in range(3):
|
||||
if not bone.lock_rotation[i]:
|
||||
ksi.addProp(ks, bone, prop, i + 1) # i + 1, since here x,y,z = 1,2,3, and w=0
|
||||
elif True not in bone.lock_rotation:
|
||||
# if axis-angle rotations get locked as eulers, then it's too messy to allow anything
|
||||
# other than all open unless we keyframe the whole lot
|
||||
ksi.addProp(ks, bone, prop)
|
||||
|
||||
def doRot3d(ksi, ks, bone):
|
||||
if bone.lock_rotation == (False, False, False):
|
||||
ksi.addProp(ks, bone, "rotation_euler")
|
||||
else:
|
||||
for i in range(3):
|
||||
if not bone.lock_rotation[i]:
|
||||
ksi.addProp(ks, bone, "rotation_euler", i)
|
||||
|
||||
# scale properties
|
||||
def doScale(ksi, ks, bone):
|
||||
if bone.lock_scale == (0, 0, 0):
|
||||
ksi.addProp(ks, bone, "scale")
|
||||
else:
|
||||
for i in range(3):
|
||||
if not bone.lock_scale[i]:
|
||||
ksi.addProp(ks, bone, "scale", i)
|
||||
|
||||
# ----------------
|
||||
|
||||
# custom properties
|
||||
def doCustomProps(ksi, ks, bone):
|
||||
# go over all custom properties for bone
|
||||
for prop, val in bone.items():
|
||||
# ignore special "_RNA_UI" used for UI editing
|
||||
if prop == "_RNA_UI":
|
||||
continue
|
||||
|
||||
# for now, just add all of 'em
|
||||
ksi.addProp(ks, bone, '["%s"]' % (prop))
|
||||
|
||||
|
||||
def register():
|
||||
register = bpy.types.register
|
||||
for cls in classes:
|
||||
register(cls)
|
||||
bpy.utils.register_module(__name__)
|
||||
|
||||
|
||||
def unregister():
|
||||
unregister = bpy.types.unregister
|
||||
for cls in classes:
|
||||
unregister(cls)
|
||||
bpy.utils.unregister_module(__name__)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
register()
|
||||
|
||||
###############################
|
||||
###############################
|
||||
|
@ -23,28 +23,47 @@ import mathutils
|
||||
|
||||
|
||||
def add_object_align_init(context, operator):
|
||||
space_data = context.space_data
|
||||
if space_data.type != 'VIEW_3D':
|
||||
space_data = None
|
||||
|
||||
if operator and operator.properties.is_property_set("location") and operator.properties.is_property_set("rotation"):
|
||||
# location
|
||||
if operator and operator.properties.is_property_set("location"):
|
||||
location = mathutils.Matrix.Translation(mathutils.Vector(operator.properties.location))
|
||||
rotation = mathutils.Euler(operator.properties.rotation).to_matrix().resize4x4()
|
||||
else:
|
||||
# TODO, local view cursor!
|
||||
location = mathutils.Matrix.Translation(context.scene.cursor_location)
|
||||
if space_data: # local view cursor is detected below
|
||||
location = mathutils.Matrix.Translation(space_data.cursor_location)
|
||||
else:
|
||||
location = mathutils.Matrix.Translation(context.scene.cursor_location)
|
||||
|
||||
if context.user_preferences.edit.object_align == 'VIEW' and context.space_data.type == 'VIEW_3D':
|
||||
rotation = context.space_data.region_3d.view_matrix.rotation_part().invert().resize4x4()
|
||||
if operator:
|
||||
operator.properties.location = location.to_translation()
|
||||
|
||||
# rotation
|
||||
view_align = (context.user_preferences.edit.object_align == 'VIEW')
|
||||
view_align_force = False
|
||||
if operator:
|
||||
if operator.properties.is_property_set("view_align"):
|
||||
view_align = view_align_force = operator.view_align
|
||||
else:
|
||||
operator.properties.view_align = view_align
|
||||
|
||||
if operator and operator.properties.is_property_set("rotation") and not view_align_force:
|
||||
rotation = mathutils.Euler(operator.properties.rotation).to_matrix().to_4x4()
|
||||
else:
|
||||
if view_align and space_data:
|
||||
rotation = space_data.region_3d.view_matrix.to_3x3().inverted().to_4x4()
|
||||
else:
|
||||
rotation = mathutils.Matrix()
|
||||
|
||||
# set the operator properties
|
||||
if operator:
|
||||
operator.properties.location = location.translation_part()
|
||||
operator.properties.rotation = rotation.to_euler()
|
||||
|
||||
return location * rotation
|
||||
|
||||
|
||||
def add_object_data(context, obdata, operator=None):
|
||||
def object_data_add(context, obdata, operator=None):
|
||||
|
||||
scene = context.scene
|
||||
|
||||
@ -65,6 +84,7 @@ def add_object_data(context, obdata, operator=None):
|
||||
obj_act = scene.objects.active
|
||||
|
||||
if obj_act and obj_act.mode == 'EDIT' and obj_act.type == obj_new.type:
|
||||
bpy.ops.mesh.select_all(action='DESELECT')
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
obj_act.select = True
|
||||
|
@ -22,11 +22,12 @@
|
||||
This module has utility functions for renaming
|
||||
rna values in fcurves and drivers.
|
||||
|
||||
The main function to use is: update_data_paths(...)
|
||||
The main function to use is: update_data_paths(...)
|
||||
"""
|
||||
|
||||
IS_TESTING = False
|
||||
|
||||
|
||||
class DataPathBuilder(object):
|
||||
__slots__ = ("data_path", )
|
||||
""" Dummy class used to parse fcurve and driver data paths.
|
||||
@ -37,7 +38,7 @@ class DataPathBuilder(object):
|
||||
def __getattr__(self, attr):
|
||||
str_value = ".%s" % attr
|
||||
return DataPathBuilder(self.data_path + (str_value, ))
|
||||
|
||||
|
||||
def __getitem__(self, key):
|
||||
str_value = '["%s"]' % key
|
||||
return DataPathBuilder(self.data_path + (str_value, ))
|
||||
@ -51,7 +52,7 @@ class DataPathBuilder(object):
|
||||
if base is not Ellipsis:
|
||||
try:
|
||||
# this only works when running with an old blender
|
||||
# where the old path will resolve
|
||||
# where the old path will resolve
|
||||
base = eval("base" + item)
|
||||
except:
|
||||
base_new = Ellipsis
|
||||
@ -61,7 +62,7 @@ class DataPathBuilder(object):
|
||||
try:
|
||||
print("base." + item_new)
|
||||
base_new = eval("base." + item_new)
|
||||
break # found, dont keep looking
|
||||
break # found, dont keep looking
|
||||
except:
|
||||
pass
|
||||
|
||||
@ -77,7 +78,7 @@ import bpy
|
||||
|
||||
def id_iter():
|
||||
type_iter = type(bpy.data.objects)
|
||||
|
||||
|
||||
for attr in dir(bpy.data):
|
||||
data_iter = getattr(bpy.data, attr, None)
|
||||
if type(data_iter) == type_iter:
|
||||
@ -111,16 +112,17 @@ def classes_recursive(base_type, clss=None):
|
||||
|
||||
|
||||
def find_path_new(id_data, data_path, rna_update_dict, rna_update_from_map):
|
||||
# note!, id_data can be ID type or a node tree
|
||||
# ignore ID props for now
|
||||
if data_path.startswith("["):
|
||||
return data_path
|
||||
|
||||
|
||||
# recursive path fixing, likely will be one in most cases.
|
||||
data_path_builder = eval("DataPathBuilder(tuple())." + data_path)
|
||||
data_resolve = data_path_builder.resolve(id_data, rna_update_from_map)
|
||||
|
||||
path_new = [pair[0] for pair in data_resolve]
|
||||
|
||||
|
||||
# print(data_resolve)
|
||||
data_base = id_data
|
||||
|
||||
@ -137,55 +139,60 @@ def find_path_new(id_data, data_path, rna_update_dict, rna_update_from_map):
|
||||
|
||||
# set this as the base for further properties
|
||||
data_base = data
|
||||
|
||||
data_path_new = "".join(path_new)[1:] # skip the first "."
|
||||
|
||||
data_path_new = "".join(path_new)[1:] # skip the first "."
|
||||
return data_path_new
|
||||
|
||||
|
||||
def update_data_paths(rna_update):
|
||||
''' rna_update triple [(class_name, from, to), ...]
|
||||
'''
|
||||
|
||||
|
||||
# make a faster lookup dict
|
||||
rna_update_dict = {}
|
||||
for ren_class, ren_from, ren_to in rna_update:
|
||||
rna_update_dict.setdefault(ren_class, {})[ren_from] = ren_to
|
||||
|
||||
|
||||
rna_update_from_map = {}
|
||||
for ren_class, ren_from, ren_to in rna_update:
|
||||
rna_update_from_map.setdefault(ren_from, []).append(ren_to)
|
||||
|
||||
for id_data in id_iter():
|
||||
anim_data = getattr(id_data, "animation_data", None)
|
||||
if anim_data is None:
|
||||
continue
|
||||
|
||||
for fcurve in anim_data.drivers:
|
||||
for var in fcurve.driver.variables:
|
||||
if var.type == 'SINGLE_PROP':
|
||||
for tar in var.targets:
|
||||
id_data_other = tar.id
|
||||
data_path = tar.data_path
|
||||
|
||||
if id_data_other and data_path:
|
||||
data_path_new = find_path_new(id_data_other, data_path, rna_update_dict, rna_update_from_map)
|
||||
# print(data_path_new)
|
||||
if data_path_new != data_path:
|
||||
if not IS_TESTING:
|
||||
tar.data_path = data_path_new
|
||||
print("driver (%s): %s -> %s" % (id_data_other.name, data_path, data_path_new))
|
||||
|
||||
|
||||
|
||||
for action in anim_data_actions(anim_data):
|
||||
for fcu in action.fcurves:
|
||||
data_path = fcu.data_path
|
||||
data_path_new = find_path_new(id_data, data_path, rna_update_dict, rna_update_from_map)
|
||||
# print(data_path_new)
|
||||
if data_path_new != data_path:
|
||||
if not IS_TESTING:
|
||||
fcu.data_path = data_path_new
|
||||
print("fcurve (%s): %s -> %s" % (id_data.name, data_path, data_path_new))
|
||||
|
||||
# check node-trees too
|
||||
anim_data_ls = [(id_data, getattr(id_data, "animation_data", None))]
|
||||
node_tree = getattr(id_data, "node_tree", None)
|
||||
if node_tree:
|
||||
anim_data_ls.append((node_tree, node_tree.animation_data))
|
||||
|
||||
for anim_data_base, anim_data in anim_data_ls:
|
||||
if anim_data is None:
|
||||
continue
|
||||
|
||||
for fcurve in anim_data.drivers:
|
||||
for var in fcurve.driver.variables:
|
||||
if var.type == 'SINGLE_PROP':
|
||||
for tar in var.targets:
|
||||
id_data_other = tar.id
|
||||
data_path = tar.data_path
|
||||
|
||||
if id_data_other and data_path:
|
||||
data_path_new = find_path_new(id_data_other, data_path, rna_update_dict, rna_update_from_map)
|
||||
# print(data_path_new)
|
||||
if data_path_new != data_path:
|
||||
if not IS_TESTING:
|
||||
tar.data_path = data_path_new
|
||||
print("driver (%s): %s -> %s" % (id_data_other.name, data_path, data_path_new))
|
||||
|
||||
for action in anim_data_actions(anim_data):
|
||||
for fcu in action.fcurves:
|
||||
data_path = fcu.data_path
|
||||
data_path_new = find_path_new(anim_data_base, data_path, rna_update_dict, rna_update_from_map)
|
||||
# print(data_path_new)
|
||||
if data_path_new != data_path:
|
||||
if not IS_TESTING:
|
||||
fcu.data_path = data_path_new
|
||||
print("fcurve (%s): %s -> %s" % (id_data.name, data_path, data_path_new))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
@ -26,9 +26,8 @@ data = _bpy.data
|
||||
context = _bpy.context
|
||||
|
||||
# python modules
|
||||
from bpy import utils, path
|
||||
|
||||
from bpy import ops as _ops_module
|
||||
from . import utils, path
|
||||
from . import ops as _ops_module
|
||||
|
||||
# fake operator module
|
||||
ops = _ops_module.ops_fake_module
|
||||
@ -50,6 +49,10 @@ def _main():
|
||||
pydoc.Helper.getline = lambda self, prompt: None
|
||||
pydoc.TextDoc.use_bold = lambda self, text: text
|
||||
|
||||
# Possibly temp. addons path
|
||||
from os.path import join, dirname, normpath
|
||||
_sys.path.append(normpath(join(dirname(__file__), "..", "..", "addons", "modules")))
|
||||
|
||||
# if "-d" in sys.argv: # Enable this to measure startup speed
|
||||
if 0:
|
||||
import cProfile
|
||||
|
@ -115,14 +115,44 @@ class bpy_ops_submodule_op(object):
|
||||
def _get_doc(self):
|
||||
return op_as_string(self.idname())
|
||||
|
||||
@staticmethod
|
||||
def _parse_args(args):
|
||||
C_dict = None
|
||||
C_exec = 'EXEC_DEFAULT'
|
||||
|
||||
if len(args) == 0:
|
||||
pass
|
||||
elif len(args) == 1:
|
||||
if type(args[0]) != str:
|
||||
C_dict = args[0]
|
||||
else:
|
||||
C_exec = args[0]
|
||||
elif len(args) == 2:
|
||||
C_exec, C_dict = args
|
||||
else:
|
||||
raise ValueError("1 or 2 args execution context is supported")
|
||||
|
||||
return C_dict, C_exec
|
||||
|
||||
@staticmethod
|
||||
def _scene_update(context):
|
||||
scene = context.scene
|
||||
if scene: # None in backgroud mode
|
||||
scene.update()
|
||||
else:
|
||||
import bpy
|
||||
for scene in bpy.data.scenes:
|
||||
scene.update()
|
||||
|
||||
__doc__ = property(_get_doc)
|
||||
|
||||
def __init__(self, module, func):
|
||||
self.module = module
|
||||
self.func = func
|
||||
|
||||
def poll(self, context=None):
|
||||
return op_poll(self.idname_py(), context)
|
||||
def poll(self, *args):
|
||||
C_dict, C_exec = __class__._parse_args(args)
|
||||
return op_poll(self.idname_py(), C_dict, C_exec)
|
||||
|
||||
def idname(self):
|
||||
# submod.foo -> SUBMOD_OT_foo
|
||||
@ -133,42 +163,23 @@ class bpy_ops_submodule_op(object):
|
||||
return self.module + "." + self.func
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
import bpy
|
||||
context = bpy.context
|
||||
|
||||
# Get the operator from blender
|
||||
if len(args) > 2:
|
||||
raise ValueError("1 or 2 args execution context is supported")
|
||||
wm = context.window_manager
|
||||
|
||||
C_dict = None
|
||||
# run to account for any rna values the user changes.
|
||||
__class__._scene_update(context)
|
||||
|
||||
if args:
|
||||
|
||||
C_exec = 'EXEC_DEFAULT'
|
||||
|
||||
if len(args) == 2:
|
||||
C_exec = args[0]
|
||||
C_dict = args[1]
|
||||
else:
|
||||
if type(args[0]) != str:
|
||||
C_dict = args[0]
|
||||
else:
|
||||
C_exec = args[0]
|
||||
|
||||
if len(args) == 2:
|
||||
C_dict = args[1]
|
||||
|
||||
C_dict, C_exec = __class__._parse_args(args)
|
||||
ret = op_call(self.idname_py(), C_dict, kw, C_exec)
|
||||
|
||||
else:
|
||||
ret = op_call(self.idname_py(), C_dict, kw)
|
||||
ret = op_call(self.idname_py(), None, kw)
|
||||
|
||||
if 'FINISHED' in ret:
|
||||
import bpy
|
||||
scene = bpy.context.scene
|
||||
if scene: # None in backgroud mode
|
||||
scene.update()
|
||||
else:
|
||||
for scene in bpy.data.scenes:
|
||||
scene.update()
|
||||
if 'FINISHED' in ret and context.window_manager == wm:
|
||||
__class__._scene_update(context)
|
||||
|
||||
return ret
|
||||
|
||||
@ -182,8 +193,16 @@ class bpy_ops_submodule_op(object):
|
||||
import bpy
|
||||
idname = self.idname()
|
||||
as_string = op_as_string(idname)
|
||||
descr = getattr(bpy.types, idname).bl_rna.description
|
||||
return as_string + "\n" + descr
|
||||
op_class = getattr(bpy.types, idname)
|
||||
descr = op_class.bl_rna.description
|
||||
# XXX, workaround for not registering
|
||||
# every __doc__ to save time on load.
|
||||
if not descr:
|
||||
descr = op_class.__doc__
|
||||
if not descr:
|
||||
descr = ""
|
||||
|
||||
return "# %s\n%s" % (descr, as_string)
|
||||
|
||||
def __str__(self): # used for print(...)
|
||||
return "<function bpy.ops.%s.%s at 0x%x'>" % \
|
||||
|
@ -27,12 +27,15 @@ import bpy as _bpy
|
||||
import os as _os
|
||||
|
||||
|
||||
def abspath(path):
|
||||
def abspath(path, start=None):
|
||||
"""
|
||||
Returns the absolute path relative to the current blend file using the "//" prefix.
|
||||
|
||||
:arg start: Relative to this path, when not set the current filename is used.
|
||||
:type start: string
|
||||
"""
|
||||
if path.startswith("//"):
|
||||
return _os.path.join(_os.path.dirname(_bpy.data.filepath), path[2:])
|
||||
return _os.path.join(_os.path.dirname(_bpy.data.filepath if start is None else start), path[2:])
|
||||
|
||||
return path
|
||||
|
||||
@ -52,6 +55,17 @@ def relpath(path, start=None):
|
||||
return path
|
||||
|
||||
|
||||
def is_subdir(path, directory):
|
||||
"""
|
||||
Returns true if *path* in a subdirectory of *directory*.
|
||||
Both paths must be absolute.
|
||||
"""
|
||||
from os.path import normpath, normcase
|
||||
path = normpath(normcase(path))
|
||||
directory = normpath(normcase(directory))
|
||||
return path.startswith(directory)
|
||||
|
||||
|
||||
def clean_name(name, replace="_"):
|
||||
"""
|
||||
Returns a name with characters replaced that may cause problems under various circumstances, such as writing to a file.
|
||||
@ -99,6 +113,13 @@ def display_name(name):
|
||||
return name_base
|
||||
|
||||
|
||||
def display_name_from_filepath(name):
|
||||
"""
|
||||
Returns the path stripped of directort and extension, ensured to be utf8 compatible.
|
||||
"""
|
||||
return _os.path.splitext(_os.path.basename(name))[0].encode("utf8", "replace").decode("utf8")
|
||||
|
||||
|
||||
def resolve_ncase(path):
|
||||
"""
|
||||
Resolve a case insensitive path on a case sensitive system,
|
||||
@ -108,7 +129,7 @@ def resolve_ncase(path):
|
||||
import os
|
||||
|
||||
def _ncase_path_found(path):
|
||||
if path == "" or os.path.exists(path):
|
||||
if not path or os.path.exists(path):
|
||||
return path, True
|
||||
|
||||
filename = os.path.basename(path) # filename may be a directory or a file
|
||||
@ -192,7 +213,9 @@ def module_names(path, recursive=False):
|
||||
modules = []
|
||||
|
||||
for filename in sorted(_os.listdir(path)):
|
||||
if filename.endswith(".py") and filename != "__init__.py":
|
||||
if filename == "modules":
|
||||
pass # XXX, hard coded exception.
|
||||
elif filename.endswith(".py") and filename != "__init__.py":
|
||||
fullpath = join(path, filename)
|
||||
modules.append((filename[0:-3], fullpath))
|
||||
elif ("." not in filename):
|
||||
|
@ -23,12 +23,16 @@ This module contains utility functions specific to blender but
|
||||
not assosiated with blenders internal data.
|
||||
"""
|
||||
|
||||
import bpy as _bpy
|
||||
import os as _os
|
||||
import sys as _sys
|
||||
from _bpy import register_class
|
||||
from _bpy import unregister_class
|
||||
|
||||
from _bpy import blend_paths
|
||||
from _bpy import script_paths as _bpy_script_paths
|
||||
from _bpy import user_resource as _user_resource
|
||||
|
||||
import bpy as _bpy
|
||||
import os as _os
|
||||
import sys as _sys
|
||||
|
||||
|
||||
def _test_import(module_name, loaded_modules):
|
||||
@ -99,9 +103,6 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
|
||||
import traceback
|
||||
import time
|
||||
|
||||
# must be set back to True on exits
|
||||
_bpy_types._register_immediate = False
|
||||
|
||||
t_main = time.time()
|
||||
|
||||
loaded_modules = set()
|
||||
@ -111,7 +112,6 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
|
||||
|
||||
if reload_scripts:
|
||||
_bpy_types.TypeMap.clear()
|
||||
_bpy_types.PropertiesMap.clear()
|
||||
|
||||
# just unload, dont change user defaults, this means we can sync to reload.
|
||||
# note that they will only actually reload of the modification time changes.
|
||||
@ -120,7 +120,6 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
|
||||
addon_disable(module_name, default_set=False)
|
||||
|
||||
def register_module_call(mod):
|
||||
_bpy_types._register_module(mod.__name__)
|
||||
register = getattr(mod, "register", None)
|
||||
if register:
|
||||
try:
|
||||
@ -131,7 +130,6 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
|
||||
print("\nWarning! '%s' has no register function, this is now a requirement for registerable scripts." % mod.__file__)
|
||||
|
||||
def unregister_module_call(mod):
|
||||
_bpy_types._unregister_module(mod.__name__)
|
||||
unregister = getattr(mod, "unregister", None)
|
||||
if unregister:
|
||||
try:
|
||||
@ -140,6 +138,7 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
|
||||
traceback.print_exc()
|
||||
|
||||
def test_reload(mod):
|
||||
import imp
|
||||
# reloading this causes internal errors
|
||||
# because the classes from this module are stored internally
|
||||
# possibly to refresh internal references too but for now, best not to.
|
||||
@ -147,7 +146,7 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
|
||||
return mod
|
||||
|
||||
try:
|
||||
return reload(mod)
|
||||
return imp.reload(mod)
|
||||
except:
|
||||
traceback.print_exc()
|
||||
|
||||
@ -197,18 +196,14 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
|
||||
for mod in modules_from_path(path, loaded_modules):
|
||||
test_register(mod)
|
||||
|
||||
_bpy_types._register_immediate = True
|
||||
|
||||
# deal with addons seperately
|
||||
addon_reset_all()
|
||||
|
||||
addon_reset_all(reload_scripts)
|
||||
|
||||
# run the active integration preset
|
||||
filepath = preset_find(_bpy.context.user_preferences.inputs.active_keyconfig, "keyconfig")
|
||||
if filepath:
|
||||
keyconfig_set(filepath)
|
||||
|
||||
|
||||
if reload_scripts:
|
||||
import gc
|
||||
print("gc.collect() -> %d" % gc.collect())
|
||||
@ -339,7 +334,14 @@ def addon_check(module_name):
|
||||
loaded_default = module_name in _bpy.context.user_preferences.addons
|
||||
|
||||
mod = _sys.modules.get(module_name)
|
||||
loaded_state = mod and getattr(mod, "__addon_enabled__")
|
||||
loaded_state = mod and getattr(mod, "__addon_enabled__", Ellipsis)
|
||||
|
||||
if loaded_state is Ellipsis:
|
||||
print("Warning: addon-module %r found module but without"
|
||||
" __addon_enabled__ field, possible name collision from file: %r" %
|
||||
(module_name, getattr(mod, "__file__", "<unknown>")))
|
||||
|
||||
loaded_state = False
|
||||
|
||||
return loaded_default, loaded_state
|
||||
|
||||
@ -358,15 +360,11 @@ def addon_enable(module_name, default_set=True):
|
||||
import os
|
||||
import sys
|
||||
import bpy_types as _bpy_types
|
||||
|
||||
|
||||
_bpy_types._register_immediate = False
|
||||
import imp
|
||||
|
||||
def handle_error():
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
_bpy_types._register_immediate = True
|
||||
|
||||
|
||||
# reload if the mtime changes
|
||||
mod = sys.modules.get(module_name)
|
||||
@ -378,7 +376,7 @@ def addon_enable(module_name, default_set=True):
|
||||
print("module changed on disk:", mod.__file__, "reloading...")
|
||||
|
||||
try:
|
||||
reload(mod)
|
||||
imp.reload(mod)
|
||||
except:
|
||||
handle_error()
|
||||
del sys.modules[module_name]
|
||||
@ -396,19 +394,13 @@ def addon_enable(module_name, default_set=True):
|
||||
return None
|
||||
|
||||
# 2) try register collected modules
|
||||
try:
|
||||
_bpy_types._register_module(module_name)
|
||||
except:
|
||||
handle_error()
|
||||
del sys.modules[module_name]
|
||||
return None
|
||||
# removed, addons need to handle own registration now.
|
||||
|
||||
# 3) try run the modules register function
|
||||
try:
|
||||
mod.register()
|
||||
except:
|
||||
handle_error()
|
||||
_bpy_types._unregister_module(module_name)
|
||||
del sys.modules[module_name]
|
||||
return None
|
||||
|
||||
@ -419,12 +411,11 @@ def addon_enable(module_name, default_set=True):
|
||||
if not ext:
|
||||
ext = _bpy.context.user_preferences.addons.new()
|
||||
ext.module = module_name
|
||||
|
||||
_bpy_types._register_immediate = True
|
||||
|
||||
mod.__addon_enabled__ = True
|
||||
|
||||
print("\tbpy.utils.addon_enable", mod.__name__)
|
||||
if _bpy.app.debug:
|
||||
print("\tbpy.utils.addon_enable", mod.__name__)
|
||||
|
||||
return mod
|
||||
|
||||
@ -447,7 +438,6 @@ def addon_disable(module_name, default_set=True):
|
||||
mod.__addon_enabled__ = False
|
||||
|
||||
try:
|
||||
_bpy_types._unregister_module(module_name, free=False) # dont free because we may want to enable again.
|
||||
mod.unregister()
|
||||
except:
|
||||
traceback.print_exc()
|
||||
@ -462,21 +452,37 @@ def addon_disable(module_name, default_set=True):
|
||||
addon = addons.get(module_name)
|
||||
if addon:
|
||||
addons.remove(addon)
|
||||
|
||||
print("\tbpy.utils.addon_disable", module_name)
|
||||
|
||||
if _bpy.app.debug:
|
||||
print("\tbpy.utils.addon_disable", module_name)
|
||||
|
||||
|
||||
def addon_reset_all():
|
||||
def addon_reset_all(reload_scripts=False):
|
||||
"""
|
||||
Sets the addon state based on the user preferences.
|
||||
"""
|
||||
import imp
|
||||
|
||||
paths = script_paths("addons") + script_paths("addons_contrib")
|
||||
# RELEASE SCRIPTS: official scripts distributed in Blender releases
|
||||
paths = script_paths("addons")
|
||||
|
||||
# CONTRIB SCRIPTS: good for testing but not official scripts yet
|
||||
paths += script_paths("addons_contrib")
|
||||
|
||||
# EXTERN SCRIPTS: external projects scripts
|
||||
paths += script_paths("addons_extern")
|
||||
|
||||
for path in paths:
|
||||
_sys_path_ensure(path)
|
||||
for mod_name, mod_path in _bpy.path.module_names(path):
|
||||
is_enabled, is_loaded = addon_check(mod_name)
|
||||
|
||||
# first check if reload is needed before changing state.
|
||||
if reload_scripts:
|
||||
mod = _sys.modules.get(mod_name)
|
||||
if mod:
|
||||
imp.reload(mod)
|
||||
|
||||
if is_enabled == is_loaded:
|
||||
pass
|
||||
elif is_enabled:
|
||||
@ -485,12 +491,13 @@ def addon_reset_all():
|
||||
print("\taddon_reset_all unloading", mod_name)
|
||||
addon_disable(mod_name)
|
||||
|
||||
|
||||
def preset_find(name, preset_path, display_name=False):
|
||||
if not name:
|
||||
return None
|
||||
|
||||
|
||||
for directory in preset_paths(preset_path):
|
||||
|
||||
|
||||
if display_name:
|
||||
filename = ""
|
||||
for fn in _os.listdir(directory):
|
||||
@ -533,8 +540,91 @@ def keyconfig_set(filepath):
|
||||
keyconfigs.remove(kc_dupe)
|
||||
else:
|
||||
break
|
||||
|
||||
|
||||
kc_new.name = name
|
||||
keyconfigs.active = kc_new
|
||||
|
||||
|
||||
def user_resource(type, path="", create=False):
|
||||
"""
|
||||
Return a user resource path (normally from the users home directory).
|
||||
|
||||
:arg type: Resource type in ['DATAFILES', 'CONFIG', 'SCRIPTS', 'AUTOSAVE'].
|
||||
:type type: string
|
||||
:arg subdir: Optional subdirectory.
|
||||
:type subdir: string
|
||||
:arg create: Treat the path as a directory and create it if its not existing.
|
||||
:type create: boolean
|
||||
:return: a path.
|
||||
:rtype: string
|
||||
"""
|
||||
|
||||
target_path = _user_resource(type, path)
|
||||
|
||||
if create:
|
||||
# should always be true.
|
||||
if target_path:
|
||||
# create path if not existing.
|
||||
if not _os.path.exists(target_path):
|
||||
try:
|
||||
_os.makedirs(target_path)
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
target_path = ""
|
||||
elif not _os.path.isdir(target_path):
|
||||
print("Path %r found but isn't a directory!" % target_path)
|
||||
target_path = ""
|
||||
|
||||
return target_path
|
||||
|
||||
|
||||
def _bpy_module_classes(module, is_registered=False):
|
||||
typemap_list = _bpy_types.TypeMap.get(module, ())
|
||||
i = 0
|
||||
while i < len(typemap_list):
|
||||
cls_weakref, path, line = typemap_list[i]
|
||||
cls = cls_weakref()
|
||||
|
||||
if cls is None:
|
||||
del typemap_list[i]
|
||||
else:
|
||||
if is_registered == ("bl_rna" in cls.__dict__):
|
||||
yield (cls, path, line)
|
||||
i += 1
|
||||
|
||||
|
||||
def register_module(module, verbose=False):
|
||||
import traceback
|
||||
if verbose:
|
||||
print("bpy.utils.register_module(%r): ..." % module)
|
||||
for cls, path, line in _bpy_module_classes(module, is_registered=False):
|
||||
if verbose:
|
||||
print(" %s of %s:%s" % (cls, path, line))
|
||||
try:
|
||||
register_class(cls)
|
||||
except:
|
||||
print("bpy.utils.register_module(): failed to registering class '%s.%s'" % (cls.__module__, cls.__name__))
|
||||
print("\t", path, "line", line)
|
||||
traceback.print_exc()
|
||||
if verbose:
|
||||
print("done.\n")
|
||||
if "cls" not in locals():
|
||||
raise Exception("register_module(%r): defines no classes" % module)
|
||||
|
||||
|
||||
def unregister_module(module, verbose=False):
|
||||
import traceback
|
||||
if verbose:
|
||||
print("bpy.utils.unregister_module(%r): ..." % module)
|
||||
for cls, path, line in _bpy_module_classes(module, is_registered=True):
|
||||
if verbose:
|
||||
print(" %s of %s:%s" % (cls, path, line))
|
||||
try:
|
||||
unregister_class(cls)
|
||||
except:
|
||||
print("bpy.utils.unregister_module(): failed to unregistering class '%s.%s'" % (cls.__module__, cls.__name__))
|
||||
print("\t", path, "line", line)
|
||||
traceback.print_exc()
|
||||
if verbose:
|
||||
print("done.\n")
|
@ -142,19 +142,19 @@ class _GenericBone:
|
||||
def x_axis(self):
|
||||
""" Vector pointing down the x-axis of the bone.
|
||||
"""
|
||||
return self.matrix.rotation_part() * Vector((1.0, 0.0, 0.0))
|
||||
return Vector((1.0, 0.0, 0.0)) * self.matrix.to_3x3()
|
||||
|
||||
@property
|
||||
def y_axis(self):
|
||||
""" Vector pointing down the x-axis of the bone.
|
||||
"""
|
||||
return self.matrix.rotation_part() * Vector((0.0, 1.0, 0.0))
|
||||
return Vector((0.0, 1.0, 0.0)) * self.matrix.to_3x3()
|
||||
|
||||
@property
|
||||
def z_axis(self):
|
||||
""" Vector pointing down the x-axis of the bone.
|
||||
"""
|
||||
return self.matrix.rotation_part() * Vector((0.0, 0.0, 1.0))
|
||||
return Vector((0.0, 0.0, 1.0)) * self.matrix.to_3x3()
|
||||
|
||||
@property
|
||||
def basename(self):
|
||||
@ -258,15 +258,15 @@ class _GenericBone:
|
||||
return bones
|
||||
|
||||
|
||||
class PoseBone(StructRNA, _GenericBone):
|
||||
class PoseBone(StructRNA, _GenericBone, metaclass=StructMetaIDProp):
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
class Bone(StructRNA, _GenericBone):
|
||||
class Bone(StructRNA, _GenericBone, metaclass=StructMetaIDProp):
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
class EditBone(StructRNA, _GenericBone):
|
||||
class EditBone(StructRNA, _GenericBone, metaclass=StructMetaIDProp):
|
||||
__slots__ = ()
|
||||
|
||||
def align_orientation(self, other):
|
||||
@ -284,13 +284,13 @@ class EditBone(StructRNA, _GenericBone):
|
||||
Expects a 4x4 or 3x3 matrix.
|
||||
"""
|
||||
from mathutils import Vector
|
||||
z_vec = self.matrix.rotation_part() * Vector((0.0, 0.0, 1.0))
|
||||
self.tail = matrix * self.tail
|
||||
self.head = matrix * self.head
|
||||
z_vec = Vector((0.0, 0.0, 1.0)) * self.matrix.to_3x3()
|
||||
self.tail = self.tail * matrix
|
||||
self.head = self.head * matrix
|
||||
scalar = matrix.median_scale
|
||||
self.head_radius *= scalar
|
||||
self.tail_radius *= scalar
|
||||
self.align_roll(matrix * z_vec)
|
||||
self.align_roll(z_vec * matrix)
|
||||
|
||||
|
||||
def ord_ind(i1, i2):
|
||||
@ -302,18 +302,25 @@ def ord_ind(i1, i2):
|
||||
class Mesh(bpy_types.ID):
|
||||
__slots__ = ()
|
||||
|
||||
def from_pydata(self, verts, edges, faces):
|
||||
def from_pydata(self, vertices, edges, faces):
|
||||
"""
|
||||
Make a mesh from a list of verts/edges/faces
|
||||
Until we have a nicer way to make geometry, use this.
|
||||
|
||||
:arg vertices: float triplets each representing (X, Y, Z) eg: [(0.0, 1.0, 0.5), ...].
|
||||
:type vertices: iterable object
|
||||
:arg edges: int pairs, each pair contains two indices to the *vertices* argument. eg: [(1, 2), ...]
|
||||
:type edges: iterable object
|
||||
:arg faces: iterator of faces, each faces contains three or four indices to the *vertices* argument. eg: [(5, 6, 8, 9), (1, 2, 3), ...]
|
||||
:type faces: iterable object
|
||||
"""
|
||||
self.vertices.add(len(verts))
|
||||
self.vertices.add(len(vertices))
|
||||
self.edges.add(len(edges))
|
||||
self.faces.add(len(faces))
|
||||
|
||||
verts_flat = [f for v in verts for f in v]
|
||||
self.vertices.foreach_set("co", verts_flat)
|
||||
del verts_flat
|
||||
vertices_flat = [f for v in vertices for f in v]
|
||||
self.vertices.foreach_set("co", vertices_flat)
|
||||
del vertices_flat
|
||||
|
||||
edges_flat = [i for e in edges for i in e]
|
||||
self.edges.foreach_set("vertices", edges_flat)
|
||||
@ -326,7 +333,7 @@ class Mesh(bpy_types.ID):
|
||||
else:
|
||||
return f[0], f[1], f[2], 0
|
||||
elif f[2] == 0 or f[3] == 0:
|
||||
return f[3], f[0], f[1], f[2]
|
||||
return f[2], f[3], f[0], f[1]
|
||||
return f
|
||||
|
||||
faces_flat = [v for f in faces for v in treat_face(f)]
|
||||
@ -543,85 +550,34 @@ class Text(bpy_types.ID):
|
||||
import bpy
|
||||
return tuple(obj for obj in bpy.data.objects if self in [cont.text for cont in obj.game.controllers if cont.type == 'PYTHON'])
|
||||
|
||||
import collections
|
||||
|
||||
# values are module: [(cls, path, line), ...]
|
||||
TypeMap = {}
|
||||
# Properties (IDPropertyGroup) are different from types because they need to be registered
|
||||
# before adding sub properties to them, so they are registered on definition
|
||||
# and unregistered on unload
|
||||
PropertiesMap = {}
|
||||
|
||||
# Using our own loading function we set this to false
|
||||
# so when running a script directly in the text editor
|
||||
# registers moduals instantly.
|
||||
_register_immediate = True
|
||||
|
||||
|
||||
def _unregister_module(module, free=True):
|
||||
for t in TypeMap.get(module, ()):
|
||||
try:
|
||||
bpy_types.unregister(t)
|
||||
except:
|
||||
import traceback
|
||||
print("bpy.utils._unregister_module(): Module '%s' failed to unregister class '%s.%s'" % (module, t.__module__, t.__name__))
|
||||
traceback.print_exc()
|
||||
|
||||
if free == True and module in TypeMap:
|
||||
del TypeMap[module]
|
||||
|
||||
for t in PropertiesMap.get(module, ()):
|
||||
try:
|
||||
bpy_types.unregister(t)
|
||||
except:
|
||||
import traceback
|
||||
print("bpy.utils._unload_module(): Module '%s' failed to unregister class '%s.%s'" % (module, t.__module__, t.__name__))
|
||||
traceback.print_exc()
|
||||
|
||||
if free == True and module in PropertiesMap:
|
||||
del PropertiesMap[module]
|
||||
|
||||
|
||||
def _register_module(module):
|
||||
for t in TypeMap.get(module, ()):
|
||||
try:
|
||||
bpy_types.register(t)
|
||||
except:
|
||||
import traceback
|
||||
import sys
|
||||
print("bpy.utils._register_module(): '%s' failed to register class '%s.%s'" % (sys.modules[module].__file__, t.__module__, t.__name__))
|
||||
traceback.print_exc()
|
||||
|
||||
|
||||
class RNAMeta(type):
|
||||
@classmethod
|
||||
def _register_immediate(cls):
|
||||
return _register_immediate
|
||||
|
||||
def __new__(cls, name, bases, classdict, **args):
|
||||
result = type.__new__(cls, name, bases, classdict)
|
||||
if bases and bases[0] != StructRNA:
|
||||
import traceback
|
||||
import weakref
|
||||
module = result.__module__
|
||||
|
||||
ClassMap = TypeMap
|
||||
|
||||
# Register right away if needed
|
||||
if cls._register_immediate():
|
||||
bpy_types.register(result)
|
||||
ClassMap = PropertiesMap
|
||||
|
||||
# first part of packages only
|
||||
if "." in module:
|
||||
module = module[:module.index(".")]
|
||||
|
||||
ClassMap.setdefault(module, []).append(result)
|
||||
sf = traceback.extract_stack(limit=2)[0]
|
||||
|
||||
TypeMap.setdefault(module, []).append((weakref.ref(result), sf[0], sf[1]))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class RNAMetaRegister(RNAMeta, StructMetaIDProp):
|
||||
@classmethod
|
||||
def _register_immediate(cls):
|
||||
return True
|
||||
import collections
|
||||
|
||||
|
||||
class RNAMetaIDProp(RNAMeta, StructMetaIDProp):
|
||||
pass
|
||||
|
||||
|
||||
class OrderedMeta(RNAMeta):
|
||||
@ -638,7 +594,7 @@ class OrderedMeta(RNAMeta):
|
||||
# with doc generation 'self.properties.bl_rna.properties' can fail
|
||||
class Operator(StructRNA, metaclass=OrderedMeta):
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
def __getattribute__(self, attr):
|
||||
properties = StructRNA.path_resolve(self, "properties")
|
||||
bl_rna = getattr(properties, "bl_rna", None)
|
||||
@ -650,16 +606,22 @@ class Operator(StructRNA, metaclass=OrderedMeta):
|
||||
properties = StructRNA.path_resolve(self, "properties")
|
||||
bl_rna = getattr(properties, "bl_rna", None)
|
||||
if bl_rna and attr in bl_rna.properties:
|
||||
setattr(properties, attr, value)
|
||||
return setattr(properties, attr, value)
|
||||
return super().__setattr__(attr, value)
|
||||
|
||||
def __delattr__(self, attr):
|
||||
properties = StructRNA.path_resolve(self, "properties")
|
||||
bl_rna = getattr(properties, "bl_rna", None)
|
||||
if bl_rna and attr in bl_rna.properties:
|
||||
delattr(properties, attr)
|
||||
return delattr(properties, attr)
|
||||
return super().__delattr__(attr)
|
||||
|
||||
def as_keywords(self, ignore=()):
|
||||
""" Return a copy of the properties as a dictionary.
|
||||
"""
|
||||
ignore = ignore + ("rna_type",)
|
||||
return {attr: getattr(self, attr) for attr in self.properties.rna_type.properties.keys() if attr not in ignore}
|
||||
|
||||
|
||||
class Macro(StructRNA, metaclass=OrderedMeta):
|
||||
# bpy_types is imported before ops is defined
|
||||
@ -672,7 +634,7 @@ class Macro(StructRNA, metaclass=OrderedMeta):
|
||||
return ops.macro_define(self, opname)
|
||||
|
||||
|
||||
class IDPropertyGroup(StructRNA, metaclass=RNAMetaRegister):
|
||||
class IDPropertyGroup(StructRNA, metaclass=RNAMetaIDProp):
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
@ -680,6 +642,10 @@ class RenderEngine(StructRNA, metaclass=RNAMeta):
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
class KeyingSetInfo(StructRNA, metaclass=RNAMeta):
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
class _GenericUI:
|
||||
__slots__ = ()
|
||||
|
||||
@ -691,7 +657,12 @@ class _GenericUI:
|
||||
|
||||
def draw_ls(self, context):
|
||||
for func in draw_ls._draw_funcs:
|
||||
func(self, context)
|
||||
# so bad menu functions dont stop the entire menu from drawing.
|
||||
try:
|
||||
func(self, context)
|
||||
except:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
|
||||
draw_funcs = draw_ls._draw_funcs = [cls.draw]
|
||||
cls.draw = draw_ls
|
||||
@ -739,7 +710,7 @@ class Menu(StructRNA, _GenericUI, metaclass=RNAMeta):
|
||||
import bpy.utils
|
||||
|
||||
layout = self.layout
|
||||
|
||||
|
||||
if not searchpaths:
|
||||
layout.label("* Missing Paths *")
|
||||
|
||||
|
@ -1,17 +1,22 @@
|
||||
# Copyright (c) 2009 www.stani.be (GPL license)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
|
@ -1,17 +1,22 @@
|
||||
# Copyright (c) 2009 www.stani.be (GPL license)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8-80 compliant>
|
||||
|
||||
@ -168,7 +173,10 @@ def complete(line, cursor, namespace):
|
||||
'abs(number) -> number\\nReturn the absolute value of the argument.'
|
||||
"""
|
||||
matches = []
|
||||
word = ''
|
||||
scrollback = ''
|
||||
match = RE_DEF_COMPLETE.search(line[:cursor])
|
||||
|
||||
if match:
|
||||
word = match.group(1)
|
||||
func_word = match.group(2)
|
||||
@ -176,7 +184,7 @@ def complete(line, cursor, namespace):
|
||||
func = eval(func_word, namespace)
|
||||
except Exception:
|
||||
func = None
|
||||
scrollback = ''
|
||||
|
||||
if func:
|
||||
doc = get_doc(func)
|
||||
argspec = get_argspec(func, doc=doc)
|
||||
@ -186,7 +194,5 @@ def complete(line, cursor, namespace):
|
||||
elif doc:
|
||||
scrollback += '\n' + doc
|
||||
scrollback = reduce_newlines(scrollback)
|
||||
else:
|
||||
word = ''
|
||||
scrollback = ''
|
||||
|
||||
return matches, word, scrollback
|
||||
|
@ -1,17 +1,22 @@
|
||||
# Copyright (c) 2009 Fernando Perez, www.stani.be (GPL license)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# Original copyright (see docstring):
|
||||
#*****************************************************************************
|
||||
|
@ -1,17 +1,22 @@
|
||||
# Copyright (c) 2009 www.stani.be (GPL license)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8-80 compliant>
|
||||
|
||||
|
@ -1,17 +1,22 @@
|
||||
# Copyright (c) 2009 www.stani.be (GPL license)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Lesser General Public License for more details.
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8-80 compliant>
|
||||
|
||||
@ -75,10 +80,10 @@ def complete(line, cursor, namespace, private=True):
|
||||
# unquoted word -> module or attribute completion
|
||||
word = re_unquoted_word.group(1)
|
||||
if RE_MODULE.match(line):
|
||||
import complete_import
|
||||
from . import complete_import
|
||||
matches = complete_import.complete(line)
|
||||
else:
|
||||
import complete_namespace
|
||||
from . import complete_namespace
|
||||
matches = complete_namespace.complete(word, namespace, private)
|
||||
else:
|
||||
# for now we don't have completers for strings
|
||||
@ -112,7 +117,7 @@ def expand(line, cursor, namespace, private=True):
|
||||
'abs(number) -> number\\nReturn the absolute value of the argument.'
|
||||
"""
|
||||
if line[:cursor].strip().endswith('('):
|
||||
import complete_calltip
|
||||
from . import complete_calltip
|
||||
matches, word, scrollback = complete_calltip.complete(line,
|
||||
cursor, namespace)
|
||||
no_calltip = False
|
||||
|
27
release/scripts/modules/image_utils.py
Normal file
@ -0,0 +1,27 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
|
||||
def image_load(filepath, dirpath, place_holder=False, recursive=False, convert_callback=None):
|
||||
import bpy
|
||||
try:
|
||||
return bpy.data.images.load(filepath)
|
||||
except SystemError:
|
||||
return bpy.data.images.new("Untitled", 128, 128)
|
@ -37,7 +37,7 @@ class ExportHelper:
|
||||
|
||||
self.filepath = blend_filepath + self.filename_ext
|
||||
|
||||
context.window_manager.add_fileselect(self)
|
||||
context.window_manager.fileselect_add(self)
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
def check(self, context):
|
||||
@ -53,7 +53,7 @@ class ImportHelper:
|
||||
filepath = StringProperty(name="File Path", description="Filepath used for importing the file", maxlen=1024, default="", subtype='FILE_PATH')
|
||||
|
||||
def invoke(self, context, event):
|
||||
context.window_manager.add_fileselect(self)
|
||||
context.window_manager.fileselect_add(self)
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
|
||||
|
@ -1,45 +1,81 @@
|
||||
# This file defines a set of methods that are useful for various
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# This file defines a set of methods that are useful for various
|
||||
# Relative Keying Set (RKS) related operations, such as: callbacks
|
||||
# for polling, iterator callbacks, and also generate callbacks.
|
||||
# All of these can be used in conjunction with the others.
|
||||
# for polling, iterator callbacks, and also generate callbacks.
|
||||
# All of these can be used in conjunction with the others.
|
||||
|
||||
__all__ = [
|
||||
"path_add_property",
|
||||
"RKS_POLL_selected_objects",
|
||||
"RKS_POLL_selected_bones",
|
||||
"RKS_POLL_selected_items",
|
||||
"RKS_ITER_selected_item",
|
||||
"RKS_GEN_available",
|
||||
"RKS_GEN_location",
|
||||
"RKS_GEN_rotation",
|
||||
"RKS_GEN_scaling",
|
||||
]
|
||||
|
||||
import bpy
|
||||
|
||||
###########################
|
||||
# General Utilities
|
||||
|
||||
|
||||
# Append the specified property name on the the existing path
|
||||
def path_add_property(path, prop):
|
||||
if len(path):
|
||||
return path + "." + prop;
|
||||
return path + "." + prop
|
||||
else:
|
||||
return prop;
|
||||
return prop
|
||||
|
||||
###########################
|
||||
# Poll Callbacks
|
||||
|
||||
|
||||
# selected objects
|
||||
def RKS_POLL_selected_objects(ksi, context):
|
||||
return context.active_object or len(context.selected_objects);
|
||||
|
||||
return context.active_object or len(context.selected_objects)
|
||||
|
||||
|
||||
# selected bones
|
||||
def RKS_POLL_selected_bones(ksi, context):
|
||||
# we must be in Pose Mode, and there must be some bones selected
|
||||
# we must be in Pose Mode, and there must be some bones selected
|
||||
if (context.active_object) and (context.active_object.mode == 'POSE'):
|
||||
if context.active_pose_bone or len(context.selected_pose_bones):
|
||||
return True;
|
||||
|
||||
# nothing selected
|
||||
return False;
|
||||
return True
|
||||
|
||||
# nothing selected
|
||||
return False
|
||||
|
||||
|
||||
# selected bones or objects
|
||||
def RKS_POLL_selected_items(ksi, context):
|
||||
return RKS_POLL_selected_bones(ksi, context) or RKS_POLL_selected_objects(ksi, context);
|
||||
return RKS_POLL_selected_bones(ksi, context) or RKS_POLL_selected_objects(ksi, context)
|
||||
|
||||
###########################
|
||||
# Iterator Callbacks
|
||||
|
||||
|
||||
# all selected objects or pose bones, depending on which we've got
|
||||
def RKS_ITER_selected_item(ksi, context, ks):
|
||||
if (context.active_object) and (context.active_object.mode == 'POSE'):
|
||||
@ -52,34 +88,49 @@ def RKS_ITER_selected_item(ksi, context, ks):
|
||||
###########################
|
||||
# Generate Callbacks
|
||||
|
||||
|
||||
# 'Available' F-Curves
|
||||
def RKS_GEN_available(ksi, context, ks, data):
|
||||
# try to get the animation data associated with the closest
|
||||
# try to get the animation data associated with the closest
|
||||
# ID-block to the data (neither of which may exist/be easy to find)
|
||||
id_block = data.id_data
|
||||
adt = getattr(id_block, "animation_data", None)
|
||||
|
||||
# there must also be an active action...
|
||||
if adt is None or adt.action is None:
|
||||
return;
|
||||
|
||||
# for each F-Curve, include an path to key it
|
||||
return
|
||||
|
||||
# if we haven't got an ID-block as 'data', try to restrict
|
||||
# paths added to only those which branch off from here
|
||||
# i.e. for bones
|
||||
if id_block != data:
|
||||
basePath = data.path_from_id()
|
||||
else:
|
||||
basePath = None # this is not needed...
|
||||
|
||||
# for each F-Curve, include a path to key it
|
||||
# NOTE: we don't need to set the group settings here
|
||||
for fcu in adt.action.fcurves:
|
||||
ks.paths.add(id_block, fcu.data_path, index=fcu.array_index)
|
||||
|
||||
if basePath:
|
||||
if basePath in fcu.data_path:
|
||||
ks.paths.add(id_block, fcu.data_path, index=fcu.array_index)
|
||||
else:
|
||||
ks.paths.add(id_block, fcu.data_path, index=fcu.array_index)
|
||||
|
||||
# ------
|
||||
|
||||
|
||||
# get ID block and based ID path for transform generators
|
||||
# private function
|
||||
def get_transform_generators_base_info(data):
|
||||
# ID-block for the data
|
||||
# ID-block for the data
|
||||
id_block = data.id_data
|
||||
|
||||
|
||||
# get base path and grouping method/name
|
||||
if isinstance(data, bpy.types.ID):
|
||||
# no path in this case
|
||||
path = ""
|
||||
|
||||
|
||||
# data on ID-blocks directly should get grouped by the KeyingSet
|
||||
grouping = None
|
||||
else:
|
||||
@ -89,29 +140,31 @@ def get_transform_generators_base_info(data):
|
||||
# try to use the name of the data element to group the F-Curve
|
||||
# else fallback on the KeyingSet name
|
||||
grouping = getattr(data, "name", None)
|
||||
|
||||
|
||||
# return the ID-block and the path
|
||||
return id_block, path, grouping
|
||||
|
||||
# Location
|
||||
|
||||
# Location
|
||||
def RKS_GEN_location(ksi, context, ks, data):
|
||||
# get id-block and path info
|
||||
id_block, base_path, grouping = get_transform_generators_base_info(data)
|
||||
|
||||
|
||||
# add the property name to the base path
|
||||
path = path_add_property(base_path, "location")
|
||||
|
||||
|
||||
# add Keying Set entry for this...
|
||||
if grouping:
|
||||
ks.paths.add(id_block, path, group_method='NAMED', group_name=grouping)
|
||||
else:
|
||||
ks.paths.add(id_block, path)
|
||||
|
||||
# Rotation
|
||||
|
||||
# Rotation
|
||||
def RKS_GEN_rotation(ksi, context, ks, data):
|
||||
# get id-block and path info
|
||||
id_block, base_path, grouping= get_transform_generators_base_info(data)
|
||||
|
||||
id_block, base_path, grouping = get_transform_generators_base_info(data)
|
||||
|
||||
# add the property name to the base path
|
||||
# rotation mode affects the property used
|
||||
if data.rotation_mode == 'QUATERNION':
|
||||
@ -120,40 +173,24 @@ def RKS_GEN_rotation(ksi, context, ks, data):
|
||||
path = path_add_property(base_path, "rotation_axis_angle")
|
||||
else:
|
||||
path = path_add_property(base_path, "rotation_euler")
|
||||
|
||||
|
||||
# add Keying Set entry for this...
|
||||
if grouping:
|
||||
ks.paths.add(id_block, path, group_method='NAMED', group_name=grouping)
|
||||
else:
|
||||
ks.paths.add(id_block, path)
|
||||
|
||||
# Scaling
|
||||
|
||||
# Scaling
|
||||
def RKS_GEN_scaling(ksi, context, ks, data):
|
||||
# get id-block and path info
|
||||
id_block, base_path, grouping= get_transform_generators_base_info(data)
|
||||
|
||||
id_block, base_path, grouping = get_transform_generators_base_info(data)
|
||||
|
||||
# add the property name to the base path
|
||||
path = path_add_property(base_path, "scale")
|
||||
|
||||
|
||||
# add Keying Set entry for this...
|
||||
if grouping:
|
||||
ks.paths.add(id_block, path, group_method='NAMED', group_name=grouping)
|
||||
else:
|
||||
ks.paths.add(id_block, path)
|
||||
|
||||
###########################
|
||||
# Un-needed stuff which is here to just shut up the warnings...
|
||||
|
||||
classes = []
|
||||
|
||||
def register():
|
||||
pass
|
||||
|
||||
|
||||
def unregister():
|
||||
pass
|
||||
|
||||
if __name__ == "__main__":
|
||||
register()
|
||||
|
||||
###########################
|
@ -1,560 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from mathutils import Vector
|
||||
|
||||
# TODO, have these in a more general module
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
SPECIAL_TYPES = "root",
|
||||
LAYER_TYPES = "main", "extra", "ik", "fk"
|
||||
|
||||
ORG_LAYERS = [n == 31 for n in range(0, 32)]
|
||||
MCH_LAYERS = [n == 30 for n in range(0, 32)]
|
||||
DEF_LAYERS = [n == 29 for n in range(0, 32)]
|
||||
ROOT_LAYERS = [n == 28 for n in range(0, 32)]
|
||||
|
||||
ORG_PREFIX = "ORG-"
|
||||
MCH_PREFIX = "MCH-"
|
||||
DEF_PREFIX = "DEF-"
|
||||
|
||||
WGT_PREFIX = "WGT-"
|
||||
|
||||
|
||||
class RigifyError(Exception):
|
||||
"""Exception raised for errors in the metarig.
|
||||
"""
|
||||
|
||||
def __init__(self, message):
|
||||
self.message = message
|
||||
|
||||
def __str__(self):
|
||||
return repr(self.message)
|
||||
|
||||
|
||||
def submodule_func_from_type(bone_type):
|
||||
type_pair = bone_type.split(".")
|
||||
|
||||
# 'leg.ik' will look for an ik function in the leg module
|
||||
# 'leg' will look up leg.main
|
||||
if len(type_pair) == 1:
|
||||
type_pair = type_pair[0], "main"
|
||||
|
||||
type_name, func_name = type_pair
|
||||
|
||||
# from rigify import leg
|
||||
try:
|
||||
submod = __import__(name="%s.%s" % (__package__, type_name), fromlist=[type_name])
|
||||
except ImportError:
|
||||
raise RigifyError("python module for type '%s' not found" % type_name)
|
||||
|
||||
reload(submod)
|
||||
return type_name, submod, getattr(submod, func_name)
|
||||
|
||||
|
||||
def get_submodule_types():
|
||||
import os
|
||||
submodules = []
|
||||
files = os.listdir(os.path.dirname(__file__))
|
||||
for f in files:
|
||||
if not f.startswith("_") and f.endswith(".py"):
|
||||
submodules.append(f[:-3])
|
||||
|
||||
return sorted(submodules)
|
||||
|
||||
|
||||
def get_bone_type_options(pbone, type_name):
|
||||
options = {}
|
||||
bone_name = pbone.name
|
||||
for key, value in pbone.items():
|
||||
key_pair = key.rsplit(".")
|
||||
# get all bone properties
|
||||
""""
|
||||
if key_pair[0] == type_name:
|
||||
if len(key_pair) != 2:
|
||||
raise RigifyError("option error for bone '%s', property name was not a pair '%s'" % (bone_name, key_pair))
|
||||
options[key_pair[1]] = value
|
||||
"""
|
||||
options[key] = value
|
||||
|
||||
return options
|
||||
|
||||
|
||||
def get_layer_dict(options):
|
||||
'''
|
||||
Extracts layer info from a bone options dict
|
||||
defaulting to the layer index if not set.
|
||||
'''
|
||||
layer_default = [False] * 32
|
||||
result = {}
|
||||
for i, layer_type in enumerate(LAYER_TYPES):
|
||||
# no matter if its not defined
|
||||
layer_index = options.get("layer_" + layer_type, i + 2)
|
||||
layer = layer_default[:]
|
||||
layer[layer_index-1] = True
|
||||
result[layer_type] = layer
|
||||
return result
|
||||
|
||||
|
||||
def validate_rig(context, obj):
|
||||
'''
|
||||
Makes no changes
|
||||
only runs the metarig definitions and reports errors
|
||||
'''
|
||||
type_found = False
|
||||
|
||||
for pbone in obj.pose.bones:
|
||||
bone_name = pbone.name
|
||||
bone_type = pbone.get("type", "")
|
||||
|
||||
if bone_type:
|
||||
bone_type_list = [bt for bt in bone_type.replace(",", " ").split()]
|
||||
else:
|
||||
bone_type_list = []
|
||||
|
||||
for bone_type in bone_type_list:
|
||||
if bone_type.split(".")[0] in SPECIAL_TYPES:
|
||||
continue
|
||||
|
||||
type_name, submod, type_func = submodule_func_from_type(bone_type)
|
||||
reload(submod)
|
||||
submod.metarig_definition(obj, bone_name)
|
||||
type_found = True
|
||||
|
||||
get_bone_type_options(pbone, bone_type)
|
||||
|
||||
# missing, - check for duplicate root bone.
|
||||
|
||||
if not type_found:
|
||||
raise RigifyError("This rig has no 'type' properties defined on any pose bones, nothing to do")
|
||||
|
||||
|
||||
def generate_rig(context, obj_orig, prefix="ORG-", META_DEF=True):
|
||||
'''
|
||||
Main function for generating
|
||||
'''
|
||||
from collections import OrderedDict
|
||||
import rigify_utils
|
||||
reload(rigify_utils)
|
||||
|
||||
print("Begin...")
|
||||
|
||||
# Not needed but catches any errors before duplicating
|
||||
validate_rig(context, obj_orig)
|
||||
|
||||
use_global_undo = context.user_preferences.edit.use_global_undo
|
||||
context.user_preferences.edit.use_global_undo = False
|
||||
mode_orig = context.mode
|
||||
rest_backup = obj_orig.data.pose_position
|
||||
obj_orig.data.pose_position = 'REST'
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
scene = context.scene
|
||||
|
||||
# Check if the generated rig already exists, so we can
|
||||
# regenerate in the same object. If not, create a new
|
||||
# object to generate the rig in.
|
||||
print("Fetch rig.")
|
||||
try:
|
||||
name = obj_orig["rig_object_name"]
|
||||
except KeyError:
|
||||
name = "rig"
|
||||
|
||||
try:
|
||||
obj = scene.objects[name]
|
||||
except KeyError:
|
||||
obj = bpy.data.objects.new(name, bpy.data.armatures.new(name))
|
||||
scene.objects.link(obj)
|
||||
|
||||
obj.data.pose_position = 'POSE'
|
||||
|
||||
# Get rid of anim data in case the rig already existed
|
||||
print("Clear rig animation data.")
|
||||
obj.animation_data_clear()
|
||||
|
||||
# Select generated rig object
|
||||
obj_orig.select = False
|
||||
obj.select = True
|
||||
scene.objects.active = obj
|
||||
|
||||
# Remove all bones from the generated rig armature.
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
for bone in obj.data.edit_bones:
|
||||
obj.data.edit_bones.remove(bone)
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Create temporary duplicates for merging
|
||||
temp_rig_1 = obj_orig.copy()
|
||||
temp_rig_1.data = obj_orig.data.copy()
|
||||
scene.objects.link(temp_rig_1)
|
||||
|
||||
temp_rig_2 = obj_orig.copy()
|
||||
temp_rig_2.data = obj.data
|
||||
scene.objects.link(temp_rig_2)
|
||||
|
||||
# Select the temp rigs for merging
|
||||
for objt in scene.objects:
|
||||
objt.select = False # deselect all objects
|
||||
temp_rig_1.select = True
|
||||
temp_rig_2.select = True
|
||||
scene.objects.active = temp_rig_2
|
||||
|
||||
# Merge the temporary rigs
|
||||
bpy.ops.object.join(context)
|
||||
|
||||
# Delete the second temp rig
|
||||
bpy.ops.object.delete()
|
||||
|
||||
# Select the generated rig
|
||||
for objt in scene.objects:
|
||||
objt.select = False # deselect all objects
|
||||
obj.select = True
|
||||
scene.objects.active = obj
|
||||
|
||||
# Copy over the pose_bone properties
|
||||
for bone in obj_orig.pose.bones:
|
||||
bone_gen = obj.pose.bones[bone.name]
|
||||
|
||||
# Rotation mode and transform locks
|
||||
bone_gen.rotation_mode = bone.rotation_mode
|
||||
bone_gen.lock_rotation = tuple(bone.lock_rotation)
|
||||
bone_gen.lock_rotation_w = bone.lock_rotation_w
|
||||
bone_gen.lock_rotations_4d = bone.lock_rotations_4d
|
||||
bone_gen.lock_location = tuple(bone.lock_location)
|
||||
bone_gen.lock_scale = tuple(bone.lock_scale)
|
||||
|
||||
# Custom properties
|
||||
for prop in bone.keys():
|
||||
bone_gen[prop] = bone[prop]
|
||||
|
||||
# Copy over bone properties
|
||||
for bone in obj_orig.data.bones:
|
||||
bone_gen = obj.data.bones[bone.name]
|
||||
|
||||
# B-bone stuff
|
||||
bone_gen.bbone_segments = bone.bbone_segments
|
||||
bone_gen.bbone_in = bone.bbone_in
|
||||
bone_gen.bbone_out = bone.bbone_out
|
||||
|
||||
|
||||
# Create proxy deformation rig
|
||||
# TODO: remove this
|
||||
if META_DEF:
|
||||
obj_def = obj_orig.copy()
|
||||
obj_def.data = obj_orig.data.copy()
|
||||
scene.objects.link(obj_def)
|
||||
|
||||
scene.update()
|
||||
print("On to the real work.")
|
||||
|
||||
arm = obj.data
|
||||
|
||||
# prepend the ORG prefix to the bones, and create the base_names mapping
|
||||
base_names = {}
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
for bone in arm.edit_bones:
|
||||
bone_name = bone.name
|
||||
bone.name = ORG_PREFIX + bone_name
|
||||
base_names[bone.name] = bone_name
|
||||
|
||||
# create root_bone
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
edit_bone = obj.data.edit_bones.new("root")
|
||||
root_bone = edit_bone.name
|
||||
edit_bone.head = (0.0, 0.0, 0.0)
|
||||
edit_bone.tail = (0.0, 1.0, 0.0)
|
||||
edit_bone.roll = 0.0
|
||||
edit_bone.layers = ROOT_LAYERS
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# key: bone name
|
||||
# value: {type:definition, ...}
|
||||
# where type is the submodule name - leg, arm etc
|
||||
# and definition is a list of bone names
|
||||
bone_definitions = {}
|
||||
|
||||
# key: bone name
|
||||
# value: [functions, ...]
|
||||
# each function is from the module. eg leg.ik, arm.main
|
||||
bone_typeinfos = {}
|
||||
|
||||
# key: bone name
|
||||
# value: [new_bone_name, ...]
|
||||
# where each bone with a 'type' stores a list of bones that it created
|
||||
# ...needed so we can override the root parent
|
||||
bone_genesis = {}
|
||||
|
||||
|
||||
# inspect all bones and assign their definitions before modifying
|
||||
for pbone in obj.pose.bones:
|
||||
bone_name = pbone.name
|
||||
bone_type = pbone.get("type", "")
|
||||
if bone_type:
|
||||
bone_type_list = [bt for bt in bone_type.replace(",", " ").split()]
|
||||
|
||||
# not essential but means running autorig again wont do anything
|
||||
del pbone["type"]
|
||||
else:
|
||||
bone_type_list = []
|
||||
|
||||
for bone_type in bone_type_list:
|
||||
type_name, submod, type_func = submodule_func_from_type(bone_type)
|
||||
reload(submod)
|
||||
|
||||
bone_def_dict = bone_definitions.setdefault(bone_name, {})
|
||||
|
||||
# Only calculate bone definitions once
|
||||
if type_name not in bone_def_dict:
|
||||
bone_def_dict[type_name] = submod.metarig_definition(obj, bone_name)
|
||||
|
||||
bone_typeinfo = bone_typeinfos.setdefault(bone_name, [])
|
||||
bone_typeinfo.append((type_name, type_func))
|
||||
|
||||
|
||||
# sort bones, not needed but gives more pradictable execution which may be useful in rare cases
|
||||
bones_sorted = obj.pose.bones.values()
|
||||
bones_sorted.sort(key=lambda pbone: pbone.name) # first sort by names
|
||||
bones_sorted.sort(key=lambda pbone: len(pbone.parent_recursive)) # parents before children
|
||||
|
||||
# now we have all the info about bones we can start operating on them
|
||||
# for pbone in obj.pose.bones:
|
||||
for pbone in bones_sorted:
|
||||
bone_name = pbone.name
|
||||
print(bone_name)
|
||||
if bone_name not in bone_typeinfos:
|
||||
continue
|
||||
|
||||
bone_def_dict = bone_definitions[bone_name]
|
||||
|
||||
# Only blend results from the same submodule, eg.
|
||||
# leg.ik and arm.fk could not be blended.
|
||||
results = OrderedDict()
|
||||
|
||||
bone_names_pre = {bone.name for bone in arm.bones}
|
||||
|
||||
for type_name, type_func in bone_typeinfos[bone_name]:
|
||||
print(" " + type_name)
|
||||
# this bones definition of the current typeinfo
|
||||
definition = bone_def_dict[type_name]
|
||||
options = get_bone_type_options(pbone, type_name)
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
ret = type_func(obj, definition, base_names, options)
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
if ret:
|
||||
result_submod = results.setdefault(type_name, [])
|
||||
|
||||
if result_submod and len(result_submod[-1]) != len(ret):
|
||||
raise RigifyError("bone lists not compatible: %s, %s" % (result_submod[-1], ret))
|
||||
|
||||
result_submod.append(ret)
|
||||
|
||||
for result_submod in results.values():
|
||||
# blend 2 chains
|
||||
definition = bone_def_dict[type_name]
|
||||
|
||||
if len(result_submod) == 2:
|
||||
blend_bone_list(obj, definition, result_submod[0], result_submod[1], target_bone=bone_name)
|
||||
|
||||
|
||||
bone_names_post = {bone.name for bone in arm.bones}
|
||||
|
||||
# Store which bones were created from this one
|
||||
bone_genesis[bone_name] = list(bone_names_post - bone_names_pre)
|
||||
|
||||
# need a reverse lookup on bone_genesis so as to know immediately
|
||||
# where a bone comes from
|
||||
bone_genesis_reverse = {}
|
||||
'''
|
||||
for bone_name, bone_children in bone_genesis.items():
|
||||
for bone_child_name in bone_children:
|
||||
bone_genesis_reverse[bone_child_name] = bone_name
|
||||
'''
|
||||
|
||||
|
||||
if root_bone:
|
||||
# assign all new parentless bones to this
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
root_ebone = arm.edit_bones[root_bone]
|
||||
for ebone in arm.edit_bones:
|
||||
bone_name = ebone.name
|
||||
if ebone.parent is None:
|
||||
ebone.parent = root_ebone
|
||||
'''
|
||||
if ebone.parent is None and bone_name not in base_names:
|
||||
# check for override
|
||||
bone_creator = bone_genesis_reverse[bone_name]
|
||||
pbone_creator = obj.pose.bones[bone_creator]
|
||||
root_bone_override = pbone_creator.get("root", "")
|
||||
|
||||
if root_bone_override:
|
||||
root_ebone_tmp = arm.edit_bones[root_bone_override]
|
||||
else:
|
||||
root_ebone_tmp = root_ebone
|
||||
|
||||
ebone.use_connect = False
|
||||
ebone.parent = root_ebone_tmp
|
||||
'''
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
|
||||
if META_DEF:
|
||||
# for pbone in obj_def.pose.bones:
|
||||
for bone_name, bone_name_new in base_names.items():
|
||||
#pbone_from = bone_name
|
||||
pbone = obj_def.pose.bones[bone_name_new]
|
||||
|
||||
con = pbone.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = bone_name
|
||||
|
||||
if not pbone.bone.use_connect:
|
||||
con = pbone.constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = bone_name
|
||||
|
||||
# would be 'REST' from when copied
|
||||
obj_def.data.pose_position = 'POSE'
|
||||
|
||||
# todo - make a more generic system?
|
||||
layer_tot = [False] * 32
|
||||
layer_last = layer_tot[:]
|
||||
layer_last[31] = True
|
||||
layer_second_last = layer_tot[:]
|
||||
layer_second_last[30] = True
|
||||
|
||||
for bone_name, bone in arm.bones.items():
|
||||
bone.use_deform = False # Non DEF bones shouldn't deform
|
||||
if bone_name.startswith(ORG_PREFIX):
|
||||
bone.layers = ORG_LAYERS
|
||||
elif bone_name.startswith(MCH_PREFIX): # XXX fixme
|
||||
bone.layers = MCH_LAYERS
|
||||
elif bone_name.startswith(DEF_PREFIX): # XXX fixme
|
||||
bone.layers = DEF_LAYERS
|
||||
bone.use_deform = True
|
||||
else:
|
||||
# Assign bone appearance if there is a widget for it
|
||||
obj.pose.bones[bone_name].custom_shape = context.scene.objects.get(WGT_PREFIX + bone_name)
|
||||
|
||||
layer_tot[:] = [max(lay) for lay in zip(layer_tot, bone.layers)]
|
||||
|
||||
# Only for demo'ing
|
||||
layer_show = [a and not (b or c or d) for a, b, c, d in zip(layer_tot, ORG_LAYERS, MCH_LAYERS, DEF_LAYERS)]
|
||||
arm.layers = layer_show
|
||||
|
||||
|
||||
# obj.hide = True
|
||||
obj.data.show_axes = False
|
||||
|
||||
bpy.ops.object.mode_set(mode=mode_orig)
|
||||
obj_orig.data.pose_position = rest_backup
|
||||
obj.data.pose_position = 'POSE'
|
||||
obj_orig.data.pose_position = 'POSE'
|
||||
context.user_preferences.edit.use_global_undo = use_global_undo
|
||||
|
||||
print("Done.\n")
|
||||
|
||||
return obj
|
||||
|
||||
|
||||
def generate_test(context, metarig_type="", GENERATE_FINAL=True):
|
||||
import os
|
||||
new_objects = []
|
||||
|
||||
scene = context.scene
|
||||
|
||||
def create_empty_armature(name):
|
||||
armature = bpy.data.armatures.new(name)
|
||||
obj_new = bpy.data.objects.new(name, armature)
|
||||
scene.objects.link(obj_new)
|
||||
scene.objects.active = obj_new
|
||||
for obj in scene.objects:
|
||||
obj.select = False
|
||||
obj_new.select = True
|
||||
|
||||
for module_name in get_submodule_types():
|
||||
if (metarig_type and module_name != metarig_type):
|
||||
continue
|
||||
|
||||
# XXX workaround!, problem with updating the pose matrix.
|
||||
if module_name == "delta":
|
||||
continue
|
||||
|
||||
type_name, submodule, func = submodule_func_from_type(module_name)
|
||||
|
||||
metarig_template = getattr(submodule, "metarig_template", None)
|
||||
|
||||
if metarig_template:
|
||||
create_empty_armature("meta_" + module_name) # sets active
|
||||
metarig_template()
|
||||
obj = context.active_object
|
||||
obj.location = scene.cursor_location
|
||||
|
||||
if GENERATE_FINAL:
|
||||
obj_new = generate_rig(context, obj)
|
||||
new_objects.append((obj, obj_new))
|
||||
else:
|
||||
new_objects.append((obj, None))
|
||||
else:
|
||||
print("note: rig type '%s' has no metarig_template(), can't test this" % module_name)
|
||||
|
||||
return new_objects
|
||||
|
||||
|
||||
def generate_test_all(context, GRAPH=False):
|
||||
import rigify
|
||||
import rigify_utils
|
||||
import graphviz_export
|
||||
import os
|
||||
reload(rigify)
|
||||
reload(rigify_utils)
|
||||
reload(graphviz_export)
|
||||
|
||||
new_objects = rigify.generate_test(context)
|
||||
|
||||
if GRAPH:
|
||||
base_name = os.path.splitext(bpy.data.filepath)[0]
|
||||
for obj, obj_new in new_objects:
|
||||
for obj in (obj, obj_new):
|
||||
fn = base_name + "-" + bpy.path.clean_name(obj.name)
|
||||
|
||||
path_dot = fn + ".dot"
|
||||
path_png = fn + ".png"
|
||||
saved = graphviz_export.graph_armature(obj, path_dot, CONSTRAINTS=True, DRIVERS=True)
|
||||
|
||||
#if saved:
|
||||
# os.system("dot -Tpng %s > %s; eog %s" % (path_dot, path_png, path_png))
|
||||
|
||||
i = 0
|
||||
for obj, obj_new in new_objects:
|
||||
obj.data.draw_type = 'STICK'
|
||||
obj.location[1] += i
|
||||
obj_new.location[1] += i
|
||||
obj_new.select = False
|
||||
obj.select = True
|
||||
i += 4
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
generate_rig(bpy.context, bpy.context.active_object)
|
@ -1,396 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from math import radians, pi
|
||||
from rigify import RigifyError, ORG_PREFIX
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple, add_pole_target_bone, add_stretch_to, blend_bone_list, get_side_name, get_base_name
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
from mathutils import Vector
|
||||
|
||||
METARIG_NAMES = "shoulder", "arm", "forearm", "hand"
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('shoulder')
|
||||
bone.head[:] = 0.0000, -0.0425, 0.0000
|
||||
bone.tail[:] = 0.0942, -0.0075, 0.0333
|
||||
bone.roll = -0.2227
|
||||
bone.use_connect = False
|
||||
bone = arm.edit_bones.new('upper_arm')
|
||||
bone.head[:] = 0.1066, -0.0076, -0.0010
|
||||
bone.tail[:] = 0.2855, 0.0206, -0.0104
|
||||
bone.roll = 1.6152
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['shoulder']
|
||||
bone = arm.edit_bones.new('forearm')
|
||||
bone.head[:] = 0.2855, 0.0206, -0.0104
|
||||
bone.tail[:] = 0.4550, -0.0076, -0.0023
|
||||
bone.roll = 1.5153
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['upper_arm']
|
||||
bone = arm.edit_bones.new('hand')
|
||||
bone.head[:] = 0.4550, -0.0076, -0.0023
|
||||
bone.tail[:] = 0.5423, -0.0146, -0.0131
|
||||
bone.roll = -3.0083
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['forearm']
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['upper_arm']
|
||||
pbone['type'] = 'arm_biped'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
    """Resolve the 4 bones of the arm chain from the tagged upper-arm bone.

    obj            -- armature object
    orig_bone_name -- name of the bone tagged with type 'arm_biped' (the arm)
    Returns the (shoulder, arm, forearm, hand) bone names via mt.names().
    Raises RigifyError if the arm has no parent or the hand is ambiguous.
    """
    mt = bone_class_instance(obj, METARIG_NAMES)  # meta
    mt.arm = orig_bone_name
    mt.update()

    # The shoulder is simply the arm's parent.
    mt.shoulder_p = mt.arm_p.parent

    if not mt.shoulder_p:
        raise RigifyError("could not find '%s' parent, skipping:" % orig_bone_name)

    mt.shoulder = mt.shoulder_p.name

    # We could have some bones attached, find the bone that has this as its 2nd parent
    hands = []
    for pbone in obj.pose.bones:
        index = pbone.parent_index(mt.arm_p)
        # hand = 2 levels below the arm, with a fully connected chain between.
        if index == 2 and pbone.bone.use_connect and pbone.bone.parent.use_connect:
            hands.append(pbone)

    if len(hands) != 1:
        raise RigifyError("Found %s possible hands attached to this arm, expected 1 from bone: %s" % ([pbone.name for pbone in hands], orig_bone_name))

    # first add the 2 new bones
    mt.hand_p = hands[0]
    mt.hand = mt.hand_p.name

    # The forearm sits between arm and hand.
    mt.forearm_p = mt.hand_p.parent
    mt.forearm = mt.forearm_p.name

    return mt.names()
def ik(obj, definitions, base_names, options):
    """Build the IK control chain for the biped arm.

    definitions -- (shoulder, arm, forearm, hand) ORG bone names
    base_names  -- mapping of ORG names to user-facing base names
    options     -- per-bone options; honours "elbow_parent" and "ik_layer"
    Returns [None, arm_ik, forearm_ik, hand_ik] (shoulder is not blended).
    Must be called in edit mode; leaves the object in edit mode.
    """

    arm = obj.data

    mt = bone_class_instance(obj, METARIG_NAMES)
    mt.shoulder, mt.arm, mt.forearm, mt.hand = definitions
    mt.update()

    ik = bone_class_instance(obj, ["pole", "pole_vis", "hand_vis"])
    # Duplicate arm/forearm/hand as hidden mechanism bones (shoulder excluded).
    ik_chain = mt.copy(to_fmt="MCH-%s_ik", base_names=base_names, exclude_attrs=["shoulder"])

    # IK needs no parent_index
    ik_chain.hand_e.use_connect = False
    ik_chain.hand_e.parent = None
    ik_chain.hand_e.use_local_location = False
    ik_chain.rename("hand", get_base_name(base_names[mt.hand]) + "_ik" + get_side_name(mt.hand))

    ik_chain.arm_e.use_connect = False
    ik_chain.arm_e.parent = mt.shoulder_e

    # Add the bone used for the arm's pole target (elbow).
    #ik.pole = add_pole_target_bone(obj, mt.forearm, get_base_name(base_names[mt.forearm]) + "_target" + get_side_name(mt.forearm), mode='ZAVERAGE')
    ik.pole = add_pole_target_bone(obj, mt.forearm, "elbow_target" + get_side_name(mt.forearm), mode='ZAVERAGE')

    ik.update()
    ik.pole_e.use_local_location = False

    # option: elbow_parent — reparent the pole target to a user-chosen bone.
    elbow_parent_name = options.get("elbow_parent", "")

    if elbow_parent_name:
        try:
            elbow_parent_e = arm.edit_bones[ORG_PREFIX + elbow_parent_name]
        except:
            # TODO, old/new parent mapping
            raise RigifyError("parent bone from property 'arm_biped_generic.elbow_parent' not found '%s'" % elbow_parent_name)
        ik.pole_e.parent = elbow_parent_e

    # update bones after this!
    # Dashed "visualisation" bones stretching from the ORG bones to controls.
    ik.hand_vis = add_stretch_to(obj, mt.hand, ik_chain.hand, "VIS-%s_ik" % base_names[mt.hand])
    ik.pole_vis = add_stretch_to(obj, mt.forearm, ik.pole, "VIS-%s_ik" % base_names[mt.forearm])

    ik.update()
    ik.hand_vis_e.hide_select = True
    ik.pole_vis_e.hide_select = True

    bpy.ops.object.mode_set(mode='OBJECT')

    mt.update()
    ik.update()
    ik_chain.update()

    # Set IK dof: the elbow hinges on X only.
    ik_chain.forearm_p.lock_ik_x = False
    ik_chain.forearm_p.lock_ik_y = True
    ik_chain.forearm_p.lock_ik_z = True

    con = ik_chain.forearm_p.constraints.new('IK')
    con.target = obj
    con.subtarget = ik_chain.hand
    con.pole_target = obj
    con.pole_subtarget = ik.pole

    con.use_tail = True
    con.use_stretch = True
    con.use_target = True
    con.use_rotation = False
    con.chain_count = 2  # solve arm + forearm only
    con.pole_angle = -pi/2

    # last step setup layers
    if "ik_layer" in options:
        # Single layer chosen by index; a 32-entry bool mask.
        layer = [n==options["ik_layer"] for n in range(0,32)]
    else:
        layer = list(mt.arm_b.layers)
    ik_chain.hand_b.layers = layer
    ik.hand_vis_b.layers = layer
    ik.pole_b.layers = layer
    ik.pole_vis_b.layers = layer

    bpy.ops.object.mode_set(mode='EDIT')
    # don't blend the shoulder
    return [None] + ik_chain.names()
def fk(obj, definitions, base_names, options):
    """Build the FK control chain for the biped arm.

    definitions -- (shoulder, arm, forearm, hand) ORG bone names
    base_names  -- mapping of ORG names to user-facing base names
    options     -- honours "hand_roll" (degrees) and "fk_layer"
    Returns (None, arm_fk, forearm_fk, hand_fk) — the shoulder is not blended.
    Must be called in edit mode; leaves the object in edit mode.
    """

    arm = obj.data

    mt = bone_class_instance(obj, METARIG_NAMES)
    mt.shoulder, mt.arm, mt.forearm, mt.hand = definitions
    mt.update()

    ex = bone_class_instance(obj, ["socket", "hand_delta"])
    fk_chain = mt.copy(base_names=base_names)

    # shoulder is used as a hinge
    fk_chain.rename("shoulder", "MCH-%s_hinge" % base_names[mt.arm])
    fk_chain.shoulder_e.translate(Vector((0.0, fk_chain.shoulder_e.length / 2, 0.0)))

    # upper arm constrains to this.
    ex.socket_e = copy_bone_simple(arm, mt.arm, "MCH-%s_socket" % base_names[mt.arm])
    ex.socket = ex.socket_e.name
    ex.socket_e.use_connect = False
    ex.socket_e.parent = mt.shoulder_e
    ex.socket_e.length *= 0.5

    # insert the 'MCH-delta_hand', between the forearm and the hand
    # copies forearm rotation
    ex.hand_delta_e = copy_bone_simple(arm, fk_chain.hand, "MCH-delta_%s" % base_names[mt.hand], parent=True)
    ex.hand_delta = ex.hand_delta_e.name
    ex.hand_delta_e.length *= 0.5
    ex.hand_delta_e.use_connect = False
    if "hand_roll" in options:
        # option value is in degrees; bone roll is radians.
        ex.hand_delta_e.roll += radians(options["hand_roll"])

    fk_chain.hand_e.use_connect = False
    fk_chain.hand_e.parent = ex.hand_delta_e

    bpy.ops.object.mode_set(mode='OBJECT')

    mt.update()
    ex.update()
    fk_chain.update()

    # Set rotation modes and axis locks
    fk_chain.forearm_p.rotation_mode = 'XYZ'
    fk_chain.forearm_p.lock_rotation = (False, True, True)  # elbow bends on X only
    fk_chain.hand_p.rotation_mode = 'ZXY'
    fk_chain.arm_p.lock_location = True, True, True

    # Upper arm follows the shoulder socket's location (not rotation).
    con = fk_chain.arm_p.constraints.new('COPY_LOCATION')
    con.target = obj
    con.subtarget = ex.socket

    fk_chain.hand_p.lock_location = True, True, True
    con = ex.hand_delta_p.constraints.new('COPY_ROTATION')
    con.target = obj
    con.subtarget = fk_chain.forearm

    def hinge_setup():
        # Hinge constraint & driver: a "hinge" custom prop on the FK arm
        # drives (inversely) how much the chain follows shoulder rotation.
        con = fk_chain.shoulder_p.constraints.new('COPY_ROTATION')
        con.name = "hinge"
        con.target = obj
        con.subtarget = mt.shoulder
        driver_fcurve = con.driver_add("influence")
        driver = driver_fcurve.driver

        controller_path = fk_chain.arm_p.path_from_id()
        # add custom prop
        fk_chain.arm_p["hinge"] = 0.0
        prop = rna_idprop_ui_prop_get(fk_chain.arm_p, "hinge", create=True)
        prop["soft_min"] = 0.0
        prop["soft_max"] = 1.0

        # *****
        driver = driver_fcurve.driver
        driver.type = 'AVERAGE'

        var = driver.variables.new()
        var.name = "hinge"
        var.targets[0].id_type = 'OBJECT'
        var.targets[0].id = obj
        var.targets[0].data_path = controller_path + '["hinge"]'

        # Generator modifier inverts the prop: influence = 1.0 - hinge.
        mod = driver_fcurve.modifiers[0]
        mod.poly_order = 1
        mod.coefficients[0] = 1.0
        mod.coefficients[1] = -1.0

    hinge_setup()

    # last step setup layers
    if "fk_layer" in options:
        layer = [n==options["fk_layer"] for n in range(0,32)]
    else:
        layer = list(mt.arm_b.layers)
    fk_chain.arm_b.layers = layer
    fk_chain.forearm_b.layers = layer
    fk_chain.hand_b.layers = layer

    # Forearm was getting wrong roll somehow. Hack to fix that.
    bpy.ops.object.mode_set(mode='EDIT')
    fk_chain.update()
    mt.update()
    fk_chain.forearm_e.roll = mt.forearm_e.roll
    bpy.ops.object.mode_set(mode='OBJECT')

    bpy.ops.object.mode_set(mode='EDIT')
    return None, fk_chain.arm, fk_chain.forearm, fk_chain.hand
def deform(obj, definitions, base_names, options):
    """Build the deformation bones for the biped arm.

    Splits upper arm and forearm into two halves each (for twist
    distribution), adds a hand-driven twist helper, and a hand deform bone.
    definitions[1..3] are the ORG arm, forearm and hand bone names.
    Returns (uarm1, uarm2, farm1, farm2, hand) deform bone names.
    Leaves the object in edit mode.
    """
    bpy.ops.object.mode_set(mode='EDIT')

    # Create upper arm bones: two bones, each half of the upper arm.
    uarm1 = copy_bone_simple(obj.data, definitions[1], "DEF-%s.01" % base_names[definitions[1]], parent=True)
    uarm2 = copy_bone_simple(obj.data, definitions[1], "DEF-%s.02" % base_names[definitions[1]], parent=True)
    uarm1.use_connect = False
    uarm2.use_connect = False
    uarm2.parent = uarm1
    center = uarm1.center
    uarm1.tail = center
    uarm2.head = center

    # Create forearm bones: two bones, each half of the forearm.
    farm1 = copy_bone_simple(obj.data, definitions[2], "DEF-%s.01" % base_names[definitions[2]], parent=True)
    farm2 = copy_bone_simple(obj.data, definitions[2], "DEF-%s.02" % base_names[definitions[2]], parent=True)
    farm1.use_connect = False
    farm2.use_connect = False
    farm2.parent = farm1
    center = farm1.center
    farm1.tail = center
    farm2.head = center

    # Create twist bone: parented to the hand so it carries wrist twist.
    twist = copy_bone_simple(obj.data, definitions[2], "MCH-arm_twist")
    twist.use_connect = False
    twist.parent = obj.data.edit_bones[definitions[3]]
    twist.length /= 2

    # Create hand bone
    hand = copy_bone_simple(obj.data, definitions[3], "DEF-%s" % base_names[definitions[3]], parent=True)

    # Store names before leaving edit mode
    uarm1_name = uarm1.name
    uarm2_name = uarm2.name
    farm1_name = farm1.name
    farm2_name = farm2.name
    twist_name = twist.name
    hand_name = hand.name

    # Leave edit mode
    bpy.ops.object.mode_set(mode='OBJECT')

    # Get the pose bones
    uarm1 = obj.pose.bones[uarm1_name]
    uarm2 = obj.pose.bones[uarm2_name]
    farm1 = obj.pose.bones[farm1_name]
    farm2 = obj.pose.bones[farm2_name]
    twist = obj.pose.bones[twist_name]
    hand = obj.pose.bones[hand_name]

    # Upper arm constraints
    con = uarm1.constraints.new('DAMPED_TRACK')
    con.name = "trackto"
    con.target = obj
    con.subtarget = definitions[2]

    con = uarm1.constraints.new('COPY_SCALE')
    # NOTE(review): name "trackto" on a COPY_SCALE looks like a copy-paste
    # slip; left unchanged in case anything looks constraints up by name.
    con.name = "trackto"
    con.target = obj
    con.subtarget = definitions[1]

    con = uarm2.constraints.new('COPY_ROTATION')
    con.name = "copy_rot"
    con.target = obj
    con.subtarget = definitions[1]

    # Forearm constraints
    con = farm1.constraints.new('COPY_ROTATION')
    con.name = "copy_rot"
    con.target = obj
    con.subtarget = definitions[2]

    con = farm1.constraints.new('COPY_SCALE')
    # NOTE(review): "copy_rot" on a COPY_SCALE — same copy-paste slip as above.
    con.name = "copy_rot"
    con.target = obj
    con.subtarget = definitions[2]

    # Lower forearm half follows the hand-parented twist helper.
    con = farm2.constraints.new('COPY_ROTATION')
    con.name = "copy_rot"
    con.target = obj
    con.subtarget = twist.name

    con = farm2.constraints.new('DAMPED_TRACK')
    con.name = "trackto"
    con.target = obj
    con.subtarget = definitions[3]

    # Hand constraint
    con = hand.constraints.new('COPY_ROTATION')
    con.name = "copy_rot"
    con.target = obj
    con.subtarget = definitions[3]

    bpy.ops.object.mode_set(mode='EDIT')
    return (uarm1_name, uarm2_name, farm1_name, farm2_name, hand_name)
def main(obj, bone_definition, base_names, options):
    """Entry point for the arm_biped rig type: build FK, IK and deform
    chains, then blend FK/IK via an "ik" custom property on the IK hand."""
    bones_fk = fk(obj, bone_definition, base_names, options)
    bones_ik = ik(obj, bone_definition, base_names, options)
    bones_deform = deform(obj, bone_definition, base_names, options)

    bpy.ops.object.mode_set(mode='OBJECT')
    # bones_ik[3] is the IK hand control, which carries the fk/ik slider.
    blend_bone_list(obj, bone_definition, bones_fk, bones_ik, target_bone=bones_ik[3], target_prop="ik", blend_default=0.0)
@ -1,112 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple
|
||||
|
||||
METARIG_NAMES = ("cpy",)
|
||||
|
||||
|
||||
def metarig_template():
    """Build the single-bone example metarig for the 'copy' rig type.
    Generated by rigify.write_meta_rig."""
    # generated by rigify.write_meta_rig
    bpy.ops.object.mode_set(mode='EDIT')
    obj = bpy.context.active_object
    arm = obj.data
    bone = arm.edit_bones.new('Bone')
    bone.head[:] = 0.0000, 0.0000, 0.0000
    bone.tail[:] = 0.0000, 0.0000, 1.0000
    bone.roll = 0.0000
    bone.use_connect = False

    bpy.ops.object.mode_set(mode='OBJECT')
    # Tag the bone so rigify runs this module's generator on it.
    pbone = obj.pose.bones['Bone']
    pbone['type'] = 'copy'
def metarig_definition(obj, orig_bone_name):
    """A 'copy' rig is defined by exactly one bone: the tagged bone itself.

    obj            -- armature object (unused; kept for the rigify interface)
    orig_bone_name -- name of the tagged bone
    Returns a 1-tuple containing the bone name.
    """
    return orig_bone_name,
def deform(obj, definitions, base_names, options):
    """Create the deform bone for the 'copy' rig: a DEF- duplicate of the
    ORG bone, constrained to follow it exactly. Returns (bone_name,).
    Leaves the object in object mode."""
    bpy.ops.object.mode_set(mode='EDIT')

    # Create deform bone.
    bone = copy_bone_simple(obj.data, definitions[0], "DEF-%s" % base_names[definitions[0]], parent=True)

    # Store name before leaving edit mode
    bone_name = bone.name

    # Leave edit mode
    bpy.ops.object.mode_set(mode='OBJECT')

    # Get the pose bone
    bone = obj.pose.bones[bone_name]

    # Constrain to the original bone
    con = bone.constraints.new('COPY_TRANSFORMS')
    con.name = "copy_loc"
    con.target = obj
    con.subtarget = definitions[0]

    return (bone_name,)
def control(obj, definitions, base_names, options):
    """Create the control bone for the 'copy' rig.

    Duplicates the ORG bone as a user-visible control, constrains the ORG
    bone to follow it, and mirrors the ORG bone's rotation mode, transform
    locks and layers onto the control. Returns (org_bone_name,).
    """
    bpy.ops.object.mode_set(mode='EDIT')

    arm = obj.data
    mt = bone_class_instance(obj, METARIG_NAMES)
    mt.cpy = definitions[0]
    mt.update()
    cp = bone_class_instance(obj, ["cpy"])
    cp.cpy_e = copy_bone_simple(arm, mt.cpy, base_names[mt.cpy], parent=True)
    cp.cpy = cp.cpy_e.name

    bpy.ops.object.mode_set(mode='OBJECT')

    cp.update()
    mt.update()

    # ORG bone follows the new control.
    con = mt.cpy_p.constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = cp.cpy

    # Rotation mode and axis locks — copied so the control behaves like
    # the metarig bone the user set up.
    cp.cpy_p.rotation_mode = mt.cpy_p.rotation_mode
    cp.cpy_p.lock_location = tuple(mt.cpy_p.lock_location)
    cp.cpy_p.lock_rotations_4d = mt.cpy_p.lock_rotations_4d
    cp.cpy_p.lock_rotation = tuple(mt.cpy_p.lock_rotation)
    cp.cpy_p.lock_rotation_w = mt.cpy_p.lock_rotation_w
    cp.cpy_p.lock_scale = tuple(mt.cpy_p.lock_scale)

    # Layers
    cp.cpy_b.layers = list(mt.cpy_b.layers)

    return (mt.cpy,)
def main(obj, bone_definition, base_names, options):
    """Entry point for the 'copy' rig type: one control bone plus one
    deform bone, both shadowing the original. Returns (org_bone_name,)."""
    # Create control bone
    cpy = control(obj, bone_definition, base_names, options)[0]
    # Create deform bone
    deform(obj, bone_definition, base_names, options)

    return (cpy,)
@ -1,162 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
|
||||
# not used, defined for completeness
|
||||
METARIG_NAMES = tuple()
|
||||
|
||||
|
||||
def metarig_template():
    """Build the example metarig for the 'delta' rig type
    (bonesker -> delta -> boney). Generated by rigify.write_meta_rig."""
    # generated by rigify.write_meta_rig
    bpy.ops.object.mode_set(mode='EDIT')
    obj = bpy.context.active_object
    arm = obj.data
    bone = arm.edit_bones.new('bonesker')
    bone.head[:] = 0.0000, 0.0000, 0.0000
    bone.tail[:] = -0.0000, 0.7382, 0.1895
    bone.roll = -0.0000
    bone.use_connect = False
    bone = arm.edit_bones.new('delta')
    bone.head[:] = -0.0497, 0.8414, 0.3530
    bone.tail[:] = -0.2511, 1.1588, 0.9653
    bone.roll = 2.6044
    bone.use_connect = False
    bone.parent = arm.edit_bones['bonesker']
    bone = arm.edit_bones.new('boney')
    bone.head[:] = 0.7940, 2.5592, 0.4134
    bone.tail[:] = 0.7940, 3.3975, 0.4890
    bone.roll = 3.1416
    bone.use_connect = False
    bone.parent = arm.edit_bones['delta']

    bpy.ops.object.mode_set(mode='OBJECT')
    # The middle bone is tagged as the delta.
    pbone = obj.pose.bones['delta']
    pbone['type'] = 'delta'
def metarig_definition(obj, orig_bone_name):
    '''
    The bone given is the delta; it must have exactly one child (the bone
    the delta offset will be applied to) and must not be connected to its
    parent. Returns [delta_name, child_name].
    Raises RigifyError on either violation.
    '''
    arm = obj.data
    delta = arm.bones[orig_bone_name]
    children = delta.children

    if len(children) != 1:
        raise RigifyError("only 1 child supported for delta on bone '%s'" % delta.name)

    if delta.use_connect:
        raise RigifyError("bone cannot be connected to its parent '%s'" % delta.name)

    bone_definition = [delta.name, children[0].name]

    return bone_definition
def main(obj, bone_definition, base_names, options):
    '''
    Use this bone to define a delta thats applied to its child in pose mode.

    Captures the pose-space transforms of the delta and its child, then
    adds constant (poly_order 1, slope 0) generator drivers on the delta's
    rotation_euler and location so the delta permanently offsets the child.
    Returns None (no fk/ik blending for this rig type).
    '''
    mode_orig = obj.mode
    bpy.ops.object.mode_set(mode='OBJECT')

    delta_name, child_name = bone_definition

    delta_pbone = obj.pose.bones[delta_name]

    arm = obj.data
    child_pbone = obj.pose.bones[child_name]

    # Snapshot pose-space values before switching modes invalidates them.
    delta_phead = delta_pbone.head.copy()
    delta_ptail = delta_pbone.tail.copy()
    delta_pmatrix = delta_pbone.matrix.copy()

    child_phead = child_pbone.head.copy()
    child_ptail = child_pbone.tail.copy()
    child_pmatrix = child_pbone.matrix.copy()

    children = delta_pbone.children

    bpy.ops.object.mode_set(mode='EDIT')

    delta_ebone = arm.edit_bones[delta_name]
    child_ebone = arm.edit_bones[child_name]

    delta_head = delta_ebone.head.copy()
    delta_tail = delta_ebone.tail.copy()

    child_head = child_ebone.head.copy()
    child_tail = child_ebone.tail.copy()

    #arm.edit_bones.remove(delta_ebone)
    #del delta_ebone # cant use this
    del child_pbone  # stale reference once we left pose context

    bpy.ops.object.mode_set(mode='OBJECT')

    # Move the child bone to the deltas location
    obj.animation_data_create()
    delta_pbone = obj.pose.bones[delta_name]  # re-fetch after mode switch
    # child_pbone = obj.pose.bones[child_name]

    # ------------------- drivers

    delta_pbone.rotation_mode = 'XYZ'

    # NOTE(review): legacy mathutils API — invert() mutates in place and
    # returns self; rotation_part()/to_euler() are the 2.5-era names.
    rot = delta_pmatrix.invert().rotation_part() * child_pmatrix.rotation_part()
    rot = rot.invert().to_euler()

    # One driver per euler channel, each pinned to a constant via the
    # fcurve's generator modifier.
    fcurve_drivers = delta_pbone.driver_add("rotation_euler", -1)
    for i, fcurve_driver in enumerate(fcurve_drivers):
        driver = fcurve_driver.driver
        driver.type = 'AVERAGE'
        #mod = fcurve_driver.modifiers.new('GENERATOR')
        mod = fcurve_driver.modifiers[0]
        mod.poly_order = 1
        mod.coefficients[0] = rot[i]
        mod.coefficients[1] = 0.0

    # tricky, find the transform to drive the bone to this location.
    delta_head_offset = child_pmatrix.rotation_part() * (delta_phead - child_phead)

    fcurve_drivers = delta_pbone.driver_add("location", -1)
    for i, fcurve_driver in enumerate(fcurve_drivers):
        driver = fcurve_driver.driver
        driver.type = 'AVERAGE'
        #mod = fcurve_driver.modifiers.new('GENERATOR')
        mod = fcurve_driver.modifiers[0]
        mod.poly_order = 1
        mod.coefficients[0] = delta_head_offset[i]
        mod.coefficients[1] = 0.0

    bpy.ops.object.mode_set(mode='EDIT')

    bpy.ops.object.mode_set(mode=mode_orig)

    # no blending
    return None
@ -1,405 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
from mathutils import Vector
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import copy_bone_simple
|
||||
|
||||
#METARIG_NAMES = ("cpy",)
|
||||
RIG_TYPE = "eye_balls"
|
||||
|
||||
def addget_shape_key(obj, name="Key"):
    """ Fetches a shape key, or creates it if it doesn't exist

    obj  -- mesh object to add/fetch the shape key on
    name -- shape key name
    Also creates the "Basis" key first when the object has no shape keys.
    Returns the shape key datablock.
    """
    # Create a shapekey set if it doesn't already exist
    if obj.data.shape_keys is None:
        shape = obj.add_shape_key(name="Basis", from_mix=False)
        obj.active_shape_key_index = 0

    # Get the shapekey, or create it if it doesn't already exist
    if name in obj.data.shape_keys.keys:
        shape_key = obj.data.shape_keys.keys[name]
    else:
        shape_key = obj.add_shape_key(name=name, from_mix=False)

    return shape_key
def addget_shape_key_driver(obj, name="Key"):
    """ Fetches the driver for the shape key, or creates it if it doesn't
    already exist.

    obj  -- mesh object whose shape key "value" is driven
    name -- shape key name
    Returns (fcurve, new) where new is True when the driver was just created.
    Assumes the named shape key exists (see addget_shape_key).
    """
    driver_path = 'keys["' + name + '"].value'
    fcurve = None
    driver = None
    new = False
    # Look for an existing driver fcurve on this shape key's value.
    if obj.data.shape_keys.animation_data is not None:
        for driver_s in obj.data.shape_keys.animation_data.drivers:
            if driver_s.data_path == driver_path:
                fcurve = driver_s
    if fcurve is None:
        fcurve = obj.data.shape_keys.keys[name].driver_add("value")
        fcurve.driver.type = 'AVERAGE'
        new = True

    return fcurve, new
def create_shape_and_driver(obj, bone, meshes, shape_name, var_name, var_path, expression):
    """ Creates/gets a shape key and sets up a driver for it.

    obj = armature object
    bone = driving bone name
    meshes = list of meshes to create the shapekey/driver on
    shape_name = name of the shape key
    var_name = name of the driving variable
    var_path = path to the property on the bone to drive with
    expression = python expression for the driver
    """
    pb = obj.pose.bones
    bpy.ops.object.mode_set(mode='OBJECT')

    for mesh_name in meshes:
        mesh_obj = bpy.data.objects[mesh_name]

        # Add/get the shape key
        shape = addget_shape_key(mesh_obj, name=shape_name)

        # Add/get the shape key driver
        fcurve, a = addget_shape_key_driver(mesh_obj, name=shape_name)

        # Set up the driver (scripted so the expression can scale the prop)
        driver = fcurve.driver
        driver.type = 'SCRIPTED'
        driver.expression = expression

        # Get the variable, or create it if it doesn't already exist
        if var_name in driver.variables:
            var = driver.variables[var_name]
        else:
            var = driver.variables.new()
            var.name = var_name

        # Set up the variable: reads the custom prop off the driving bone.
        var.type = "SINGLE_PROP"
        var.targets[0].id_type = 'OBJECT'
        var.targets[0].id = obj
        var.targets[0].data_path = 'pose.bones["' + bone + '"]' + var_path
def mark_actions():
    """Tag every existing action so a newly created one can be found
    afterwards (see get_unmarked_action)."""
    for existing_action in bpy.data.actions:
        existing_action.tag = True
def get_unmarked_action():
    """Return the first action whose tag is not set (i.e. one created after
    mark_actions() ran), or None when every action is tagged."""
    untagged = (act for act in bpy.data.actions if act.tag != True)
    return next(untagged, None)
def add_action(name=None):
    """Create a new action via bpy.ops.action.new() and return it,
    optionally renaming it. Uses the mark/unmark trick because the
    operator does not return the created datablock."""
    mark_actions()
    bpy.ops.action.new()
    action = get_unmarked_action()
    if name is not None:
        action.name = name
    return action
def metarig_template():
    """Build the single-bone example metarig for this rig type.
    Generated by rigify.write_meta_rig."""
    # generated by rigify.write_meta_rig
    bpy.ops.object.mode_set(mode='EDIT')
    obj = bpy.context.active_object
    arm = obj.data
    bone = arm.edit_bones.new('Bone')
    bone.head[:] = 0.0000, 0.0000, 0.0000
    bone.tail[:] = 0.0000, 0.0000, 1.0000
    bone.roll = 0.0000
    bone.use_connect = False

    bpy.ops.object.mode_set(mode='OBJECT')
    pbone = obj.pose.bones['Bone']
    # NOTE(review): tagged 'copy', not 'eye_balls' — looks copy-pasted from
    # the copy rig's template; verify against rigify's template registry.
    pbone['type'] = 'copy'
def metarig_definition(obj, orig_bone_name):
    """Resolve the (head, eye_target) chain for the eye_balls rig.

    The tagged bone is the eye target; its parent is the head.
    Returns [parent_name, bone_name].
    Raises RigifyError when the bone has no parent (bone.parent is None,
    so .name raises AttributeError).
    """
    bone = obj.data.bones[orig_bone_name]
    chain = []

    try:
        chain += [bone.parent.name, bone.name]
    except AttributeError:
        raise RigifyError("'%s' rig type requires a parent (bone: %s)" % (RIG_TYPE, orig_bone_name))

    return chain
def deform(obj, definitions, base_names, options):
    """Create deform bones for the eye_balls rig.

    For each eye listed in options["eyes"] (comma-separated base names),
    duplicates the ORG- eye bone as a DEF- bone constrained to follow it.
    Returns (None,) — no bones participate in fk/ik blending.
    Leaves the object in object mode.
    """
    bpy.ops.object.mode_set(mode='EDIT')

    eb = obj.data.edit_bones
    pb = obj.pose.bones

    # Get list of eyes
    if "eyes" in options:
        eye_base_names = options["eyes"].replace(" ", "").split(",")
    else:
        eye_base_names = []

    # Get their ORG- names
    eyes = []
    for name in eye_base_names:
        eyes += ["ORG-"+name]

    # Duplicate the eyes to make deformation bones
    def_eyes = []  # def/org pairs
    for eye in eyes:
        def_eyes += [(copy_bone_simple(obj.data, eye, "DEF-"+base_names[eye], parent=True).name, eye)]

    bpy.ops.object.mode_set(mode='OBJECT')

    # Constraints: each DEF bone copies its ORG bone's transforms.
    for eye in def_eyes:
        con = pb[eye[0]].constraints.new('COPY_TRANSFORMS')
        con.target = obj
        con.subtarget = eye[1]

    return (None,)
def control(obj, definitions, base_names, options):
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
eb = obj.data.edit_bones
|
||||
bb = obj.data.bones
|
||||
pb = obj.pose.bones
|
||||
|
||||
head = definitions[0]
|
||||
eye_target = definitions[1]
|
||||
|
||||
# Get list of pupil mesh objects
|
||||
if "mesh" in options:
|
||||
pupil_meshes = options["mesh"].replace(" ", "").split(",")
|
||||
else:
|
||||
pupil_meshes = []
|
||||
|
||||
# Get list of eyes
|
||||
if "eyes" in options:
|
||||
eye_base_names = options["eyes"].replace(" ", "").split(",")
|
||||
else:
|
||||
eye_base_names = []
|
||||
|
||||
# Get their ORG- names
|
||||
eyes = []
|
||||
for name in eye_base_names:
|
||||
eyes += ["ORG-"+name]
|
||||
|
||||
# Get the average position of the eyes
|
||||
center = Vector((0, 0, 0))
|
||||
for eye in eyes:
|
||||
center += eb[eye].head
|
||||
if len(eyes) != 0:
|
||||
center /= len(eyes)
|
||||
|
||||
# Get the average length of the eyes
|
||||
length = 0.0
|
||||
for eye in eyes:
|
||||
length += eb[eye].length
|
||||
if len(eyes) == 0:
|
||||
length = 1.0
|
||||
else:
|
||||
length /= len(eyes)
|
||||
|
||||
|
||||
# Make the mind's eye
|
||||
minds_eye = copy_bone_simple(obj.data, eye_target, "MCH-"+base_names[eye_target]+".mind", parent=True).name
|
||||
eb[minds_eye].head = center
|
||||
eb[minds_eye].tail = eb[eye_target].head
|
||||
eb[minds_eye].roll = 0.0
|
||||
eb[minds_eye].length = length
|
||||
|
||||
# Create org/copy/control eye sets
|
||||
eye_sets = []
|
||||
for eye in eyes:
|
||||
copy = copy_bone_simple(obj.data, minds_eye, "MCH-"+base_names[eye]+".cpy", parent=True).name
|
||||
eb[copy].translate(eb[eye].head - eb[copy].head)
|
||||
eb[copy].parent = eb[eye].parent
|
||||
|
||||
control = copy_bone_simple(obj.data, eye, base_names[eye], parent=True).name
|
||||
eb[control].parent = eb[copy]
|
||||
|
||||
eye_sets += [(eye, copy, control)]
|
||||
|
||||
# Bones for parent/free switch for eye target
|
||||
target_ctrl = copy_bone_simple(obj.data, eye_target, base_names[eye_target], parent=True).name
|
||||
parent = copy_bone_simple(obj.data, head, "MCH-eye_target_parent", parent=False).name
|
||||
|
||||
eb[target_ctrl].parent = eb[parent]
|
||||
|
||||
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Axis locks
|
||||
pb[target_ctrl].lock_scale = False, True, True
|
||||
|
||||
# Add eye_spread action if it doesn't already exist
|
||||
action_name = "eye_spread"
|
||||
if action_name in bpy.data.actions:
|
||||
spread_action = bpy.data.actions[action_name]
|
||||
else:
|
||||
spread_action = add_action(name=action_name)
|
||||
|
||||
# Add free property
|
||||
prop_name = "free"
|
||||
prop = rna_idprop_ui_prop_get(pb[target_ctrl], prop_name, create=True)
|
||||
pb[target_ctrl][prop_name] = 0.0
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
prop["min"] = 0.0
|
||||
prop["max"] = 1.0
|
||||
|
||||
free_driver_path = pb[target_ctrl].path_from_id() + '["free"]'
|
||||
|
||||
# Constraints
|
||||
# Mind's eye tracks eye target control
|
||||
con = pb[minds_eye].constraints.new('DAMPED_TRACK')
|
||||
con.target = obj
|
||||
con.subtarget = target_ctrl
|
||||
|
||||
# Parent copies transforms of head
|
||||
con = pb[parent].constraints.new('COPY_TRANSFORMS')
|
||||
con.target = obj
|
||||
con.subtarget = head
|
||||
|
||||
fcurve = con.driver_add("influence")
|
||||
driver = fcurve.driver
|
||||
driver.type = 'AVERAGE'
|
||||
mod = fcurve.modifiers[0]
|
||||
mod.coefficients[0] = 1.0
|
||||
mod.coefficients[1] = -1.0
|
||||
|
||||
var = driver.variables.new()
|
||||
var.name = "free"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = free_driver_path
|
||||
|
||||
# Eye set's constraints
|
||||
for eye in eye_sets:
|
||||
# Org copies transforms of control
|
||||
con = pb[eye[0]].constraints.new('COPY_TRANSFORMS')
|
||||
con.target = obj
|
||||
con.subtarget = eye[2]
|
||||
|
||||
# Copy copies rotation of mind's eye
|
||||
con = pb[eye[1]].constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = minds_eye
|
||||
|
||||
# Control gets action constraint for eye spread
|
||||
con = pb[eye[2]].constraints.new('ACTION')
|
||||
con.target = obj
|
||||
con.subtarget = target_ctrl
|
||||
con.action = spread_action
|
||||
con.transform_channel = 'SCALE_X'
|
||||
con.frame_start = -20
|
||||
con.frame_end = 20
|
||||
con.min = 0.0
|
||||
con.max = 2.0
|
||||
con.target_space = 'LOCAL'
|
||||
|
||||
|
||||
# Get/create the shape keys and drivers for pupil dilation
|
||||
shape_names = ["PUPILS-dilate_wide", "PUPILS-dilate_narrow"]
|
||||
slider_name = "pupil_dilate"
|
||||
|
||||
# Set up the custom property on the bone
|
||||
prop = rna_idprop_ui_prop_get(pb[target_ctrl], slider_name, create=True)
|
||||
pb[target_ctrl][slider_name] = 0.0
|
||||
prop["min"] = 0.0
|
||||
prop["max"] = 1.0
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
if len(shape_names) > 1:
|
||||
prop["min"] = -1.0
|
||||
prop["soft_min"] = -1.0
|
||||
|
||||
# Add the shape drivers
|
||||
# Positive
|
||||
if shape_names[0] != "":
|
||||
# Set up the variables for creating the shape key driver
|
||||
shape_name = shape_names[0]
|
||||
var_name = slider_name.replace(".", "_").replace("-", "_")
|
||||
var_path = '["' + slider_name + '"]'
|
||||
if slider_name + "_fac" in options:
|
||||
fac = options[slider_name + "_fac"]
|
||||
else:
|
||||
fac = 1.0
|
||||
expression = var_name + " * " + str(fac)
|
||||
# Create the shape key driver
|
||||
create_shape_and_driver(obj, target_ctrl, pupil_meshes, shape_name, var_name, var_path, expression)
|
||||
# Negative
|
||||
if shape_names[0] != "" and len(shape_names) > 1:
|
||||
# Set up the variables for creating the shape key driver
|
||||
shape_name = shape_names[1]
|
||||
var_name = slider_name.replace(".", "_").replace("-", "_")
|
||||
var_path = '["' + slider_name + '"]'
|
||||
if slider_name + "_fac" in options:
|
||||
fac = options[slider_name + "_fac"]
|
||||
else:
|
||||
fac = 1.0
|
||||
expression = var_name + " * " + str(fac) + " * -1"
|
||||
# Create the shape key driver
|
||||
create_shape_and_driver(obj, target_ctrl, pupil_meshes, shape_name, var_name, var_path, expression)
|
||||
|
||||
|
||||
|
||||
# Set layers
|
||||
#layer = list(bb[definitions[2]].layers)
|
||||
#bb[lid1].layers = layer
|
||||
#bb[lid2].layers = layer
|
||||
#bb[lid3].layers = layer
|
||||
#bb[lid4].layers = layer
|
||||
#bb[lid5].layers = layer
|
||||
#bb[lid6].layers = layer
|
||||
#bb[lid7].layers = layer
|
||||
#bb[lid8].layers = layer
|
||||
|
||||
|
||||
return (None,)
|
||||
|
||||
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
    """Entry point for the eye rig: build the control layer, then the
    deform layer, and return the (empty) control-bone tuple."""
    for build_step in (control, deform):
        build_step(obj, bone_definition, base_names, options)

    return (None,)
|
||||
|
@ -1,687 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
from math import acos
|
||||
from mathutils import Vector
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import copy_bone_simple
|
||||
|
||||
#METARIG_NAMES = ("cpy",)
|
||||
RIG_TYPE = "eye_lid"
|
||||
|
||||
def mark_actions():
    """Tag every existing action so a subsequently created one can be
    identified by its missing tag (see get_unmarked_action)."""
    for act in bpy.data.actions:
        act.tag = True
|
||||
|
||||
def get_unmarked_action():
    """Return the first action whose tag is not set, or None when every
    action is tagged (i.e. nothing new was created)."""
    untagged = (act for act in bpy.data.actions if act.tag != True)
    return next(untagged, None)
|
||||
|
||||
def add_action(name=None):
    """Create a new action via the operator and return it.

    The mark/scan round-trip is needed because ``bpy.ops.action.new()``
    does not report which datablock it created. If *name* is given, the
    new action is renamed to it.
    """
    mark_actions()
    bpy.ops.action.new()
    new_action = get_unmarked_action()
    if name is None:
        return new_action
    new_action.name = name
    return new_action
|
||||
|
||||
|
||||
def metarig_template():
    # generated by rigify.write_meta_rig
    # Template metarig for this rig type: a single unit bone named 'Bone'
    # tagged with the 'copy' type custom property.
    bpy.ops.object.mode_set(mode='EDIT')
    obj = bpy.context.active_object
    arm = obj.data
    bone = arm.edit_bones.new('Bone')
    bone.head[:] = 0.0000, 0.0000, 0.0000
    bone.tail[:] = 0.0000, 0.0000, 1.0000
    bone.roll = 0.0000
    bone.use_connect = False

    bpy.ops.object.mode_set(mode='OBJECT')
    pbone = obj.pose.bones['Bone']
    # Rig type is stored as a custom property; rigify dispatches on it.
    pbone['type'] = 'copy'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
    """Build the 12-name bone definition list for the eye_lid rig.

    Layout of the returned list:
      [0] grandparent (head), [1] parent (eye), [2] the original bone,
      [3..9] the rest of the same-base-name child chain (first 10 kept),
      [10], [11] two further bones found by following .children[0] twice
      (the lower/upper lid target bones).

    Raises RigifyError when the required parents or chain length are
    missing.
    """
    bb = obj.data.bones
    bone = bb[orig_bone_name]
    chain = []

    try:
        # Two parent levels are required above the original bone.
        chain += [bone.parent.parent.name, bone.parent.name, bone.name]
    except AttributeError:
        raise RigifyError("'%s' rig type requires a chain of two parents (bone: %s)" % (RIG_TYPE, orig_bone_name))

    chain += [child.name for child in bone.children_recursive_basename]

    if len(chain) < 10:
        raise RigifyError("'%s' rig type requires a chain of 10 bones (bone: %s)" % (RIG_TYPE, orig_bone_name))

    # Truncate: only the first 10 names in the chain are used.
    chain = chain[:10]

    try:
        # Append the first child of bone 9, then that bone's first child.
        chain += [bb[chain[9]].children[0].name]
        chain += [bb[chain[10]].children[0].name]
    except IndexError:
        raise RigifyError("'%s' rig type requires a chain of 10 bones (bone: %s)" % (RIG_TYPE, orig_bone_name))

    return chain
|
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
    """Create the deform rig for the eye lids.

    Builds MCH stretch bones spanning consecutive lid joints
    (definitions[2..9], upper lid first then lower lid), duplicates the
    two middle segments of each lid as 8-segment b-bones, then creates
    DEF- bones that follow their MCH counterparts through
    COPY_TRANSFORMS constraints. Returns (None,).

    Fix/cleanup: the twelve copy-pasted COPY_TRANSFORMS stanzas and the
    repeated parent/use_connect/bbone_segments boilerplate are collapsed
    into loops; creation order is preserved so generated bone names are
    unchanged.
    """
    bpy.ops.object.mode_set(mode='EDIT')

    eb = obj.data.edit_bones
    pb = obj.pose.bones

    # Upper lid MCH: stretch bones between consecutive lid joints.
    # lid22/lid33 run across the middle segment in both directions.
    lid1 = make_lid_stretch_bone(obj, "MCH-lid", definitions[2], definitions[3], 1.0)
    lid2 = make_lid_stretch_bone(obj, "MCH-lid", definitions[3], definitions[4], 1.0)
    lid22 = make_lid_stretch_bone(obj, "MCH-lid", definitions[4], definitions[5], 1.0)
    lid33 = make_lid_stretch_bone(obj, "MCH-lid", definitions[4], definitions[3], 1.0)
    lid3 = make_lid_stretch_bone(obj, "MCH-lid", definitions[5], definitions[4], 1.0)
    lid4 = make_lid_stretch_bone(obj, "MCH-lid", definitions[6], definitions[5], 1.0)

    # Duplicates of the middle segments, subdivided for b-bone bending.
    dlid22 = copy_bone_simple(obj.data, lid22, "MCH-lid", parent=True).name
    dlid33 = copy_bone_simple(obj.data, lid33, "MCH-lid", parent=True).name
    eb[dlid22].bbone_segments = 8
    eb[dlid33].bbone_segments = 8

    # Parent each upper-lid MCH bone to the ORG joint it starts from.
    for mch, org_index in ((lid1, 2), (lid2, 3), (lid22, 4), (lid33, 4), (lid3, 5), (lid4, 6)):
        eb[mch].parent = eb[definitions[org_index]]

    # Lower lid MCH (same pattern, joints 6..9 wrapping back to 2).
    lid5 = make_lid_stretch_bone(obj, "MCH-lid", definitions[6], definitions[7], 1.0)
    lid6 = make_lid_stretch_bone(obj, "MCH-lid", definitions[7], definitions[8], 1.0)
    lid66 = make_lid_stretch_bone(obj, "MCH-lid", definitions[8], definitions[9], 1.0)
    lid77 = make_lid_stretch_bone(obj, "MCH-lid", definitions[8], definitions[7], 1.0)
    lid7 = make_lid_stretch_bone(obj, "MCH-lid", definitions[9], definitions[8], 1.0)
    lid8 = make_lid_stretch_bone(obj, "MCH-lid", definitions[2], definitions[9], 1.0)

    dlid66 = copy_bone_simple(obj.data, lid66, "MCH-lid", parent=True).name
    dlid77 = copy_bone_simple(obj.data, lid77, "MCH-lid", parent=True).name
    eb[dlid66].bbone_segments = 8
    eb[dlid77].bbone_segments = 8

    for mch, org_index in ((lid5, 6), (lid6, 7), (lid66, 8), (lid77, 8), (lid7, 9), (lid8, 2)):
        eb[mch].parent = eb[definitions[org_index]]

    # Upper lid DEF bones follow the MCH bones.
    dlid1 = copy_bone_simple(obj.data, lid1, "DEF-" + base_names[definitions[2]], parent=True).name
    dlid2 = copy_bone_simple(obj.data, lid2, "DEF-" + base_names[definitions[3]], parent=True).name
    dlid3 = copy_bone_simple(obj.data, lid3, "DEF-" + base_names[definitions[4]], parent=True).name
    dlid4 = copy_bone_simple(obj.data, lid4, "DEF-" + base_names[definitions[5]], parent=True).name

    # Chain the DEF bones toward the middle from both lid corners.
    eb[dlid2].parent = eb[dlid1]
    eb[dlid22].parent = eb[dlid2]

    eb[dlid3].parent = eb[dlid4]
    eb[dlid33].parent = eb[dlid3]

    for bone_name in (dlid2, dlid22, dlid3, dlid33):
        eb[bone_name].use_connect = True
    for bone_name in (dlid1, dlid2, dlid3, dlid4):
        eb[bone_name].bbone_segments = 8

    # Lower lid DEF bones.
    dlid5 = copy_bone_simple(obj.data, lid5, "DEF-" + base_names[definitions[6]], parent=True).name
    dlid6 = copy_bone_simple(obj.data, lid6, "DEF-" + base_names[definitions[7]], parent=True).name
    dlid7 = copy_bone_simple(obj.data, lid7, "DEF-" + base_names[definitions[8]], parent=True).name
    dlid8 = copy_bone_simple(obj.data, lid8, "DEF-" + base_names[definitions[9]], parent=True).name

    eb[dlid6].parent = eb[dlid5]
    eb[dlid66].parent = eb[dlid6]

    eb[dlid7].parent = eb[dlid8]
    eb[dlid77].parent = eb[dlid7]

    for bone_name in (dlid6, dlid66, dlid7, dlid77):
        eb[bone_name].use_connect = True
    for bone_name in (dlid5, dlid6, dlid7, dlid8):
        eb[bone_name].bbone_segments = 8

    bpy.ops.object.mode_set(mode='OBJECT')

    # Constraints: each DEF bone copies the transforms of its MCH
    # counterpart (order preserved from the original twelve stanzas).
    constraint_pairs = (
        (dlid1, lid1), (dlid22, lid22), (dlid33, lid33), (dlid2, lid2),
        (dlid3, lid3), (dlid4, lid4), (dlid5, lid5), (dlid6, lid6),
        (dlid66, lid66), (dlid77, lid77), (dlid7, lid7), (dlid8, lid8),
    )
    for def_bone, mch_bone in constraint_pairs:
        con = pb[def_bone].constraints.new('COPY_TRANSFORMS')
        con.target = obj
        con.subtarget = mch_bone

    return (None,)
|
||||
|
||||
|
||||
|
||||
|
||||
def _add_lid_close_constraint(pb_bone, obj, subtarget, action, con_min, con_max, driver_path):
    """Add one ACTION constraint mapping the given lid control's local Y
    location onto the eye-close action, with the constraint's influence
    driven by the "close_action" custom property (via driver_path)."""
    con = pb_bone.constraints.new('ACTION')
    con.target = obj
    con.subtarget = subtarget
    con.action = action
    con.transform_channel = 'LOCATION_Y'
    con.frame_start = -30
    con.frame_end = 30
    con.min = con_min
    con.max = con_max
    con.target_space = 'LOCAL'
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    driver.type = 'AVERAGE'
    var = driver.variables.new()
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = close_driver_path if False else driver_path


def control(obj, definitions, base_names, options):
    """Create the control rig for the eye lids.

    definitions[0] is the head bone, definitions[1] the eye, [2..9] the
    eight lid joints (upper lid then lower lid), and [10]/[11] the
    lower/upper lid control source bones. Returns (None,).

    Cleanup: the eight copy-pasted flower/flid/lid creation stanzas and
    the nine near-identical ACTION-constraint stanzas are collapsed into
    loops plus the _add_lid_close_constraint helper; bone creation order
    is preserved so generated names are unchanged. The unused local
    eye_e was removed.
    """
    bpy.ops.object.mode_set(mode='EDIT')

    eb = obj.data.edit_bones
    bb = obj.data.bones
    pb = obj.pose.bones

    head_e = eb[definitions[0]]

    # The eight ORG lid joints this rig controls.
    lid_defs = definitions[2:10]

    # Make the eye "flower": one MCH bone per lid joint, rooted at the
    # eye bone and pointing out to that joint.
    flowers = [copy_bone_simple(obj.data, definitions[1], "MCH-" + base_names[d] + ".flower", parent=True).name
               for d in lid_defs]
    for flo, d in zip(flowers, lid_defs):
        eb[flo].tail = eb[d].head

    # Lid MCH bones sitting on the tips of the flowers.
    flids = [copy_bone_simple(obj.data, d, "MCH-" + base_names[d]).name for d in lid_defs]
    for flid, flo in zip(flids, flowers):
        eb[flid].parent = eb[flo]

    # Visible lid controls: re-aimed straight along +Y, zero roll,
    # parented to the head bone.
    lids = [copy_bone_simple(obj.data, d, base_names[d]).name for d in lid_defs]

    size = eb[lids[0]].length
    size_y = Vector(0.0, size, 0.0)
    for lid in lids:
        eb[lid].tail = eb[lid].head + size_y
        eb[lid].roll = 0
        eb[lid].parent = head_e

    lower_lid_ctrl = copy_bone_simple(obj.data, definitions[10], base_names[definitions[10]]).name
    upper_lid_ctrl = copy_bone_simple(obj.data, definitions[11], base_names[definitions[11]]).name
    eb[lower_lid_ctrl].parent = head_e
    eb[upper_lid_ctrl].parent = head_e
    # Distance between the two controls scales the action mapping range.
    distance = (eb[lower_lid_ctrl].head - eb[upper_lid_ctrl].head).length

    bpy.ops.object.mode_set(mode='OBJECT')

    # Axis locks: the lid controls only slide along their local Y.
    pb[lower_lid_ctrl].lock_location = True, False, True
    pb[upper_lid_ctrl].lock_location = True, False, True

    # Add eye close action if it doesn't already exist.
    action_name = "eye_close"
    if action_name in bpy.data.actions:
        close_action = bpy.data.actions[action_name]
    else:
        close_action = add_action(name=action_name)

    # Add close property (useful when making the animation in the action).
    prop_name = "close_action"
    prop = rna_idprop_ui_prop_get(pb[upper_lid_ctrl], prop_name, create=True)
    pb[upper_lid_ctrl][prop_name] = 1.0
    prop["soft_min"] = 0.0
    prop["soft_max"] = 1.0
    prop["min"] = 0.0
    prop["max"] = 1.0

    close_driver_path = pb[upper_lid_ctrl].path_from_id() + '["close_action"]'

    # Constraints

    # Flowers track their lid controls.
    for flo, lid in zip(flowers, lids):
        con = pb[flo].constraints.new('DAMPED_TRACK')
        con.target = obj
        con.subtarget = lid

    # ORG lid bones follow the flower-tip MCH bones.
    for d, flid in zip(lid_defs, flids):
        con = pb[d].constraints.new('COPY_TRANSFORMS')
        con.target = obj
        con.subtarget = flid

    # Action constraints, upper lid: lids 1-5 follow the upper control.
    for lid in lids[0:5]:
        _add_lid_close_constraint(pb[lid], obj, upper_lid_ctrl, close_action,
                                  -distance * 2, distance, close_driver_path)

    # Action constraints, lower lid: lids 5-8 plus lid 1 (the two corner
    # lids carry both constraints) follow the lower control.
    for lid in lids[4:8] + lids[0:1]:
        _add_lid_close_constraint(pb[lid], obj, lower_lid_ctrl, close_action,
                                  -distance, distance * 2, close_driver_path)

    # Set layers: lid controls share layers with the first ORG lid bone.
    layer = list(bb[definitions[2]].layers)
    for lid in lids:
        bb[lid].layers = layer

    return (None,)
|
||||
|
||||
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
    """Entry point for the eye_lid rig: control layer first, deform
    layer second."""
    args = (obj, bone_definition, base_names, options)
    control(*args)
    deform(*args)

    return (None,)
|
||||
|
||||
|
||||
|
||||
|
||||
def make_lid_stretch_bone(obj, name, bone1, bone2, roll_alpha):
    """Create a MCH bone running from bone1's head to bone2's head that
    stays pinned to bone1 while tracking and stretching toward bone2.

    roll_alpha blends the roll reference between bone1's direction (0.0)
    and bone2's direction (1.0). Expects edit mode on entry, toggles to
    object mode to add constraints, and leaves the armature back in edit
    mode. Returns the new bone's name.
    """
    eb = obj.data.edit_bones
    pb = obj.pose.bones

    # Create the bone, pointing from bone1 to bone2
    bone_e = copy_bone_simple(obj.data, bone1, name, parent=True)
    bone_e.use_connect = False
    bone_e.tail = eb[bone2].head
    bone = bone_e.name

    # Align the bone roll with the average direction of bone1 and bone2
    # NOTE(review): relies on the 2.5-era mathutils API where normalize()
    # returns the vector and '*' between vectors is the dot product —
    # confirm before porting to a newer Blender.
    vec = bone_e.y_axis.cross(((1.0-roll_alpha)*eb[bone1].y_axis) + (roll_alpha*eb[bone2].y_axis)).normalize()

    # Angle between the blended reference and the bone's current x axis.
    ang = acos(vec * bone_e.x_axis)

    # Try rolling by +ang and -ang; keep whichever direction aligns
    # x_axis better with vec (larger dot product).
    bone_e.roll += ang
    c1 = vec * bone_e.x_axis
    bone_e.roll -= (ang*2)
    c2 = vec * bone_e.x_axis

    if c1 > c2:
        bone_e.roll += (ang*2)

    bpy.ops.object.mode_set(mode='OBJECT')
    bone_p = pb[bone]

    # Constraints: pin the bone to bone1, then aim and stretch it to bone2.
    con = bone_p.constraints.new('COPY_LOCATION')
    con.target = obj
    con.subtarget = bone1

    con = bone_p.constraints.new('DAMPED_TRACK')
    con.target = obj
    con.subtarget = bone2

    con = bone_p.constraints.new('STRETCH_TO')
    con.target = obj
    con.subtarget = bone2
    con.volume = 'NO_VOLUME'

    bpy.ops.object.mode_set(mode='EDIT')

    return bone
|
@ -1,378 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import copy_bone_simple, get_side_name
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
|
||||
METARIG_NAMES = "finger_01", "finger_02", "finger_03"
|
||||
|
||||
|
||||
def metarig_template():
    # generated by rigify.write_meta_rig
    # Template metarig: a connected three-bone finger chain, with the
    # root bone tagged 'finger_curl' so rigify dispatches to this module.
    bpy.ops.object.mode_set(mode='EDIT')
    obj = bpy.context.active_object
    arm = obj.data
    bone = arm.edit_bones.new('finger.01')
    bone.head[:] = 0.0000, 0.0000, 0.0000
    bone.tail[:] = 0.0353, -0.0184, -0.0053
    bone.roll = -2.8722
    bone.use_connect = False
    bone = arm.edit_bones.new('finger.02')
    bone.head[:] = 0.0353, -0.0184, -0.0053
    bone.tail[:] = 0.0702, -0.0364, -0.0146
    bone.roll = -2.7099
    bone.use_connect = True
    bone.parent = arm.edit_bones['finger.01']
    bone = arm.edit_bones.new('finger.03')
    bone.head[:] = 0.0702, -0.0364, -0.0146
    bone.tail[:] = 0.0903, -0.0461, -0.0298
    bone.roll = -2.1709
    bone.use_connect = True
    bone.parent = arm.edit_bones['finger.02']

    bpy.ops.object.mode_set(mode='OBJECT')
    pbone = obj.pose.bones['finger.01']
    # Rig type is stored as a custom property on the root pose bone.
    pbone['type'] = 'finger_curl'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
    """Collect the finger chain starting at orig_bone_name.

    The given bone is the first in a chain and must have at least one
    child sharing its base name, e.g. finger_01 -> finger_02. Raises
    RigifyError otherwise.
    """
    root = obj.data.bones[orig_bone_name]

    names = [root.name]
    for child in root.children_recursive_basename:
        names.append(child.name)

    if len(names) < 2:
        raise RigifyError("expected the chain to have at least 1 child from bone '%s' without the same base name" % orig_bone_name)

    return names
|
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
|
||||
""" Creates the deform rig.
|
||||
"""
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
three_digits = True if len(definitions) > 2 else False
|
||||
|
||||
# Create base digit bones: two bones, each half of the base digit.
|
||||
f1a = copy_bone_simple(obj.data, definitions[0], "DEF-%s.01" % base_names[definitions[0]], parent=True)
|
||||
f1b = copy_bone_simple(obj.data, definitions[0], "DEF-%s.02" % base_names[definitions[0]], parent=True)
|
||||
f1a.use_connect = False
|
||||
f1b.use_connect = False
|
||||
f1b.parent = f1a
|
||||
center = f1a.center
|
||||
f1a.tail = center
|
||||
f1b.head = center
|
||||
|
||||
# Create the other deform bones.
|
||||
f2 = copy_bone_simple(obj.data, definitions[1], "DEF-%s" % base_names[definitions[1]], parent=True)
|
||||
if three_digits:
|
||||
f3 = copy_bone_simple(obj.data, definitions[2], "DEF-%s" % base_names[definitions[2]], parent=True)
|
||||
|
||||
# Store names before leaving edit mode
|
||||
f1a_name = f1a.name
|
||||
f1b_name = f1b.name
|
||||
f2_name = f2.name
|
||||
if three_digits:
|
||||
f3_name = f3.name
|
||||
|
||||
# Leave edit mode
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Get the pose bones
|
||||
f1a = obj.pose.bones[f1a_name]
|
||||
f1b = obj.pose.bones[f1b_name]
|
||||
f2 = obj.pose.bones[f2_name]
|
||||
if three_digits:
|
||||
f3 = obj.pose.bones[f3_name]
|
||||
|
||||
# Constrain the base digit's bones
|
||||
con = f1a.constraints.new('DAMPED_TRACK')
|
||||
con.name = "trackto"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[1]
|
||||
|
||||
con = f1a.constraints.new('COPY_SCALE')
|
||||
con.name = "copy_scale"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[0]
|
||||
|
||||
con = f1b.constraints.new('COPY_ROTATION')
|
||||
con.name = "copy_rot"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[0]
|
||||
|
||||
# Constrain the other digit's bones
|
||||
con = f2.constraints.new('COPY_TRANSFORMS')
|
||||
con.name = "copy_transforms"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[1]
|
||||
|
||||
if three_digits:
|
||||
con = f3.constraints.new('COPY_TRANSFORMS')
|
||||
con.name = "copy_transforms"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[2]
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
# *** EDITMODE
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
three_digits = True if len(bone_definition) > 2 else False
|
||||
|
||||
# get assosiated data
|
||||
arm = obj.data
|
||||
bb = obj.data.bones
|
||||
eb = obj.data.edit_bones
|
||||
pb = obj.pose.bones
|
||||
|
||||
org_f1 = bone_definition[0] # Original finger bone 01
|
||||
org_f2 = bone_definition[1] # Original finger bone 02
|
||||
if three_digits:
|
||||
org_f3 = bone_definition[2] # Original finger bone 03
|
||||
|
||||
# Check options
|
||||
if "bend_ratio" in options:
|
||||
bend_ratio = options["bend_ratio"]
|
||||
else:
|
||||
bend_ratio = 0.4
|
||||
|
||||
yes = [1, 1.0, True, "True", "true", "Yes", "yes"]
|
||||
make_hinge = False
|
||||
if ("hinge" in options) and (eb[org_f1].parent is not None):
|
||||
if options["hinge"] in yes:
|
||||
make_hinge = True
|
||||
|
||||
|
||||
# Needed if its a new armature with no keys
|
||||
obj.animation_data_create()
|
||||
|
||||
# Create the control bone
|
||||
base_name = base_names[bone_definition[0]].split(".", 1)[0]
|
||||
if three_digits:
|
||||
tot_len = eb[org_f1].length + eb[org_f2].length + eb[org_f3].length
|
||||
else:
|
||||
tot_len = eb[org_f1].length + eb[org_f2].length
|
||||
control = copy_bone_simple(arm, bone_definition[0], base_name + get_side_name(base_names[bone_definition[0]]), parent=True).name
|
||||
eb[control].use_connect = eb[org_f1].use_connect
|
||||
eb[control].parent = eb[org_f1].parent
|
||||
eb[control].length = tot_len
|
||||
|
||||
# Create secondary control bones
|
||||
f1 = copy_bone_simple(arm, bone_definition[0], base_names[bone_definition[0]]).name
|
||||
f2 = copy_bone_simple(arm, bone_definition[1], base_names[bone_definition[1]]).name
|
||||
if three_digits:
|
||||
f3 = copy_bone_simple(arm, bone_definition[2], base_names[bone_definition[2]]).name
|
||||
|
||||
# Create driver bones
|
||||
df1 = copy_bone_simple(arm, bone_definition[0], "MCH-" + base_names[bone_definition[0]]).name
|
||||
eb[df1].length /= 2
|
||||
df2 = copy_bone_simple(arm, bone_definition[1], "MCH-" + base_names[bone_definition[1]]).name
|
||||
eb[df2].length /= 2
|
||||
if three_digits:
|
||||
df3 = copy_bone_simple(arm, bone_definition[2], "MCH-" + base_names[bone_definition[2]]).name
|
||||
eb[df3].length /= 2
|
||||
|
||||
# Set parents of the bones, interleaving the driver bones with the secondary control bones
|
||||
if three_digits:
|
||||
eb[f3].use_connect = False
|
||||
eb[df3].use_connect = False
|
||||
eb[f2].use_connect = False
|
||||
eb[df2].use_connect = False
|
||||
eb[f1].use_connect = False
|
||||
eb[df1].use_connect = eb[org_f1].use_connect
|
||||
|
||||
if three_digits:
|
||||
eb[f3].parent = eb[df3]
|
||||
eb[df3].parent = eb[f2]
|
||||
eb[f2].parent = eb[df2]
|
||||
eb[df2].parent = eb[f1]
|
||||
eb[f1].parent = eb[df1]
|
||||
eb[df1].parent = eb[org_f1].parent
|
||||
|
||||
# Set up bones for hinge
|
||||
if make_hinge:
|
||||
socket = copy_bone_simple(arm, org_f1, "MCH-socket_"+control, parent=True).name
|
||||
hinge = copy_bone_simple(arm, eb[org_f1].parent.name, "MCH-hinge_"+control).name
|
||||
|
||||
eb[control].use_connect = False
|
||||
eb[control].parent = eb[hinge]
|
||||
|
||||
# Create the deform rig while we're still in edit mode
|
||||
deform(obj, bone_definition, base_names, options)
|
||||
|
||||
|
||||
# *** POSEMODE
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Set rotation modes and axis locks
|
||||
pb[control].rotation_mode = obj.pose.bones[bone_definition[0]].rotation_mode
|
||||
pb[control].lock_location = True, True, True
|
||||
pb[control].lock_scale = True, False, True
|
||||
pb[f1].rotation_mode = 'YZX'
|
||||
pb[f2].rotation_mode = 'YZX'
|
||||
if three_digits:
|
||||
pb[f3].rotation_mode = 'YZX'
|
||||
pb[f1].lock_location = True, True, True
|
||||
pb[f2].lock_location = True, True, True
|
||||
if three_digits:
|
||||
pb[f3].lock_location = True, True, True
|
||||
pb[df2].rotation_mode = 'YZX'
|
||||
if three_digits:
|
||||
pb[df3].rotation_mode = 'YZX'
|
||||
|
||||
# Add the bend_ratio property to the control bone
|
||||
pb[control]["bend_ratio"] = bend_ratio
|
||||
prop = rna_idprop_ui_prop_get(pb[control], "bend_ratio", create=True)
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
# Add hinge property to the control bone
|
||||
if make_hinge:
|
||||
pb[control]["hinge"] = 0.0
|
||||
prop = rna_idprop_ui_prop_get(pb[control], "hinge", create=True)
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
# Constraints
|
||||
con = pb[df1].constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = control
|
||||
|
||||
con = pb[df1].constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = control
|
||||
|
||||
con = pb[org_f1].constraints.new('COPY_TRANSFORMS')
|
||||
con.target = obj
|
||||
con.subtarget = f1
|
||||
|
||||
con = pb[org_f2].constraints.new('COPY_TRANSFORMS')
|
||||
con.target = obj
|
||||
con.subtarget = f2
|
||||
|
||||
if three_digits:
|
||||
con = pb[org_f3].constraints.new('COPY_TRANSFORMS')
|
||||
con.target = obj
|
||||
con.subtarget = f3
|
||||
|
||||
if make_hinge:
|
||||
con = pb[hinge].constraints.new('COPY_TRANSFORMS')
|
||||
con.target = obj
|
||||
con.subtarget = bb[org_f1].parent.name
|
||||
|
||||
hinge_driver_path = pb[control].path_from_id() + '["hinge"]'
|
||||
|
||||
fcurve = con.driver_add("influence")
|
||||
driver = fcurve.driver
|
||||
var = driver.variables.new()
|
||||
driver.type = 'AVERAGE'
|
||||
var.name = "var"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = hinge_driver_path
|
||||
|
||||
mod = fcurve.modifiers[0]
|
||||
mod.poly_order = 1
|
||||
mod.coefficients[0] = 1.0
|
||||
mod.coefficients[1] = -1.0
|
||||
|
||||
con = pb[control].constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = socket
|
||||
|
||||
# Create the drivers for the driver bones (control bone scale rotates driver bones)
|
||||
controller_path = pb[control].path_from_id() # 'pose.bones["%s"]' % control_bone_name
|
||||
|
||||
if three_digits:
|
||||
finger_digits = [df2, df3]
|
||||
else:
|
||||
finger_digits = [df2]
|
||||
|
||||
i = 0
|
||||
for bone in finger_digits:
|
||||
|
||||
# XXX - todo, any number
|
||||
if i == 2:
|
||||
break
|
||||
|
||||
pbone = pb[bone]
|
||||
|
||||
pbone.rotation_mode = 'YZX'
|
||||
fcurve_driver = pbone.driver_add("rotation_euler", 0)
|
||||
|
||||
#obj.driver_add('pose.bones["%s"].scale', 1)
|
||||
#obj.animation_data.drivers[-1] # XXX, WATCH THIS
|
||||
driver = fcurve_driver.driver
|
||||
|
||||
# scale target
|
||||
var = driver.variables.new()
|
||||
var.name = "scale"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = controller_path + '.scale[1]'
|
||||
|
||||
# bend target
|
||||
var = driver.variables.new()
|
||||
var.name = "br"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = controller_path + '["bend_ratio"]'
|
||||
|
||||
# XXX - todo, any number
|
||||
if three_digits:
|
||||
if i == 0:
|
||||
driver.expression = '(-scale+1.0)*pi*2.0*(1.0-br)'
|
||||
elif i == 1:
|
||||
driver.expression = '(-scale+1.0)*pi*2.0*br'
|
||||
else:
|
||||
driver.expression = driver.expression = '(-scale+1.0)*pi*2.0'
|
||||
|
||||
i += 1
|
||||
|
||||
# Last step setup layers
|
||||
if "ex_layer" in options:
|
||||
layer = [n==options["ex_layer"] for n in range(0,32)]
|
||||
else:
|
||||
layer = list(arm.bones[bone_definition[0]].layers)
|
||||
#for bone_name in [f1, f2, f3]:
|
||||
# arm.bones[bone_name].layers = layer
|
||||
arm.bones[f1].layers = layer
|
||||
arm.bones[f2].layers = layer
|
||||
if three_digits:
|
||||
arm.bones[f3].layers = layer
|
||||
|
||||
layer = list(arm.bones[bone_definition[0]].layers)
|
||||
bb[control].layers = layer
|
||||
|
||||
# no blending the result of this
|
||||
return None
|
||||
|
@ -1,501 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from math import pi
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple, blend_bone_list, get_side_name, get_base_name
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
|
||||
METARIG_NAMES = "hips", "thigh", "shin", "foot", "toe", "heel"
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('hips')
|
||||
bone.head[:] = 0.0000, 0.0000, 0.0000
|
||||
bone.tail[:] = 0.0000, 0.0000, 0.2506
|
||||
bone.roll = 0.0000
|
||||
bone.use_connect = False
|
||||
bone = arm.edit_bones.new('thigh')
|
||||
bone.head[:] = 0.1253, 0.0000, -0.0000
|
||||
bone.tail[:] = 0.0752, -0.0251, -0.4260
|
||||
bone.roll = 0.1171
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hips']
|
||||
bone = arm.edit_bones.new('shin')
|
||||
bone.head[:] = 0.0752, -0.0251, -0.4260
|
||||
bone.tail[:] = 0.0752, 0.0000, -0.8771
|
||||
bone.roll = 0.0000
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['thigh']
|
||||
bone = arm.edit_bones.new('foot')
|
||||
bone.head[:] = 0.0752, 0.0000, -0.8771
|
||||
bone.tail[:] = 0.1013, -0.1481, -0.9773
|
||||
bone.roll = -0.4662
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['shin']
|
||||
bone = arm.edit_bones.new('toe')
|
||||
bone.head[:] = 0.1013, -0.1481, -0.9773
|
||||
bone.tail[:] = 0.1100, -0.2479, -0.9773
|
||||
bone.roll = 3.1416
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['foot']
|
||||
bone = arm.edit_bones.new('heel')
|
||||
bone.head[:] = 0.0652, 0.0501, -1.0024
|
||||
bone.tail[:] = 0.0927, -0.1002, -1.0024
|
||||
bone.roll = 0.0000
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['foot']
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['thigh']
|
||||
pbone['type'] = 'leg_biped'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
'''
|
||||
The bone given is the first in a chain
|
||||
Expects a chain of at least 3 children.
|
||||
eg.
|
||||
thigh -> shin -> foot -> [toe, heel]
|
||||
'''
|
||||
|
||||
bone_definition = []
|
||||
|
||||
orig_bone = obj.data.bones[orig_bone_name]
|
||||
orig_bone_parent = orig_bone.parent
|
||||
|
||||
if orig_bone_parent is None:
|
||||
raise RigifyError("expected the thigh bone to have a parent hip bone")
|
||||
|
||||
bone_definition.append(orig_bone_parent.name)
|
||||
bone_definition.append(orig_bone.name)
|
||||
|
||||
|
||||
bone = orig_bone
|
||||
chain = 0
|
||||
while chain < 2: # first 2 bones only have 1 child
|
||||
children = bone.children
|
||||
|
||||
if len(children) != 1:
|
||||
raise RigifyError("expected the thigh bone to have 3 children without a fork")
|
||||
bone = children[0]
|
||||
bone_definition.append(bone.name) # shin, foot
|
||||
chain += 1
|
||||
|
||||
children = bone.children
|
||||
# Now there must be 2 children, only one connected
|
||||
if len(children) != 2:
|
||||
raise RigifyError("expected the foot bone:'%s' to have 2 children" % bone.name)
|
||||
|
||||
if children[0].use_connect == children[1].use_connect:
|
||||
raise RigifyError("expected one bone to be connected")
|
||||
|
||||
toe, heel = children
|
||||
if heel.use_connect:
|
||||
toe, heel = heel, toe
|
||||
|
||||
|
||||
bone_definition.append(toe.name)
|
||||
bone_definition.append(heel.name)
|
||||
|
||||
if len(bone_definition) != len(METARIG_NAMES):
|
||||
raise RigifyError("internal problem, expected %d bones" % len(METARIG_NAMES))
|
||||
|
||||
return bone_definition
|
||||
|
||||
|
||||
def ik(obj, bone_definition, base_names, options):
|
||||
arm = obj.data
|
||||
|
||||
# setup the existing bones, use names from METARIG_NAMES
|
||||
mt_chain = bone_class_instance(obj, ["thigh", "shin", "foot", "toe"])
|
||||
mt = bone_class_instance(obj, ["hips", "heel"])
|
||||
|
||||
mt.attr_initialize(METARIG_NAMES, bone_definition)
|
||||
mt_chain.attr_initialize(METARIG_NAMES, bone_definition)
|
||||
|
||||
# children of ik_foot
|
||||
ik = bone_class_instance(obj, ["foot", "foot_roll", "foot_roll_01", "foot_roll_02", "knee_target"])
|
||||
|
||||
# Make a new chain
|
||||
ik_chain = mt_chain.copy(to_fmt="MCH-%s", base_names=base_names)
|
||||
|
||||
# simple rename
|
||||
ik_chain.rename("thigh", ik_chain.thigh + "_ik")
|
||||
ik_chain.rename("shin", ik_chain.shin + "_ik")
|
||||
|
||||
# make sure leg is child of hips
|
||||
ik_chain.thigh_e.parent = mt.hips_e
|
||||
|
||||
# ik foot: no parents
|
||||
base_foot_name = get_base_name(base_names[mt_chain.foot])
|
||||
ik.foot_e = copy_bone_simple(arm, mt.heel, base_foot_name + "_ik" + get_side_name(base_names[mt_chain.foot]))
|
||||
ik.foot = ik.foot_e.name
|
||||
ik.foot_e.translate(mt_chain.foot_e.head - ik.foot_e.head)
|
||||
ik.foot_e.use_local_location = False
|
||||
|
||||
# foot roll: heel pointing backwards, half length
|
||||
ik.foot_roll_e = copy_bone_simple(arm, mt.heel, base_foot_name + "_roll" + get_side_name(base_names[mt_chain.foot]))
|
||||
ik.foot_roll = ik.foot_roll_e.name
|
||||
ik.foot_roll_e.tail = ik.foot_roll_e.head - ik.foot_roll_e.vector / 2.0
|
||||
ik.foot_roll_e.parent = ik.foot_e # heel is disconnected
|
||||
|
||||
# heel pointing forwards to the toe base, parent of the following 2 bones
|
||||
ik.foot_roll_01_e = copy_bone_simple(arm, mt.heel, "MCH-%s_roll.01" % base_foot_name)
|
||||
ik.foot_roll_01 = ik.foot_roll_01_e.name
|
||||
ik.foot_roll_01_e.tail = mt_chain.foot_e.tail
|
||||
ik.foot_roll_01_e.parent = ik.foot_e # heel is disconnected
|
||||
|
||||
# same as above but reverse direction
|
||||
ik.foot_roll_02_e = copy_bone_simple(arm, mt.heel, "MCH-%s_roll.02" % base_foot_name)
|
||||
ik.foot_roll_02 = ik.foot_roll_02_e.name
|
||||
ik.foot_roll_02_e.parent = ik.foot_roll_01_e # heel is disconnected
|
||||
ik.foot_roll_02_e.head = mt_chain.foot_e.tail
|
||||
ik.foot_roll_02_e.tail = mt.heel_e.head
|
||||
|
||||
del base_foot_name
|
||||
|
||||
# rename 'MCH-toe' --> to 'toe_ik' and make the child of ik.foot_roll_01
|
||||
# ------------------ FK or IK?
|
||||
ik_chain.rename("toe", get_base_name(base_names[mt_chain.toe]) + "_ik" + get_side_name(base_names[mt_chain.toe]))
|
||||
ik_chain.toe_e.use_connect = False
|
||||
ik_chain.toe_e.parent = ik.foot_roll_01_e
|
||||
|
||||
# re-parent ik_chain.foot to the
|
||||
ik_chain.foot_e.use_connect = False
|
||||
ik_chain.foot_e.parent = ik.foot_roll_02_e
|
||||
|
||||
|
||||
# knee target is the heel moved up and forward on its local axis
|
||||
ik.knee_target_e = copy_bone_simple(arm, mt.heel, "knee_target" + get_side_name(mt.heel))
|
||||
ik.knee_target = ik.knee_target_e.name
|
||||
offset = ik.knee_target_e.tail - ik.knee_target_e.head
|
||||
offset.z = 0
|
||||
offset.length = mt_chain.shin_e.head.z - mt.heel_e.head.z
|
||||
offset.z += offset.length
|
||||
ik.knee_target_e.translate(offset)
|
||||
ik.knee_target_e.length *= 0.5
|
||||
ik.knee_target_e.parent = ik.foot_e
|
||||
ik.knee_target_e.use_local_location = False
|
||||
|
||||
# roll the bone to point up... could also point in the same direction as ik.foot_roll
|
||||
# ik.foot_roll_02_e.matrix * Vector((0.0, 0.0, 1.0)) # ACK!, no rest matrix in editmode
|
||||
ik.foot_roll_01_e.align_roll((0.0, 0.0, -1.0))
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
ik.update()
|
||||
mt_chain.update()
|
||||
ik_chain.update()
|
||||
|
||||
# Set IK dof
|
||||
ik_chain.shin_p.lock_ik_x = False
|
||||
ik_chain.shin_p.lock_ik_y = True
|
||||
ik_chain.shin_p.lock_ik_z = True
|
||||
|
||||
# Set rotation modes and axis locks
|
||||
ik.foot_roll_p.rotation_mode = 'XYZ'
|
||||
ik.foot_roll_p.lock_rotation = False, True, True
|
||||
ik_chain.toe_p.rotation_mode = 'YXZ'
|
||||
ik_chain.toe_p.lock_rotation = False, True, True
|
||||
ik_chain.toe_p.lock_location = True, True, True
|
||||
ik.foot_roll_p.lock_location = True, True, True
|
||||
|
||||
# IK
|
||||
con = ik_chain.shin_p.constraints.new('IK')
|
||||
con.chain_count = 2
|
||||
con.iterations = 500
|
||||
con.pole_angle = -pi / 2.0
|
||||
con.use_tail = True
|
||||
con.use_stretch = True
|
||||
con.use_target = True
|
||||
con.use_rotation = False
|
||||
con.weight = 1.0
|
||||
|
||||
con.target = obj
|
||||
con.subtarget = ik_chain.foot
|
||||
|
||||
con.pole_target = obj
|
||||
con.pole_subtarget = ik.knee_target
|
||||
|
||||
# foot roll
|
||||
cons = [ \
|
||||
(ik.foot_roll_01_p.constraints.new('COPY_ROTATION'), ik.foot_roll_01_p.constraints.new('LIMIT_ROTATION')), \
|
||||
(ik.foot_roll_02_p.constraints.new('COPY_ROTATION'), ik.foot_roll_02_p.constraints.new('LIMIT_ROTATION'))]
|
||||
|
||||
for con, con_l in cons:
|
||||
con.target = obj
|
||||
con.subtarget = ik.foot_roll
|
||||
con.use_x, con.use_y, con.use_z = True, False, False
|
||||
con.target_space = con.owner_space = 'LOCAL'
|
||||
|
||||
con = con_l
|
||||
con.use_limit_x, con.use_limit_y, con.use_limit_z = True, False, False
|
||||
con.owner_space = 'LOCAL'
|
||||
|
||||
if con_l is cons[-1][-1]:
|
||||
con.min_x = 0.0
|
||||
con.max_x = 180.0 # XXX -deg
|
||||
else:
|
||||
con.min_x = -180.0 # XXX -deg
|
||||
con.max_x = 0.0
|
||||
|
||||
|
||||
# last step setup layers
|
||||
if "ik_layer" in options:
|
||||
layer = [n == options["ik_layer"] for n in range(0, 32)]
|
||||
else:
|
||||
layer = list(mt_chain.thigh_b.layers)
|
||||
for attr in ik_chain.attr_names:
|
||||
getattr(ik_chain, attr + "_b").layers = layer
|
||||
for attr in ik.attr_names:
|
||||
getattr(ik, attr + "_b").layers = layer
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
return (None, ik_chain.thigh, ik_chain.shin, ik_chain.foot, ik_chain.toe, None, ik.foot)
|
||||
|
||||
|
||||
def fk(obj, bone_definition, base_names, options):
|
||||
from mathutils import Vector
|
||||
arm = obj.data
|
||||
|
||||
# these account for all bones in METARIG_NAMES
|
||||
mt_chain = bone_class_instance(obj, ["thigh", "shin", "foot", "toe"])
|
||||
mt = bone_class_instance(obj, ["hips", "heel"])
|
||||
|
||||
# new bones
|
||||
ex = bone_class_instance(obj, ["thigh_socket", "thigh_hinge"])
|
||||
|
||||
for bone_class in (mt, mt_chain):
|
||||
for attr in bone_class.attr_names:
|
||||
i = METARIG_NAMES.index(attr)
|
||||
ebone = arm.edit_bones[bone_definition[i]]
|
||||
setattr(bone_class, attr, ebone.name)
|
||||
bone_class.update()
|
||||
|
||||
ex.thigh_socket_e = copy_bone_simple(arm, mt_chain.thigh, "MCH-%s_socket" % base_names[mt_chain.thigh], parent=True)
|
||||
ex.thigh_socket = ex.thigh_socket_e.name
|
||||
ex.thigh_socket_e.tail = ex.thigh_socket_e.head + Vector((0.0, 0.0, ex.thigh_socket_e.length / 4.0))
|
||||
|
||||
ex.thigh_hinge_e = copy_bone_simple(arm, mt.hips, "MCH-%s_hinge" % base_names[mt_chain.thigh], parent=False)
|
||||
ex.thigh_hinge = ex.thigh_hinge_e.name
|
||||
|
||||
fk_chain = mt_chain.copy(base_names=base_names) # fk has no prefix!
|
||||
fk_chain.foot_e.name = "MCH-" + fk_chain.foot
|
||||
fk_chain.foot = fk_chain.foot_e.name
|
||||
|
||||
# Set up fk foot control
|
||||
foot_e = copy_bone_simple(arm, mt.heel, base_names[mt_chain.foot])
|
||||
foot = foot_e.name
|
||||
foot_e.translate(mt_chain.foot_e.head - foot_e.head)
|
||||
foot_e.parent = fk_chain.shin_e
|
||||
foot_e.use_connect = fk_chain.foot_e.use_connect
|
||||
fk_chain.foot_e.use_connect = False
|
||||
fk_chain.foot_e.parent = foot_e
|
||||
|
||||
fk_chain.thigh_e.use_connect = False
|
||||
fk_chain.thigh_e.parent = ex.thigh_hinge_e
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
ex.update()
|
||||
mt_chain.update()
|
||||
fk_chain.update()
|
||||
foot_p = obj.pose.bones[foot]
|
||||
|
||||
# Set rotation modes and axis locks
|
||||
fk_chain.shin_p.rotation_mode = 'XYZ'
|
||||
fk_chain.shin_p.lock_rotation = False, True, True
|
||||
foot_p.rotation_mode = 'YXZ'
|
||||
fk_chain.toe_p.rotation_mode = 'YXZ'
|
||||
fk_chain.toe_p.lock_rotation = False, True, True
|
||||
fk_chain.thigh_p.lock_location = True, True, True
|
||||
|
||||
con = fk_chain.thigh_p.constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = ex.thigh_socket
|
||||
|
||||
# hinge
|
||||
prop = rna_idprop_ui_prop_get(fk_chain.thigh_p, "hinge", create=True)
|
||||
fk_chain.thigh_p["hinge"] = 0.0
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
con = ex.thigh_hinge_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = mt.hips
|
||||
|
||||
# add driver
|
||||
hinge_driver_path = fk_chain.thigh_p.path_from_id() + '["hinge"]'
|
||||
|
||||
fcurve = con.driver_add("influence")
|
||||
driver = fcurve.driver
|
||||
var = driver.variables.new()
|
||||
driver.type = 'AVERAGE'
|
||||
var.name = "var"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = hinge_driver_path
|
||||
|
||||
mod = fcurve.modifiers[0]
|
||||
mod.poly_order = 1
|
||||
mod.coefficients[0] = 1.0
|
||||
mod.coefficients[1] = -1.0
|
||||
|
||||
|
||||
# last step setup layers
|
||||
if "fk_layer" in options:
|
||||
layer = [n == options["fk_layer"] for n in range(0, 32)]
|
||||
else:
|
||||
layer = list(mt_chain.thigh_b.layers)
|
||||
for attr in fk_chain.attr_names:
|
||||
getattr(fk_chain, attr + "_b").layers = layer
|
||||
for attr in ex.attr_names:
|
||||
getattr(ex, attr + "_b").layers = layer
|
||||
arm.bones[foot].layers = layer
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
# dont blend the hips or heel
|
||||
return (None, fk_chain.thigh, fk_chain.shin, fk_chain.foot, fk_chain.toe, None, None)
|
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
# Create upper leg bones: two bones, each half of the upper leg.
|
||||
uleg1 = copy_bone_simple(obj.data, definitions[1], "DEF-%s.01" % base_names[definitions[1]], parent=True)
|
||||
uleg2 = copy_bone_simple(obj.data, definitions[1], "DEF-%s.02" % base_names[definitions[1]], parent=True)
|
||||
uleg1.use_connect = False
|
||||
uleg2.use_connect = False
|
||||
uleg2.parent = uleg1
|
||||
center = uleg1.center
|
||||
uleg1.tail = center
|
||||
uleg2.head = center
|
||||
|
||||
# Create lower leg bones: two bones, each half of the lower leg.
|
||||
lleg1 = copy_bone_simple(obj.data, definitions[2], "DEF-%s.01" % base_names[definitions[2]], parent=True)
|
||||
lleg2 = copy_bone_simple(obj.data, definitions[2], "DEF-%s.02" % base_names[definitions[2]], parent=True)
|
||||
lleg1.use_connect = False
|
||||
lleg2.use_connect = False
|
||||
lleg2.parent = lleg1
|
||||
center = lleg1.center
|
||||
lleg1.tail = center
|
||||
lleg2.head = center
|
||||
|
||||
# Create a bone for the second lower leg deform bone to twist with
|
||||
twist = copy_bone_simple(obj.data, lleg2.name, "MCH-leg_twist")
|
||||
twist.length /= 4
|
||||
twist.use_connect = False
|
||||
twist.parent = obj.data.edit_bones[definitions[3]]
|
||||
|
||||
# Create foot bone
|
||||
foot = copy_bone_simple(obj.data, definitions[3], "DEF-%s" % base_names[definitions[3]], parent=True)
|
||||
|
||||
# Create toe bone
|
||||
toe = copy_bone_simple(obj.data, definitions[4], "DEF-%s" % base_names[definitions[4]], parent=True)
|
||||
|
||||
# Store names before leaving edit mode
|
||||
uleg1_name = uleg1.name
|
||||
uleg2_name = uleg2.name
|
||||
lleg1_name = lleg1.name
|
||||
lleg2_name = lleg2.name
|
||||
twist_name = twist.name
|
||||
foot_name = foot.name
|
||||
toe_name = toe.name
|
||||
|
||||
# Leave edit mode
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Get the pose bones
|
||||
uleg1 = obj.pose.bones[uleg1_name]
|
||||
uleg2 = obj.pose.bones[uleg2_name]
|
||||
lleg1 = obj.pose.bones[lleg1_name]
|
||||
lleg2 = obj.pose.bones[lleg2_name]
|
||||
foot = obj.pose.bones[foot_name]
|
||||
toe = obj.pose.bones[toe_name]
|
||||
|
||||
# Upper leg constraints
|
||||
con = uleg1.constraints.new('DAMPED_TRACK')
|
||||
con.name = "trackto"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[2]
|
||||
|
||||
con = uleg1.constraints.new('COPY_SCALE')
|
||||
con.name = "scale"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[1]
|
||||
|
||||
con = uleg2.constraints.new('COPY_ROTATION')
|
||||
con.name = "copy_rot"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[1]
|
||||
|
||||
# Lower leg constraints
|
||||
con = lleg1.constraints.new('COPY_ROTATION')
|
||||
con.name = "copy_rot"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[2]
|
||||
|
||||
con = lleg1.constraints.new('COPY_SCALE')
|
||||
con.name = "copy_rot"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[2]
|
||||
|
||||
con = lleg2.constraints.new('COPY_ROTATION')
|
||||
con.name = "copy_rot"
|
||||
con.target = obj
|
||||
con.subtarget = twist_name
|
||||
|
||||
con = lleg2.constraints.new('DAMPED_TRACK')
|
||||
con.name = "trackto"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[3]
|
||||
|
||||
# Foot constraint
|
||||
con = foot.constraints.new('COPY_ROTATION')
|
||||
con.name = "copy_rot"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[3]
|
||||
|
||||
# Toe constraint
|
||||
con = toe.constraints.new('COPY_ROTATION')
|
||||
con.name = "copy_rot"
|
||||
con.target = obj
|
||||
con.subtarget = definitions[4]
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
return (uleg1_name, uleg2_name, lleg1_name, lleg2_name, foot_name, toe_name, None)
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
bones_fk = fk(obj, bone_definition, base_names, options)
|
||||
bones_ik = ik(obj, bone_definition, base_names, options)
|
||||
deform(obj, bone_definition, base_names, options)
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
blend_bone_list(obj, bone_definition + [None], bones_fk, bones_ik, target_bone=bones_ik[6], target_prop="ik", blend_default=1.0)
|
@ -1,497 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
from math import pi
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple, get_side_name, get_base_name
|
||||
from mathutils import Vector
|
||||
|
||||
METARIG_NAMES = "hips", "thigh", "shin", "foot", "toe"
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('body')
|
||||
bone.head[:] = -0.0728, -0.2427, 0.0000
|
||||
bone.tail[:] = -0.0728, -0.2427, 0.2427
|
||||
bone.roll = 0.0000
|
||||
bone.use_connect = False
|
||||
bone = arm.edit_bones.new('thigh')
|
||||
bone.head[:] = 0.0000, 0.0000, -0.0000
|
||||
bone.tail[:] = 0.0813, -0.2109, -0.3374
|
||||
bone.roll = -0.4656
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['body']
|
||||
bone = arm.edit_bones.new('shin')
|
||||
bone.head[:] = 0.0813, -0.2109, -0.3374
|
||||
bone.tail[:] = 0.0714, -0.0043, -0.5830
|
||||
bone.roll = -0.2024
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['thigh']
|
||||
bone = arm.edit_bones.new('foot')
|
||||
bone.head[:] = 0.0714, -0.0043, -0.5830
|
||||
bone.tail[:] = 0.0929, -0.0484, -0.7652
|
||||
bone.roll = -0.3766
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['shin']
|
||||
bone = arm.edit_bones.new('toe')
|
||||
bone.head[:] = 0.0929, -0.0484, -0.7652
|
||||
bone.tail[:] = 0.1146, -0.1244, -0.7652
|
||||
bone.roll = -0.0000
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['foot']
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['thigh']
|
||||
pbone['type'] = 'leg_quadruped'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
    '''
    The bone given is the first in a chain
    Expects a chain of at least 3 children.
    eg.
    thigh -> shin -> foot -> [toe, heel]
    '''
    thigh = obj.data.bones[orig_bone_name]
    hips = thigh.parent

    if hips is None:
        raise RigifyError("expected the thigh bone to have a parent hip bone")

    # hips first, then the chain starting at the given bone
    bone_definition = [hips.name, thigh.name]

    # Walk three single-child links down: shin, foot, toe.
    link = thigh
    for _ in range(3):
        children = link.children
        if len(children) != 1:
            raise RigifyError("expected the thigh bone to have 3 children without a fork")
        link = children[0]
        bone_definition.append(link.name)

    # Sanity check: result must match METARIG_NAMES one-to-one.
    if len(bone_definition) != len(METARIG_NAMES):
        raise RigifyError("internal problem, expected %d bones" % len(METARIG_NAMES))

    return bone_definition
|
||||
|
||||
|
||||
def ik(obj, bone_definition, base_names, options):
    """Build the IK control chain for the quadruped leg.

    obj             -- the armature object being rigged.
    bone_definition -- bone names in METARIG_NAMES order (hips..toe).
    base_names      -- mapping of rig bone name -> original/base name.
    options         -- rig options dict; "ik_layer" (int) selects the bone
                       layer for the generated IK bones when present.

    Returns (None, thigh, shin, foot, toe) names of the IK chain.
    NOTE: statement order here matters — edit-bone parenting/alignment must
    happen before leaving edit mode, and .update() refreshes cached
    references after the mode switch.
    """
    eb = obj.data.edit_bones
    pb = obj.pose.bones
    arm = obj.data
    bpy.ops.object.mode_set(mode='EDIT')

    # setup the existing bones, use names from METARIG_NAMES
    mt = bone_class_instance(obj, ["hips"])
    mt_chain = bone_class_instance(obj, ["thigh", "shin", "foot", "toe"])

    mt.attr_initialize(METARIG_NAMES, bone_definition)
    mt_chain.attr_initialize(METARIG_NAMES, bone_definition)

    # duplicate the thigh..toe chain as MCH-*.ik bones
    ik_chain = mt_chain.copy(to_fmt="MCH-%s.ik", base_names=base_names)

    ik_chain.thigh_e.use_connect = False
    ik_chain.thigh_e.parent = mt.hips_e

    # foot/toe become user-visible "*_ik" controls, detached from the chain
    ik_chain.foot_e.parent = None
    ik_chain.rename("foot", get_base_name(base_names[bone_definition[3]]) + "_ik" + get_side_name(base_names[bone_definition[3]]))
    ik_chain.rename("toe", get_base_name(base_names[bone_definition[4]]) + "_ik" + get_side_name(base_names[bone_definition[4]]))

    # keep the foot_ik as the parent
    ik_chain.toe_e.use_connect = False

    # Foot uses pose space, not local space, for translation
    ik_chain.foot_e.use_local_location = False

    # must be after disconnecting the toe
    ik_chain.foot_e.align_orientation(mt_chain.toe_e)

    # children of ik_foot
    ik = bone_class_instance(obj, ["foot_roll", "foot_roll_01", "foot_roll_02", "foot_target"])

    # knee rotator: hips child aimed like the toe, half the thigh length
    knee_rotator = copy_bone_simple(arm, mt_chain.toe, "knee_rotator" + get_side_name(base_names[mt_chain.foot]), parent=True).name
    eb[knee_rotator].use_connect = False
    eb[knee_rotator].parent = eb[mt.hips]
    eb[knee_rotator].head = eb[ik_chain.thigh].head
    eb[knee_rotator].tail = eb[knee_rotator].head + eb[mt_chain.toe].vector
    eb[knee_rotator].length = eb[ik_chain.thigh].length / 2
    eb[knee_rotator].roll += pi/2

    # parent ik leg to the knee rotator
    eb[ik_chain.thigh].parent = eb[knee_rotator]

    # foot roll is an interesting one!
    # plot a vector from the toe bones head, bactwards to the length of the foot
    # then align it with the foot but reverse direction.
    ik.foot_roll_e = copy_bone_simple(arm, mt_chain.toe, get_base_name(base_names[mt_chain.foot]) + "_roll" + get_side_name(base_names[mt_chain.foot]))
    ik.foot_roll = ik.foot_roll_e.name
    ik.foot_roll_e.use_connect = False
    ik.foot_roll_e.parent = ik_chain.foot_e
    # NOTE(review): this relies on the 2.5-era mathutils API where
    # Vector.normalize() returned the vector — confirm against mathutils docs.
    ik.foot_roll_e.head -= mt_chain.toe_e.vector.normalize() * mt_chain.foot_e.length
    ik.foot_roll_e.tail = ik.foot_roll_e.head - (mt_chain.foot_e.vector.normalize() * mt_chain.toe_e.length)
    ik.foot_roll_e.align_roll(mt_chain.foot_e.matrix.rotation_part() * Vector((0.0, 0.0, -1.0)))

    # MCH-foot: reversed foot bone (head/tail swapped)
    ik.foot_roll_01_e = copy_bone_simple(arm, mt_chain.foot, "MCH-" + base_names[mt_chain.foot])
    ik.foot_roll_01 = ik.foot_roll_01_e.name
    ik.foot_roll_01_e.parent = ik_chain.foot_e
    ik.foot_roll_01_e.head, ik.foot_roll_01_e.tail = mt_chain.foot_e.tail, mt_chain.foot_e.head
    ik.foot_roll_01_e.roll = ik.foot_roll_e.roll

    # ik_target, child of MCH-foot
    ik.foot_target_e = copy_bone_simple(arm, mt_chain.foot, "MCH-" + base_names[mt_chain.foot] + "_ik_target")
    ik.foot_target = ik.foot_target_e.name
    ik.foot_target_e.parent = ik.foot_roll_01_e
    ik.foot_target_e.align_orientation(ik_chain.foot_e)
    ik.foot_target_e.length = ik_chain.foot_e.length / 2.0
    ik.foot_target_e.use_connect = True

    # MCH-foot.02 child of MCH-foot
    ik.foot_roll_02_e = copy_bone_simple(arm, mt_chain.foot, "MCH-%s_02" % base_names[mt_chain.foot])
    ik.foot_roll_02 = ik.foot_roll_02_e.name
    ik.foot_roll_02_e.parent = ik.foot_roll_01_e

    bpy.ops.object.mode_set(mode='OBJECT')

    # refresh cached bone references after leaving edit mode
    mt.update()
    mt_chain.update()
    ik.update()
    ik_chain.update()

    # Set rotation modes and axis locks
    #pb[knee_rotator].rotation_mode = 'YXZ'
    #pb[knee_rotator].lock_rotation = False, True, False
    pb[knee_rotator].lock_location = True, True, True
    pb[ik.foot_roll].rotation_mode = 'XYZ'
    pb[ik.foot_roll].lock_rotation = False, True, True
    pb[ik_chain.toe].rotation_mode = 'XYZ'
    pb[ik_chain.toe].lock_rotation = False, True, True

    # IK switch property: custom float "ik" on the foot control, 0..1
    prop = rna_idprop_ui_prop_get(pb[ik_chain.foot], "ik", create=True)
    pb[ik_chain.foot]["ik"] = 1.0
    prop["soft_min"] = 0.0
    prop["soft_max"] = 1.0
    prop["min"] = 0.0
    prop["max"] = 1.0

    ik_driver_path = pb[ik_chain.foot].path_from_id() + '["ik"]'

    # simple constraining of orig bones
    # (each block: COPY_TRANSFORMS constraint driven by the "ik" property)
    con = mt_chain.thigh_p.constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = ik_chain.thigh
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    var = driver.variables.new()
    driver.type = 'AVERAGE'
    var.name = "var"
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = ik_driver_path

    con = mt_chain.shin_p.constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = ik_chain.shin
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    var = driver.variables.new()
    driver.type = 'AVERAGE'
    var.name = "var"
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = ik_driver_path

    # the original foot follows the heel mechanism bone, not the foot control
    con = mt_chain.foot_p.constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = ik.foot_roll_02
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    var = driver.variables.new()
    driver.type = 'AVERAGE'
    var.name = "var"
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = ik_driver_path

    con = mt_chain.toe_p.constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = ik_chain.toe
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    var = driver.variables.new()
    driver.type = 'AVERAGE'
    var.name = "var"
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = ik_driver_path

    # others...
    con = ik.foot_roll_01_p.constraints.new('COPY_ROTATION')
    con.target = obj
    con.subtarget = ik.foot_roll
    con.target_space = 'LOCAL'
    con.owner_space = 'LOCAL'

    # IK
    con = ik_chain.shin_p.constraints.new('IK')
    con.chain_count = 2
    con.iterations = 500
    con.pole_angle = -90.0 # XXX - in deg!
    con.use_tail = True
    con.use_stretch = True
    con.use_target = True
    con.use_rotation = False
    con.weight = 1.0

    con.target = obj
    con.subtarget = ik.foot_target

    con.pole_target = None

    ik.update()
    ik_chain.update()

    # Set layers of the bones.
    if "ik_layer" in options:
        layer = [n==options["ik_layer"] for n in range(0,32)]
    else:
        layer = list(mt_chain.thigh_b.layers)
    for attr in ik_chain.attr_names:
        obj.data.bones[getattr(ik_chain, attr)].layers = layer
    for attr in ik.attr_names:
        obj.data.bones[getattr(ik, attr)].layers = layer
    obj.data.bones[knee_rotator].layers = layer

    return None, ik_chain.thigh, ik_chain.shin, ik_chain.foot, ik_chain.toe
|
||||
|
||||
|
||||
|
||||
def fk(obj, bone_definition, base_names, options):
    """Build the FK control chain for the quadruped leg.

    obj             -- the armature object being rigged.
    bone_definition -- bone names in METARIG_NAMES order (hips..toe).
    base_names      -- mapping of rig bone name -> original/base name.
    options         -- rig options dict (unused here).

    Returns (None, thigh, shin, foot, toe) names of the FK chain.
    """
    eb = obj.data.edit_bones
    pb = obj.pose.bones
    arm = obj.data
    bpy.ops.object.mode_set(mode='EDIT')

    # setup the existing bones, use names from METARIG_NAMES
    mt = bone_class_instance(obj, ["hips"])
    mt_chain = bone_class_instance(obj, ["thigh", "shin", "foot", "toe"])

    mt.attr_initialize(METARIG_NAMES, bone_definition)
    mt_chain.attr_initialize(METARIG_NAMES, bone_definition)

    fk_chain = mt_chain.copy(to_fmt="%s", base_names=base_names)

    # Create the socket (keeps the leg located at the hips)
    socket = copy_bone_simple(arm, mt_chain.thigh, "MCH-leg_socket").name
    eb[socket].parent = eb[mt.hips]
    eb[socket].length = eb[mt_chain.thigh].length / 4

    # Create the hinge (lets rotation optionally not follow the hips)
    hinge = copy_bone_simple(arm, mt.hips, "MCH-leg_hinge").name
    eb[hinge].length = eb[mt.hips].length / 2

    # Make leg child of hinge
    eb[fk_chain.thigh].use_connect = False
    eb[fk_chain.thigh].parent = eb[hinge]

    bpy.ops.object.mode_set(mode='OBJECT')

    # Set rotation modes and axis locks
    pb[fk_chain.shin].rotation_mode = 'XYZ'
    pb[fk_chain.shin].lock_rotation = False, True, True

    # Constrain original bones to control bones
    con = mt_chain.thigh_p.constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = fk_chain.thigh

    con = mt_chain.shin_p.constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = fk_chain.shin

    con = mt_chain.foot_p.constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = fk_chain.foot

    con = mt_chain.toe_p.constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = fk_chain.toe

    # Socket constraint
    con = pb[fk_chain.thigh].constraints.new('COPY_LOCATION')
    con.target = obj
    con.subtarget = socket

    # Hinge constraint
    # NOTE: 'con' must still refer to this constraint when the driver is
    # added below — do not insert constraint creation between here and there.
    con = pb[hinge].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = mt.hips

    # "hinge" custom float property on the thigh control, 0..1
    prop = rna_idprop_ui_prop_get(pb[fk_chain.thigh], "hinge", create=True)
    pb[fk_chain.thigh]["hinge"] = 0.0
    prop["soft_min"] = 0.0
    prop["soft_max"] = 1.0
    prop["min"] = 0.0
    prop["max"] = 1.0

    hinge_driver_path = pb[fk_chain.thigh].path_from_id() + '["hinge"]'

    # drive the hinge constraint's influence from the "hinge" property
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    var = driver.variables.new()
    driver.type = 'AVERAGE'
    var.name = "var"
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = hinge_driver_path

    # invert the property: influence = 1.0 - hinge
    mod = fcurve.modifiers[0]
    mod.poly_order = 1
    mod.coefficients[0] = 1.0
    mod.coefficients[1] = -1.0

    return None, fk_chain.thigh, fk_chain.shin, fk_chain.foot, fk_chain.toe
|
||||
|
||||
|
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
    """Build the deform bones for the quadruped leg.

    Upper and lower leg are each split into two half-bones so the second
    half can twist independently; foot and toe get single DEF bones.

    definitions -- bone names in METARIG_NAMES order (hips..toe).
    Returns (uleg1, uleg2, lleg1, lleg2, foot, toe, None) bone names.
    Leaves the object in EDIT mode (callers rely on this).
    """
    bpy.ops.object.mode_set(mode='EDIT')

    # Create upper leg bones: two bones, each half of the upper leg.
    uleg1 = copy_bone_simple(obj.data, definitions[1], "DEF-%s.01" % base_names[definitions[1]], parent=True)
    uleg2 = copy_bone_simple(obj.data, definitions[1], "DEF-%s.02" % base_names[definitions[1]], parent=True)
    uleg1.use_connect = False
    uleg2.use_connect = False
    uleg2.parent = uleg1
    center = uleg1.center
    uleg1.tail = center
    uleg2.head = center

    # Create lower leg bones: two bones, each half of the lower leg.
    lleg1 = copy_bone_simple(obj.data, definitions[2], "DEF-%s.01" % base_names[definitions[2]], parent=True)
    lleg2 = copy_bone_simple(obj.data, definitions[2], "DEF-%s.02" % base_names[definitions[2]], parent=True)
    lleg1.use_connect = False
    lleg2.use_connect = False
    lleg2.parent = lleg1
    center = lleg1.center
    lleg1.tail = center
    lleg2.head = center

    # Create a bone for the second lower leg deform bone to twist with
    twist = copy_bone_simple(obj.data, lleg2.name, "MCH-leg_twist")
    twist.length /= 4
    twist.use_connect = False
    twist.parent = obj.data.edit_bones[definitions[3]]

    # Create foot bone
    foot = copy_bone_simple(obj.data, definitions[3], "DEF-%s" % base_names[definitions[3]], parent=True)

    # Create toe bone
    toe = copy_bone_simple(obj.data, definitions[4], "DEF-%s" % base_names[definitions[4]], parent=True)

    # Store names before leaving edit mode
    # (edit-bone references become invalid after the mode switch)
    uleg1_name = uleg1.name
    uleg2_name = uleg2.name
    lleg1_name = lleg1.name
    lleg2_name = lleg2.name
    twist_name = twist.name
    foot_name = foot.name
    toe_name = toe.name

    # Leave edit mode
    bpy.ops.object.mode_set(mode='OBJECT')

    # Get the pose bones
    uleg1 = obj.pose.bones[uleg1_name]
    uleg2 = obj.pose.bones[uleg2_name]
    lleg1 = obj.pose.bones[lleg1_name]
    lleg2 = obj.pose.bones[lleg2_name]
    foot = obj.pose.bones[foot_name]
    toe = obj.pose.bones[toe_name]

    # Upper leg constraints
    con = uleg1.constraints.new('DAMPED_TRACK')
    con.name = "trackto"
    con.target = obj
    con.subtarget = definitions[2]

    con = uleg2.constraints.new('COPY_ROTATION')
    con.name = "copy_rot"
    con.target = obj
    con.subtarget = definitions[1]

    # Lower leg constraints
    con = lleg1.constraints.new('COPY_ROTATION')
    con.name = "copy_rot"
    con.target = obj
    con.subtarget = definitions[2]

    con = lleg2.constraints.new('COPY_ROTATION')
    con.name = "copy_rot"
    con.target = obj
    con.subtarget = twist_name

    con = lleg2.constraints.new('DAMPED_TRACK')
    con.name = "trackto"
    con.target = obj
    con.subtarget = definitions[3]

    # Foot constraint
    con = foot.constraints.new('COPY_ROTATION')
    con.name = "copy_rot"
    con.target = obj
    con.subtarget = definitions[3]

    # Toe constraint
    con = toe.constraints.new('COPY_ROTATION')
    con.name = "copy_rot"
    con.target = obj
    con.subtarget = definitions[4]

    bpy.ops.object.mode_set(mode='EDIT')
    return (uleg1_name, uleg2_name, lleg1_name, lleg2_name, foot_name, toe_name, None)
|
||||
|
||||
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
    """Rig entry point: build FK, then IK, then deform bones.

    Returns the IK chain tuple, which rigify uses as the control bones.
    """
    fk(obj, bone_definition, base_names, options)
    bones_ik = ik(obj, bone_definition, base_names, options)
    deform(obj, bone_definition, base_names, options)
    return bones_ik
|
@ -1,756 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
from math import acos, pi
|
||||
from mathutils import Vector
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import copy_bone_simple
|
||||
|
||||
#METARIG_NAMES = ("cpy",)
|
||||
RIG_TYPE = "mouth"  # rig type identifier used in this module's error messages
|
||||
|
||||
|
||||
def mark_actions():
    """Tag every existing action so newly created ones can be told apart."""
    for act in bpy.data.actions:
        act.tag = True
|
||||
|
||||
def get_unmarked_action():
    """Return the first action whose tag is unset, or None if all are tagged."""
    return next((act for act in bpy.data.actions if act.tag != True), None)
|
||||
|
||||
def add_action(name=None):
    """Create a new action via bpy.ops and return it, optionally renamed.

    bpy.ops.action.new() gives no handle to the created action, so all
    existing actions are tagged first and the new one is found as the
    only untagged action afterwards.
    """
    mark_actions()
    bpy.ops.action.new()
    action = get_unmarked_action()
    if name is not None:
        action.name = name
    return action
|
||||
|
||||
def addget_shape_key(obj, name="Key"):
    """ Fetches a shape key, or creates it if it doesn't exist
    """
    # Create a shapekey set if it doesn't already exist
    # (a "Basis" key is required before any other key can be added)
    if obj.data.shape_keys is None:
        shape = obj.add_shape_key(name="Basis", from_mix=False)
        obj.active_shape_key_index = 0

    # Get the shapekey, or create it if it doesn't already exist
    if name in obj.data.shape_keys.keys:
        shape_key = obj.data.shape_keys.keys[name]
    else:
        shape_key = obj.add_shape_key(name=name, from_mix=False)

    return shape_key
|
||||
|
||||
|
||||
def addget_shape_key_driver(obj, name="Key"):
    """ Fetches the driver for the shape key, or creates it if it doesn't
    already exist.

    Returns (fcurve, new) where 'new' is True when the driver was just
    created (callers use this to decide whether to set modifier defaults).
    """
    driver_path = 'keys["' + name + '"].value'
    fcurve = None
    driver = None
    new = False
    # scan existing drivers for one on this shape key's value
    # (no early break — the last match wins, though paths should be unique)
    if obj.data.shape_keys.animation_data is not None:
        for driver_s in obj.data.shape_keys.animation_data.drivers:
            if driver_s.data_path == driver_path:
                fcurve = driver_s
    if fcurve is None:
        fcurve = obj.data.shape_keys.keys[name].driver_add("value")
        fcurve.driver.type = 'AVERAGE'
        new = True

    return fcurve, new
|
||||
|
||||
|
||||
def metarig_template():
    """Build the minimal single-bone example metarig for this rig type."""
    # generated by rigify.write_meta_rig
    bpy.ops.object.mode_set(mode='EDIT')
    obj = bpy.context.active_object
    arm = obj.data
    bone = arm.edit_bones.new('Bone')
    bone.head[:] = 0.0000, 0.0000, 0.0000
    bone.tail[:] = 0.0000, 0.0000, 1.0000
    bone.roll = 0.0000
    bone.use_connect = False

    bpy.ops.object.mode_set(mode='OBJECT')
    pbone = obj.pose.bones['Bone']
    pbone['type'] = 'copy'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
    """Resolve the mouth rig's bone list from the given bone.

    Builds [grandparent, parent, bone] followed by the bone's recursive
    same-base-name children, and returns the first 10 names.
    Raises RigifyError when the bone lacks two parents or the chain is
    shorter than 10 entries.
    """
    bone = obj.data.bones[orig_bone_name]
    chain = []

    # bone.parent or bone.parent.parent may be None -> AttributeError
    try:
        chain += [bone.parent.parent.name, bone.parent.name, bone.name]
    except AttributeError:
        raise RigifyError("'%s' rig type requires a chain of two parents (bone: %s)" % (RIG_TYPE, orig_bone_name))

    chain += [child.name for child in bone.children_recursive_basename]

    # NOTE(review): message says "8 bones" but the check requires 10 chain
    # entries (2 parents + bone + 7 children) — presumably "8" counts the
    # bone plus its children; confirm before touching the message.
    if len(chain) < 10:
        raise RigifyError("'%s' rig type requires a chain of 8 bones (bone: %s)" % (RIG_TYPE, orig_bone_name))

    return chain[:10]
|
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
    """Build the mouth's deform bones, helper bones and corrective drivers.

    definitions -- 10 bone names: [head-ish root, jaw, lip1..lip8]
                   (indices 2..9 are the lip ring, per metarig_definition).
    options     -- must contain "mesh": comma-separated mesh object names
                   that receive the corrective shape keys.

    Creates DEF lip bones, MCH "spread" bones at the mouth corners and MCH
    jaw-open tracker bones, then adds ROTATION_DIFF / LOC_DIFF driven
    corrective shape keys on each listed mesh.  Mode switches below are
    order-critical: bone surgery in EDIT mode, constraints/drivers in
    OBJECT mode.
    """
    bpy.ops.object.mode_set(mode='EDIT')

    eb = obj.data.edit_bones
    bb = obj.data.bones
    pb = obj.pose.bones

    jaw = definitions[1]

    # Options
    req_options = ["mesh"]
    for option in req_options:
        if option not in options:
            raise RigifyError("'%s' rig type requires a '%s' option (bone: %s)" % (RIG_TYPE, option, base_names[definitions[0]]))

    meshes = options["mesh"].replace(" ", "").split(",")

    # Lip DEF
    lip1 = copy_bone_simple(obj.data, definitions[2], "DEF-" + base_names[definitions[2]]).name
    lip2 = copy_bone_simple(obj.data, definitions[3], "DEF-" + base_names[definitions[3]]).name
    lip3 = copy_bone_simple(obj.data, definitions[4], "DEF-" + base_names[definitions[4]]).name
    lip4 = copy_bone_simple(obj.data, definitions[5], "DEF-" + base_names[definitions[5]]).name
    lip5 = copy_bone_simple(obj.data, definitions[6], "DEF-" + base_names[definitions[6]]).name
    lip6 = copy_bone_simple(obj.data, definitions[7], "DEF-" + base_names[definitions[7]]).name
    lip7 = copy_bone_simple(obj.data, definitions[8], "DEF-" + base_names[definitions[8]]).name
    lip8 = copy_bone_simple(obj.data, definitions[9], "DEF-" + base_names[definitions[9]]).name

    # Mouth corner spread bones (for driving corrective shape keys)
    spread_l_1 = copy_bone_simple(obj.data, definitions[6], "MCH-" + base_names[definitions[6]] + ".spread_1").name
    spread_l_2 = copy_bone_simple(obj.data, definitions[6], "MCH-" + base_names[definitions[6]] + ".spread_2").name
    eb[spread_l_1].tail = eb[definitions[5]].head
    eb[spread_l_2].tail = eb[definitions[5]].head
    eb[spread_l_1].roll = 0
    eb[spread_l_2].roll = 0
    eb[spread_l_1].use_connect = False
    eb[spread_l_2].use_connect = False
    eb[spread_l_1].parent = eb[definitions[6]]
    eb[spread_l_2].parent = eb[definitions[6]]

    spread_r_1 = copy_bone_simple(obj.data, definitions[2], "MCH-" + base_names[definitions[2]] + ".spread_1").name
    spread_r_2 = copy_bone_simple(obj.data, definitions[2], "MCH-" + base_names[definitions[2]] + ".spread_2").name
    eb[spread_r_1].tail = eb[definitions[3]].head
    eb[spread_r_2].tail = eb[definitions[3]].head
    eb[spread_r_1].roll = 0
    eb[spread_r_2].roll = 0
    eb[spread_r_1].use_connect = False
    eb[spread_r_2].use_connect = False
    eb[spread_r_1].parent = eb[definitions[2]]
    eb[spread_r_2].parent = eb[definitions[2]]

    # Jaw open bones (for driving corrective shape keys)
    jopen1 = copy_bone_simple(obj.data, jaw, "MCH-"+base_names[jaw]+".track1", parent=True).name
    eb[jopen1].use_connect = False
    eb[jopen1].head = eb[jaw].tail
    eb[jopen1].tail = eb[jopen1].head + Vector((0, 0, eb[jaw].length/4))

    jopen2 = copy_bone_simple(obj.data, jopen1, "MCH-"+base_names[jaw]+".track2").name
    eb[jopen2].parent = eb[jaw]

    bpy.ops.object.mode_set(mode='OBJECT')

    # Constrain DEF bones to ORG bones
    con = pb[lip1].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = definitions[2]

    con = pb[lip2].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = definitions[3]

    con = pb[lip3].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = definitions[4]

    con = pb[lip4].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = definitions[5]

    con = pb[lip5].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = definitions[6]

    con = pb[lip6].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = definitions[7]

    con = pb[lip7].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = definitions[8]

    con = pb[lip8].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = definitions[9]

    # Constraint mouth corner spread bones
    con = pb[spread_l_1].constraints.new('DAMPED_TRACK')
    con.target = obj
    con.subtarget = lip4

    con = pb[spread_l_2].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = spread_l_1

    con = pb[spread_l_2].constraints.new('DAMPED_TRACK')
    con.target = obj
    con.subtarget = lip6

    con = pb[spread_r_1].constraints.new('DAMPED_TRACK')
    con.target = obj
    con.subtarget = lip2

    con = pb[spread_r_2].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = spread_r_1

    con = pb[spread_r_2].constraints.new('DAMPED_TRACK')
    con.target = obj
    con.subtarget = lip8

    # Corrective shape keys for the corners of the mouth.
    bpy.ops.object.mode_set(mode='EDIT')

    # Calculate the rotation difference between the bones
    # NOTE(review): relies on the 2.5-era mathutils API where
    # Vector.normalize() returned the vector — confirm against mathutils docs.
    rotdiff_l = acos((eb[lip5].head - eb[lip4].head).normalize().dot((eb[lip5].head - eb[lip6].head).normalize()))
    rotdiff_r = acos((eb[lip1].head - eb[lip2].head).normalize().dot((eb[lip1].head - eb[lip8].head).normalize()))

    bpy.ops.object.mode_set(mode='OBJECT')

    # Left side shape key
    for mesh_name in meshes:
        mesh_obj = bpy.data.objects[mesh_name]
        shape_key_name = "COR-" + base_names[definitions[6]] + ".spread"

        # Add/get the shape key
        shape_key = addget_shape_key(mesh_obj, name=shape_key_name)

        # Add/get the shape key driver
        fcurve, is_new_driver = addget_shape_key_driver(mesh_obj, name=shape_key_name)
        driver = fcurve.driver

        # Get the variable, or create it if it doesn't already exist
        var_name = base_names[definitions[6]]
        if var_name in driver.variables:
            var = driver.variables[var_name]
        else:
            var = driver.variables.new()
            var.name = var_name

        # Set up the variable: angle between the two corner spread bones
        var.type = "ROTATION_DIFF"
        var.targets[0].id = obj
        var.targets[0].bone_target = spread_l_1
        var.targets[1].id = obj
        var.targets[1].bone_target = spread_l_2

        # Set fcurve offset so the key is 0 at the rest angle, 1 at pi
        if is_new_driver:
            mod = fcurve.modifiers[0]
            if rotdiff_l != pi:
                mod.coefficients[0] = -rotdiff_l / (pi-rotdiff_l)
                mod.coefficients[1] = 1 / (pi-rotdiff_l)

    # Right side shape key
    for mesh_name in meshes:
        mesh_obj = bpy.data.objects[mesh_name]
        shape_key_name = "COR-" + base_names[definitions[2]] + ".spread"

        # Add/get the shape key
        shape_key = addget_shape_key(mesh_obj, name=shape_key_name)

        # Add/get the shape key driver
        fcurve, is_new_driver = addget_shape_key_driver(mesh_obj, name=shape_key_name)
        driver = fcurve.driver

        # Get the variable, or create it if it doesn't already exist
        var_name = base_names[definitions[2]]
        if var_name in driver.variables:
            var = driver.variables[var_name]
        else:
            var = driver.variables.new()
            var.name = var_name

        # Set up the variable: angle between the two corner spread bones
        var.type = "ROTATION_DIFF"
        var.targets[0].id = obj
        var.targets[0].bone_target = spread_r_1
        var.targets[1].id = obj
        var.targets[1].bone_target = spread_r_2

        # Set fcurve offset so the key is 0 at the rest angle, 1 at pi
        if is_new_driver:
            mod = fcurve.modifiers[0]
            if rotdiff_r != pi:
                mod.coefficients[0] = -rotdiff_r / (pi-rotdiff_r)
                mod.coefficients[1] = 1 / (pi-rotdiff_r)

    # Jaw open corrective shape key
    for mesh_name in meshes:
        mesh_obj = bpy.data.objects[mesh_name]
        shape_key_name = "COR-" + base_names[definitions[4]] + ".jaw_open"

        # Add/get the shape key
        shape_key = addget_shape_key(mesh_obj, name=shape_key_name)

        # Add/get the shape key driver
        fcurve, is_new_driver = addget_shape_key_driver(mesh_obj, name=shape_key_name)
        driver = fcurve.driver

        # Get the variable, or create it if it doesn't already exist
        var_name = base_names[definitions[4]]
        if var_name in driver.variables:
            var = driver.variables[var_name]
        else:
            var = driver.variables.new()
            var.name = var_name

        # Set up the variable: distance between the two jaw tracker bones
        var.type = "LOC_DIFF"
        var.targets[0].id = obj
        var.targets[0].bone_target = jopen1
        var.targets[1].id = obj
        var.targets[1].bone_target = jopen2

        # Set fcurve offset: normalize the distance by the jaw length
        if is_new_driver:
            mod = fcurve.modifiers[0]
            mod.coefficients[0] = 0.0
            mod.coefficients[1] = 1.0 / bb[jaw].length

    return (None,)
|
||||
|
||||
|
||||
|
||||
|
||||
def control(obj, definitions, base_names, options):
    """Build the mouth control rig.

    Bone layout in ``definitions`` (as indexed below):
    [0] = head, [1] = jaw, [2]..[9] = the eight lip bones.

    For each lip this creates three bones: a head-parented MCH copy, a
    jaw-parented MCH copy, and a user-facing control bone.  The head copies
    blend toward the jaw copies via COPY_TRANSFORMS constraints with fixed
    per-corner influences, and a jaw-tracking stretch bone drives a
    "mouth_open" action on every lip control.  Returns ``(None,)``.
    """
    bpy.ops.object.mode_set(mode='EDIT')

    eb = obj.data.edit_bones
    bb = obj.data.bones
    pb = obj.pose.bones

    head_e = eb[definitions[0]]
    jaw_e = eb[definitions[1]]
    jaw = definitions[1]

    # Head lips: MCH duplicates of the eight lip bones, parented to the head
    hlip1 = copy_bone_simple(obj.data, definitions[2], "MCH-"+base_names[definitions[2]]+".head").name
    hlip2 = copy_bone_simple(obj.data, definitions[3], "MCH-"+base_names[definitions[3]]+".head").name
    hlip3 = copy_bone_simple(obj.data, definitions[4], "MCH-"+base_names[definitions[4]]+".head").name
    hlip4 = copy_bone_simple(obj.data, definitions[5], "MCH-"+base_names[definitions[5]]+".head").name
    hlip5 = copy_bone_simple(obj.data, definitions[6], "MCH-"+base_names[definitions[6]]+".head").name
    hlip6 = copy_bone_simple(obj.data, definitions[7], "MCH-"+base_names[definitions[7]]+".head").name
    hlip7 = copy_bone_simple(obj.data, definitions[8], "MCH-"+base_names[definitions[8]]+".head").name
    hlip8 = copy_bone_simple(obj.data, definitions[9], "MCH-"+base_names[definitions[9]]+".head").name

    eb[hlip1].parent = head_e
    eb[hlip2].parent = head_e
    eb[hlip3].parent = head_e
    eb[hlip4].parent = head_e
    eb[hlip5].parent = head_e
    eb[hlip6].parent = head_e
    eb[hlip7].parent = head_e
    eb[hlip8].parent = head_e

    # Jaw lips: a second set of MCH duplicates, parented to the jaw
    jlip1 = copy_bone_simple(obj.data, definitions[2], "MCH-"+base_names[definitions[2]]+".jaw").name
    jlip2 = copy_bone_simple(obj.data, definitions[3], "MCH-"+base_names[definitions[3]]+".jaw").name
    jlip3 = copy_bone_simple(obj.data, definitions[4], "MCH-"+base_names[definitions[4]]+".jaw").name
    jlip4 = copy_bone_simple(obj.data, definitions[5], "MCH-"+base_names[definitions[5]]+".jaw").name
    jlip5 = copy_bone_simple(obj.data, definitions[6], "MCH-"+base_names[definitions[6]]+".jaw").name
    jlip6 = copy_bone_simple(obj.data, definitions[7], "MCH-"+base_names[definitions[7]]+".jaw").name
    jlip7 = copy_bone_simple(obj.data, definitions[8], "MCH-"+base_names[definitions[8]]+".jaw").name
    jlip8 = copy_bone_simple(obj.data, definitions[9], "MCH-"+base_names[definitions[9]]+".jaw").name

    eb[jlip1].parent = jaw_e
    eb[jlip2].parent = jaw_e
    eb[jlip3].parent = jaw_e
    eb[jlip4].parent = jaw_e
    eb[jlip5].parent = jaw_e
    eb[jlip6].parent = jaw_e
    eb[jlip7].parent = jaw_e
    eb[jlip8].parent = jaw_e

    # Control lips: the animator-visible bones, parented to the head copies
    lip1 = copy_bone_simple(obj.data, definitions[2], base_names[definitions[2]]).name
    lip2 = copy_bone_simple(obj.data, definitions[3], base_names[definitions[3]]).name
    lip3 = copy_bone_simple(obj.data, definitions[4], base_names[definitions[4]]).name
    lip4 = copy_bone_simple(obj.data, definitions[5], base_names[definitions[5]]).name
    lip5 = copy_bone_simple(obj.data, definitions[6], base_names[definitions[6]]).name
    lip6 = copy_bone_simple(obj.data, definitions[7], base_names[definitions[7]]).name
    lip7 = copy_bone_simple(obj.data, definitions[8], base_names[definitions[8]]).name
    lip8 = copy_bone_simple(obj.data, definitions[9], base_names[definitions[9]]).name

    eb[lip1].parent = eb[hlip1]
    eb[lip2].parent = eb[hlip2]
    eb[lip3].parent = eb[hlip3]
    eb[lip4].parent = eb[hlip4]
    eb[lip5].parent = eb[hlip5]
    eb[lip6].parent = eb[hlip6]
    eb[lip7].parent = eb[hlip7]
    eb[lip8].parent = eb[hlip8]

    # Jaw open tracker: starts at the jaw tip and points straight up
    # (+Z, one jaw-length long); its Y scale under STRETCH_TO measures
    # how far the mouth is open.
    jopent = copy_bone_simple(obj.data, jaw_e.name, "MCH-"+base_names[jaw_e.name]+".track", parent=True).name
    eb[jopent].use_connect = False
    eb[jopent].tail = jaw_e.tail + Vector((0.0, 0.0, jaw_e.length))
    eb[jopent].head = jaw_e.tail

    bpy.ops.object.mode_set(mode='OBJECT')

    # Add mouth open action if it doesn't already exist
    action_name = "mouth_open"
    if action_name in bpy.data.actions:
        open_action = bpy.data.actions[action_name]
    else:
        # add_action is presumably a sibling helper in this file -- TODO confirm
        open_action = add_action(name=action_name)

    # Add close property (useful when making the animation in the action)
    prop_name = "open_action"
    prop = rna_idprop_ui_prop_get(pb[lip1], prop_name, create=True)
    pb[lip1][prop_name] = 1.0
    prop["soft_min"] = 0.0
    prop["soft_max"] = 1.0
    prop["min"] = 0.0
    prop["max"] = 1.0

    # Path used by every action-constraint driver below
    open_driver_path = pb[lip1].path_from_id() + '["open_action"]'


    # Constraints

    # Jaw open tracker stretches to jaw tip
    con = pb[jopent].constraints.new('STRETCH_TO')
    con.target = obj
    con.subtarget = jaw
    con.head_tail = 1.0
    con.rest_length = bb[jopent].length
    con.volume = 'NO_VOLUME'

    # Head lips to jaw lips.
    # Fixed weight table; note the upper lips (1-5) copy the jaw directly
    # while the lower lips (6-8) use 1.0 minus a table entry.
    influence = [0.02, 0.1, 0.35, 0.25, 0.0]

    con = pb[hlip1].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = jlip1
    con.influence = influence[2]

    con = pb[hlip2].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = jlip2
    con.influence = influence[1]

    con = pb[hlip3].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = jlip3
    con.influence = influence[0]

    con = pb[hlip4].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = jlip4
    con.influence = influence[1]

    con = pb[hlip5].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = jlip5
    con.influence = influence[2]

    con = pb[hlip6].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = jlip6
    con.influence = 1.0 - influence[3]

    con = pb[hlip7].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = jlip7
    con.influence = 1.0 - influence[4]

    con = pb[hlip8].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = jlip8
    con.influence = 1.0 - influence[3]

    # ORG bones to lips: the original lip bones follow the controls
    con = pb[definitions[2]].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = lip1

    con = pb[definitions[3]].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = lip2

    con = pb[definitions[4]].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = lip3

    con = pb[definitions[5]].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = lip4

    con = pb[definitions[6]].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = lip5

    con = pb[definitions[7]].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = lip6

    con = pb[definitions[8]].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = lip7

    con = pb[definitions[9]].constraints.new('COPY_TRANSFORMS')
    con.target = obj
    con.subtarget = lip8

    # Action constraints for open mouth.
    # One per lip control: the tracker's Y scale (0..1, mapped to frames
    # 0..60) plays the "mouth_open" action, and the constraint's influence
    # is driven by the "open_action" custom property on lip1.
    con = pb[lip1].constraints.new('ACTION')
    con.target = obj
    con.subtarget = jopent
    con.action = open_action
    con.transform_channel = 'SCALE_Y'
    con.frame_start = 0
    con.frame_end = 60
    con.min = 0.0
    con.max = 1.0
    con.target_space = 'LOCAL'
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    driver.type = 'AVERAGE'
    var = driver.variables.new()
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = open_driver_path

    con = pb[lip2].constraints.new('ACTION')
    con.target = obj
    con.subtarget = jopent
    con.action = open_action
    con.transform_channel = 'SCALE_Y'
    con.frame_start = 0
    con.frame_end = 60
    con.min = 0.0
    con.max = 1.0
    con.target_space = 'LOCAL'
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    driver.type = 'AVERAGE'
    var = driver.variables.new()
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = open_driver_path

    con = pb[lip3].constraints.new('ACTION')
    con.target = obj
    con.subtarget = jopent
    con.action = open_action
    con.transform_channel = 'SCALE_Y'
    con.frame_start = 0
    con.frame_end = 60
    con.min = 0.0
    con.max = 1.0
    con.target_space = 'LOCAL'
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    driver.type = 'AVERAGE'
    var = driver.variables.new()
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = open_driver_path

    con = pb[lip4].constraints.new('ACTION')
    con.target = obj
    con.subtarget = jopent
    con.action = open_action
    con.transform_channel = 'SCALE_Y'
    con.frame_start = 0
    con.frame_end = 60
    con.min = 0.0
    con.max = 1.0
    con.target_space = 'LOCAL'
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    driver.type = 'AVERAGE'
    var = driver.variables.new()
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = open_driver_path

    con = pb[lip5].constraints.new('ACTION')
    con.target = obj
    con.subtarget = jopent
    con.action = open_action
    con.transform_channel = 'SCALE_Y'
    con.frame_start = 0
    con.frame_end = 60
    con.min = 0.0
    con.max = 1.0
    con.target_space = 'LOCAL'
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    driver.type = 'AVERAGE'
    var = driver.variables.new()
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = open_driver_path

    con = pb[lip6].constraints.new('ACTION')
    con.target = obj
    con.subtarget = jopent
    con.action = open_action
    con.transform_channel = 'SCALE_Y'
    con.frame_start = 0
    con.frame_end = 60
    con.min = 0.0
    con.max = 1.0
    con.target_space = 'LOCAL'
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    driver.type = 'AVERAGE'
    var = driver.variables.new()
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = open_driver_path

    con = pb[lip7].constraints.new('ACTION')
    con.target = obj
    con.subtarget = jopent
    con.action = open_action
    con.transform_channel = 'SCALE_Y'
    con.frame_start = 0
    con.frame_end = 60
    con.min = 0.0
    con.max = 1.0
    con.target_space = 'LOCAL'
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    driver.type = 'AVERAGE'
    var = driver.variables.new()
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = open_driver_path

    con = pb[lip8].constraints.new('ACTION')
    con.target = obj
    con.subtarget = jopent
    con.action = open_action
    con.transform_channel = 'SCALE_Y'
    con.frame_start = 0
    con.frame_end = 60
    con.min = 0.0
    con.max = 1.0
    con.target_space = 'LOCAL'
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    driver.type = 'AVERAGE'
    var = driver.variables.new()
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = open_driver_path


    # Set layers: the controls share the layer set of the first ORG lip bone
    layer = list(bb[definitions[2]].layers)
    bb[lip1].layers = layer
    bb[lip2].layers = layer
    bb[lip3].layers = layer
    bb[lip4].layers = layer
    bb[lip5].layers = layer
    bb[lip6].layers = layer
    bb[lip7].layers = layer
    bb[lip8].layers = layer


    return (None,)
|
||||
|
||||
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
    """Entry point for the mouth rig: build the control rig, then the
    deform rig, over the metarig bones listed in bone_definition.
    Returns (None,) -- no blend targets for the generator.
    """
    # Create control rig
    control(obj, bone_definition, base_names, options)
    # Create deform rig
    deform(obj, bone_definition, base_names, options)

    return (None,)
|
||||
|
||||
|
||||
|
||||
|
||||
def make_lip_stretch_bone(obj, name, bone1, bone2, roll_alpha):
    """Create a bone that stretches from bone1's head to bone2's head.

    roll_alpha (0..1) weights whose Y axis the new bone's roll is aligned
    against: 0 = bone1 only, 1 = bone2 only.  The bone is pinned with
    COPY_LOCATION to bone1 and DAMPED_TRACK + STRETCH_TO toward bone2.
    Assumes the object is in edit mode on entry; returns to edit mode
    before returning.  Returns the new bone's name.
    """
    eb = obj.data.edit_bones
    pb = obj.pose.bones

    # Create the bone, pointing from bone1 to bone2
    bone_e = copy_bone_simple(obj.data, bone1, name, parent=True)
    bone_e.use_connect = False
    bone_e.tail = eb[bone2].head
    bone = bone_e.name

    # Align the bone roll with the average direction of bone1 and bone2.
    # vec is perpendicular to the bone's Y axis and to the blended
    # direction; NOTE(review): relies on normalize() returning the vector
    # (old 2.5 mathutils behaviour) -- confirm against the mathutils API.
    vec = bone_e.y_axis.cross(((1.0-roll_alpha)*eb[bone1].y_axis) + (roll_alpha*eb[bone2].y_axis)).normalize()

    # '*' on two Vectors is a dot product in this API version
    ang = acos(vec * bone_e.x_axis)

    # Try rolling by +ang and -ang, keep whichever brings the X axis
    # closer to vec (larger dot product).
    bone_e.roll += ang
    c1 = vec * bone_e.x_axis
    bone_e.roll -= (ang*2)
    c2 = vec * bone_e.x_axis

    if c1 > c2:
        bone_e.roll += (ang*2)

    bpy.ops.object.mode_set(mode='OBJECT')
    bone_p = pb[bone]

    # Constraints
    con = bone_p.constraints.new('COPY_LOCATION')
    con.target = obj
    con.subtarget = bone1

    con = bone_p.constraints.new('DAMPED_TRACK')
    con.target = obj
    con.subtarget = bone2

    con = bone_p.constraints.new('STRETCH_TO')
    con.target = obj
    con.subtarget = bone2
    con.volume = 'NO_VOLUME'

    bpy.ops.object.mode_set(mode='EDIT')

    return bone
|
@ -1,344 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
|
||||
|
||||
|
||||
def metarig_template():
    """Placeholder metarig template for the neck_flex rig type.

    The generated template body below is intentionally commented out
    (marked TODO) -- this function currently does nothing.
    """
    # TODO:
    ## generated by rigify.write_meta_rig
    #bpy.ops.object.mode_set(mode='EDIT')
    #obj = bpy.context.active_object
    #arm = obj.data
    #bone = arm.edit_bones.new('body')
    #bone.head[:] = 0.0000, -0.0276, -0.1328
    #bone.tail[:] = 0.0000, -0.0170, -0.0197
    #bone.roll = 0.0000
    #bone.use_connect = False
    #bone = arm.edit_bones.new('head')
    #bone.head[:] = 0.0000, -0.0170, -0.0197
    #bone.tail[:] = 0.0000, 0.0726, 0.1354
    #bone.roll = 0.0000
    #bone.use_connect = True
    #bone.parent = arm.edit_bones['body']
    #bone = arm.edit_bones.new('neck.01')
    #bone.head[:] = 0.0000, -0.0170, -0.0197
    #bone.tail[:] = 0.0000, -0.0099, 0.0146
    #bone.roll = 0.0000
    #bone.use_connect = False
    #bone.parent = arm.edit_bones['head']
    #bone = arm.edit_bones.new('neck.02')
    #bone.head[:] = 0.0000, -0.0099, 0.0146
    #bone.tail[:] = 0.0000, -0.0242, 0.0514
    #bone.roll = 0.0000
    #bone.use_connect = True
    #bone.parent = arm.edit_bones['neck.01']
    #bone = arm.edit_bones.new('neck.03')
    #bone.head[:] = 0.0000, -0.0242, 0.0514
    #bone.tail[:] = 0.0000, -0.0417, 0.0868
    #bone.roll = 0.0000
    #bone.use_connect = True
    #bone.parent = arm.edit_bones['neck.02']
    #bone = arm.edit_bones.new('neck.04')
    #bone.head[:] = 0.0000, -0.0417, 0.0868
    #bone.tail[:] = 0.0000, -0.0509, 0.1190
    #bone.roll = 0.0000
    #bone.use_connect = True
    #bone.parent = arm.edit_bones['neck.03']
    #bone = arm.edit_bones.new('neck.05')
    #bone.head[:] = 0.0000, -0.0509, 0.1190
    #bone.tail[:] = 0.0000, -0.0537, 0.1600
    #bone.roll = 0.0000
    #bone.use_connect = True
    #bone.parent = arm.edit_bones['neck.04']
    #
    #bpy.ops.object.mode_set(mode='OBJECT')
    #pbone = obj.pose.bones['head']
    #pbone['type'] = 'neck_flex'
    pass
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
    '''Collect the bone chain this rig type operates on.

    The bone given is neck_01, its parent is the body
    eg.
        body -> neck_01 -> neck_02 -> neck_03.... etc

    Returns [body, neck_01, neck_02, ...] as bone names, following
    children that share the neck bone's basename.
    '''
    arm = obj.data
    neck = arm.bones[orig_bone_name]
    body = neck.parent

    bone_definition = [body.name, neck.name]
    # children_recursive_basename walks descendants with a matching
    # basename (e.g. neck.01 -> neck.02 -> ...)
    bone_definition.extend([child.name for child in neck.children_recursive_basename])
    return bone_definition
|
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
    """Create a DEF- bone for every neck bone (definitions[1:], skipping
    the body at [0]) and constrain it to the original bone.
    """
    for org_bone_name in definitions[1:]:
        bpy.ops.object.mode_set(mode='EDIT')

        # Create deform bone.
        bone = copy_bone_simple(obj.data, org_bone_name, "DEF-%s" % base_names[org_bone_name], parent=True)

        # Store name before leaving edit mode
        bone_name = bone.name

        # Leave edit mode
        bpy.ops.object.mode_set(mode='OBJECT')

        # Get the pose bone
        bone = obj.pose.bones[bone_name]

        # Constrain to the original bone
        # XXX. Todo, is this needed if the bone is connected to its parent?
        con = bone.constraints.new('COPY_TRANSFORMS')
        con.name = "copy_loc"
        con.target = obj
        con.subtarget = org_bone_name
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
    """Build the neck-flex rig.

    bone_definition: [0]=body, [1..n]=the neck chain.
    Creates neck/head control bones with "hinge" custom properties (driver
    driven COPY_ROTATION constraints against the body), per-segment MCH
    bones split between neck-following and head-following rotation, and an
    "extent" property controlling how far down the chain the head rotation
    reaches.  Options honoured: "head_name", "extent", "ex_layer".
    Returns None (no blending of the result).
    """
    from mathutils import Vector

    arm = obj.data
    eb = obj.data.edit_bones
    bb = obj.data.bones
    pb = obj.pose.bones

    body = bone_definition[0]

    # Create the neck and head control bones
    if "head_name" in options:
        head_name = options["head_name"]
    else:
        head_name = "head"

    neck_name = base_names[bone_definition[1]].split(".")[0]

    neck_ctrl = copy_bone_simple(arm, bone_definition[1], neck_name).name
    head_ctrl = copy_bone_simple(arm, bone_definition[len(bone_definition)-1], head_name).name
    # Move the head control to the base of the neck, keeping its length
    eb[head_ctrl].tail += eb[neck_ctrl].head - eb[head_ctrl].head
    eb[head_ctrl].head = eb[neck_ctrl].head

    # Create hinge and socket bones, all relocated to the neck base
    neck_hinge = copy_bone_simple(arm, bone_definition[0], "MCH-" + neck_name + "_hinge").name
    head_hinge = copy_bone_simple(arm, neck_ctrl, "MCH-" + head_name + "_hinge").name
    eb[neck_hinge].tail += eb[neck_ctrl].head - eb[neck_hinge].head
    eb[neck_hinge].head = eb[neck_ctrl].head
    eb[head_hinge].tail += eb[neck_ctrl].head - eb[head_hinge].head
    eb[head_hinge].head = eb[neck_ctrl].head

    neck_socket = copy_bone_simple(arm, bone_definition[1], "MCH-" + neck_name + "_socket").name
    head_socket = copy_bone_simple(arm, bone_definition[1], "MCH-" + head_name + "_socket").name

    # Parent-child relationships between the body, hinges, controls, and sockets
    eb[neck_ctrl].parent = eb[neck_hinge]
    eb[head_ctrl].parent = eb[head_hinge]

    eb[neck_socket].parent = eb[body]
    eb[head_socket].parent = eb[body]

    # Create neck bones
    neck = []       # neck bones
    neck_neck = []  # bones constrained to neck control
    neck_head = []  # bones constrained to head control
    for i in range(1, len(bone_definition)):
        # Create bones
        neck_bone = copy_bone_simple(arm, bone_definition[i], base_names[bone_definition[i]]).name
        neck_neck_bone = copy_bone_simple(arm, neck_ctrl, "MCH-" + base_names[bone_definition[i]] + ".neck").name
        neck_head_bone = copy_bone_simple(arm, head_ctrl, "MCH-" + base_names[bone_definition[i]] + ".head").name

        # Move them all to the same place
        eb[neck_neck_bone].tail += eb[neck_bone].head - eb[neck_neck_bone].head
        eb[neck_head_bone].tail += eb[neck_bone].head - eb[neck_neck_bone].head
        eb[neck_neck_bone].head = eb[neck_bone].head
        eb[neck_head_bone].head = eb[neck_bone].head

        # Parent/child relationships: visible bone <- head MCH <- neck MCH
        eb[neck_bone].parent = eb[neck_head_bone]
        eb[neck_head_bone].parent = eb[neck_neck_bone]

        # Chain the neck MCH onto the previous visible segment (or the body
        # for the first segment)
        if i > 1:
            eb[neck_neck_bone].parent = eb[neck[i-2]]
        else:
            eb[neck_neck_bone].parent = eb[body]

        # Add them to the lists
        neck += [neck_bone]
        neck_neck += [neck_neck_bone]
        neck_head += [neck_head_bone]

    # Create deformation rig
    deform(obj, bone_definition, base_names, options)


    bpy.ops.object.mode_set(mode='OBJECT')

    # Axis locks: controls and segment bones are rotation-only
    pb[neck_ctrl].lock_location = True, True, True
    pb[head_ctrl].lock_location = True, True, True

    for bone in neck:
        pb[bone].lock_location = True, True, True

    # Neck hinge: "hinge" property on the neck control; COPY_ROTATION from
    # the body, with influence driven as (1 - hinge) via the linear f-curve
    # modifier below.
    prop = rna_idprop_ui_prop_get(pb[neck_ctrl], "hinge", create=True)
    pb[neck_ctrl]["hinge"] = 0.0
    prop["soft_min"] = 0.0
    prop["soft_max"] = 1.0
    prop["hard_min"] = 0.0
    prop["hard_max"] = 1.0

    con = pb[neck_hinge].constraints.new('COPY_LOCATION')
    con.name = "socket"
    con.target = obj
    con.subtarget = neck_socket

    con = pb[neck_hinge].constraints.new('COPY_ROTATION')
    con.name = "hinge"
    con.target = obj
    con.subtarget = body

    hinge_driver_path = pb[neck_ctrl].path_from_id() + '["hinge"]'

    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    var = driver.variables.new()
    driver.type = 'AVERAGE'
    var.name = "var"
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = hinge_driver_path

    # Generator modifier maps hinge value x to influence 1.0 - x
    mod = fcurve.modifiers[0]
    mod.poly_order = 1
    mod.coefficients[0] = 1.0
    mod.coefficients[1] = -1.0

    # Head hinge: same arrangement, but the rotation is copied from the
    # neck control rather than the body
    prop = rna_idprop_ui_prop_get(pb[head_ctrl], "hinge", create=True)
    pb[head_ctrl]["hinge"] = 0.0
    prop["soft_min"] = 0.0
    prop["soft_max"] = 1.0
    prop["hard_min"] = 0.0
    prop["hard_max"] = 1.0

    con = pb[head_hinge].constraints.new('COPY_LOCATION')
    con.name = "socket"
    con.target = obj
    con.subtarget = head_socket

    con = pb[head_hinge].constraints.new('COPY_ROTATION')
    con.name = "hinge"
    con.target = obj
    con.subtarget = neck_ctrl

    hinge_driver_path = pb[head_ctrl].path_from_id() + '["hinge"]'

    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    var = driver.variables.new()
    driver.type = 'AVERAGE'
    var.name = "var"
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = hinge_driver_path

    mod = fcurve.modifiers[0]
    mod.poly_order = 1
    mod.coefficients[0] = 1.0
    mod.coefficients[1] = -1.0

    # Neck rotation constraints: influence ramps linearly down the chain
    for i in range(0, len(neck_neck)):
        con = pb[neck_neck[i]].constraints.new('COPY_ROTATION')
        con.name = "neck rotation"
        con.target = obj
        con.subtarget = neck_ctrl
        con.influence = (i+1) / len(neck_neck)


    # Head rotation constraints/drivers
    prop = rna_idprop_ui_prop_get(pb[head_ctrl], "extent", create=True)
    if "extent" in options:
        pb[head_ctrl]["extent"] = options["extent"]
    else:
        pb[head_ctrl]["extent"] = 0.5
    prop["soft_min"] = 0.0
    prop["soft_max"] = 1.0
    prop["hard_min"] = 0.0
    prop["hard_max"] = 1.0

    extent_prop_path = pb[head_ctrl].path_from_id() + '["extent"]'

    for i in range(0, len(neck_head)):
        con = pb[neck_head[i]].constraints.new('COPY_ROTATION')
        con.name = "head rotation"
        con.target = obj
        con.subtarget = head_ctrl

        # All but the last segment get a driven influence scaled by the
        # "extent" property; the last segment always follows fully.
        if i < (len(neck_head)-1):
            inf = (i+1) / len(neck_head)

            fcurve = con.driver_add("influence")
            driver = fcurve.driver
            var = driver.variables.new()
            var.name = "ext"
            var.targets[0].id_type = 'OBJECT'
            var.targets[0].id = obj
            var.targets[0].data_path = extent_prop_path

            # Remap the ramp so that extent shifts where the head rotation
            # starts to take hold along the chain
            driver.expression = "0 if ext == 0 else (((%s-1)/ext)+1)" % inf
        else:
            con.influence = 1.0

    # Constrain original bones to the neck bones
    for i in range(0, len(neck)):
        con = pb[bone_definition[i+1]].constraints.new('COPY_TRANSFORMS')
        con.name = "copy_transform"
        con.target = obj
        con.subtarget = neck[i]


    # Set the controls' custom shapes to use other bones for transforms
    pb[neck_ctrl].custom_shape_transform = pb[bone_definition[len(bone_definition)//2]]
    pb[head_ctrl].custom_shape_transform = pb[bone_definition[len(bone_definition)-1]]


    # last step setup layers
    if "ex_layer" in options:
        layer = [n==options["ex_layer"] for n in range(0,32)]
    else:
        layer = list(arm.bones[bone_definition[1]].layers)
    for bone in neck:
        bb[bone].layers = layer

    layer = list(arm.bones[bone_definition[1]].layers)
    bb[neck_ctrl].layers = layer
    bb[head_ctrl].layers = layer


    # no blending the result of this
    return None
|
||||
|
@ -1,348 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
|
||||
# not used, defined for completeness
|
||||
METARIG_NAMES = ("body", "head")
|
||||
|
||||
|
||||
def metarig_template():
    """Create the sample metarig for this rig type in the active armature:
    a body bone, a head, and a 5-bone neck chain parented under the head,
    with the head pose bone tagged 'type' = 'neck_flex'.
    Operates on bpy.context.active_object; must be run with an armature
    object active.
    """
    # generated by rigify.write_meta_rig
    bpy.ops.object.mode_set(mode='EDIT')
    obj = bpy.context.active_object
    arm = obj.data
    bone = arm.edit_bones.new('body')
    bone.head[:] = 0.0000, -0.0276, -0.1328
    bone.tail[:] = 0.0000, -0.0170, -0.0197
    bone.roll = 0.0000
    bone.use_connect = False
    bone = arm.edit_bones.new('head')
    bone.head[:] = 0.0000, -0.0170, -0.0197
    bone.tail[:] = 0.0000, 0.0726, 0.1354
    bone.roll = 0.0000
    bone.use_connect = True
    bone.parent = arm.edit_bones['body']
    bone = arm.edit_bones.new('neck.01')
    bone.head[:] = 0.0000, -0.0170, -0.0197
    bone.tail[:] = 0.0000, -0.0099, 0.0146
    bone.roll = 0.0000
    bone.use_connect = False
    bone.parent = arm.edit_bones['head']
    bone = arm.edit_bones.new('neck.02')
    bone.head[:] = 0.0000, -0.0099, 0.0146
    bone.tail[:] = 0.0000, -0.0242, 0.0514
    bone.roll = 0.0000
    bone.use_connect = True
    bone.parent = arm.edit_bones['neck.01']
    bone = arm.edit_bones.new('neck.03')
    bone.head[:] = 0.0000, -0.0242, 0.0514
    bone.tail[:] = 0.0000, -0.0417, 0.0868
    bone.roll = 0.0000
    bone.use_connect = True
    bone.parent = arm.edit_bones['neck.02']
    bone = arm.edit_bones.new('neck.04')
    bone.head[:] = 0.0000, -0.0417, 0.0868
    bone.tail[:] = 0.0000, -0.0509, 0.1190
    bone.roll = 0.0000
    bone.use_connect = True
    bone.parent = arm.edit_bones['neck.03']
    bone = arm.edit_bones.new('neck.05')
    bone.head[:] = 0.0000, -0.0509, 0.1190
    bone.tail[:] = 0.0000, -0.0537, 0.1600
    bone.roll = 0.0000
    bone.use_connect = True
    bone.parent = arm.edit_bones['neck.04']

    bpy.ops.object.mode_set(mode='OBJECT')
    pbone = obj.pose.bones['head']
    pbone['type'] = 'neck_flex'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
    '''Collect the bone chain this rig type operates on.

    The bone given is the head, its parent is the body,
    its only child the first of a chain with matching basenames.
    eg.
        body -> head -> neck_01 -> neck_02 -> neck_03.... etc

    Returns [body, head, neck_01, ...] as bone names.
    Raises RigifyError if the head does not have exactly one child.
    '''
    arm = obj.data
    head = arm.bones[orig_bone_name]
    body = head.parent

    children = head.children
    if len(children) != 1:
        raise RigifyError("expected the head bone '%s' to have only 1 child." % orig_bone_name)

    child = children[0]
    bone_definition = [body.name, head.name, child.name]
    # Follow descendants sharing the first neck bone's basename
    bone_definition.extend([child.name for child in child.children_recursive_basename])
    return bone_definition
|
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
    """Create a DEF- bone for every chain bone (definitions[2:], skipping
    body at [0] and head at [1]) and constrain it to the original bone.
    """
    for org_bone_name in definitions[2:]:
        bpy.ops.object.mode_set(mode='EDIT')

        # Create deform bone.
        bone = copy_bone_simple(obj.data, org_bone_name, "DEF-%s" % base_names[org_bone_name], parent=True)

        # Store name before leaving edit mode
        bone_name = bone.name

        # Leave edit mode
        bpy.ops.object.mode_set(mode='OBJECT')

        # Get the pose bone
        bone = obj.pose.bones[bone_name]

        # Constrain to the original bone
        # XXX. Todo, is this needed if the bone is connected to its parent?
        con = bone.constraints.new('COPY_TRANSFORMS')
        con.name = "copy_loc"
        con.target = obj
        con.subtarget = org_bone_name
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
from mathutils import Vector
|
||||
|
||||
arm = obj.data
|
||||
|
||||
# Initialize container classes for convenience
|
||||
mt = bone_class_instance(obj, ["body", "head"]) # meta
|
||||
mt.body = bone_definition[0]
|
||||
mt.head = bone_definition[1]
|
||||
mt.update()
|
||||
|
||||
neck_chain = bone_definition[2:]
|
||||
|
||||
mt_chain = bone_class_instance(obj, [("neck_%.2d" % (i + 1)) for i in range(len(neck_chain))]) # 99 bones enough eh?
|
||||
for i, attr in enumerate(mt_chain.attr_names):
|
||||
setattr(mt_chain, attr, neck_chain[i])
|
||||
mt_chain.update()
|
||||
|
||||
neck_chain_basename = base_names[mt_chain.neck_01_e.name].split(".")[0]
|
||||
neck_chain_segment_length = mt_chain.neck_01_e.length
|
||||
|
||||
ex = bone_class_instance(obj, ["head", "head_hinge", "neck_socket", "head_ctrl"]) # hinge & extras
|
||||
|
||||
# Add the head hinge at the bodys location, becomes the parent of the original head
|
||||
|
||||
# apply everything to this copy of the chain
|
||||
ex_chain = mt_chain.copy(base_names=base_names)
|
||||
ex_chain.neck_01_e.parent = mt_chain.neck_01_e.parent
|
||||
|
||||
|
||||
# Copy the head bone and offset
|
||||
ex.head_e = copy_bone_simple(arm, mt.head, "MCH-%s" % base_names[mt.head], parent=True)
|
||||
ex.head_e.use_connect = False
|
||||
ex.head = ex.head_e.name
|
||||
# offset
|
||||
head_length = ex.head_e.length
|
||||
ex.head_e.head.y += head_length / 2.0
|
||||
ex.head_e.tail.y += head_length / 2.0
|
||||
|
||||
# Yes, use the body bone but call it a head hinge
|
||||
ex.head_hinge_e = copy_bone_simple(arm, mt.body, "MCH-%s_hinge" % base_names[mt.head], parent=False)
|
||||
ex.head_hinge_e.use_connect = False
|
||||
ex.head_hinge = ex.head_hinge_e.name
|
||||
ex.head_hinge_e.head.y += head_length / 4.0
|
||||
ex.head_hinge_e.tail.y += head_length / 4.0
|
||||
|
||||
# Insert the neck socket, the head copys this loation
|
||||
ex.neck_socket_e = arm.edit_bones.new("MCH-%s_socked" % neck_chain_basename)
|
||||
ex.neck_socket = ex.neck_socket_e.name
|
||||
ex.neck_socket_e.use_connect = False
|
||||
ex.neck_socket_e.parent = mt.body_e
|
||||
ex.neck_socket_e.head = mt.head_e.head
|
||||
ex.neck_socket_e.tail = mt.head_e.head - Vector((0.0, neck_chain_segment_length / 2.0, 0.0))
|
||||
ex.neck_socket_e.roll = 0.0
|
||||
|
||||
|
||||
# copy of the head for controling
|
||||
ex.head_ctrl_e = copy_bone_simple(arm, mt.head, base_names[mt.head])
|
||||
ex.head_ctrl = ex.head_ctrl_e.name
|
||||
ex.head_ctrl_e.parent = ex.head_hinge_e
|
||||
|
||||
for i, attr in enumerate(ex_chain.attr_names):
|
||||
neck_e = getattr(ex_chain, attr + "_e")
|
||||
|
||||
# dont store parent names, re-reference as each chain bones parent.
|
||||
neck_e_parent = arm.edit_bones.new("MCH-rot_%s" % base_names[getattr(mt_chain, attr)])
|
||||
neck_e_parent.head = neck_e.head
|
||||
neck_e_parent.tail = neck_e.head + (mt.head_e.vector.normalize() * neck_chain_segment_length / 2.0)
|
||||
neck_e_parent.roll = mt.head_e.roll
|
||||
|
||||
orig_parent = neck_e.parent
|
||||
neck_e.use_connect = False
|
||||
neck_e.parent = neck_e_parent
|
||||
neck_e_parent.use_connect = False
|
||||
|
||||
if i == 0:
|
||||
neck_e_parent.parent = mt.body_e
|
||||
else:
|
||||
neck_e_parent.parent = orig_parent
|
||||
|
||||
deform(obj, bone_definition, base_names, options)
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
mt.update()
|
||||
mt_chain.update()
|
||||
ex_chain.update()
|
||||
ex.update()
|
||||
|
||||
# Axis locks
|
||||
ex.head_ctrl_p.lock_location = True, True, True
|
||||
|
||||
# Simple one off constraints, no drivers
|
||||
con = ex.head_ctrl_p.constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = ex.neck_socket
|
||||
|
||||
con = ex.head_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = ex.head_ctrl
|
||||
|
||||
# driven hinge
|
||||
prop = rna_idprop_ui_prop_get(ex.head_ctrl_p, "hinge", create=True)
|
||||
ex.head_ctrl_p["hinge"] = 0.0
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
con = ex.head_hinge_p.constraints.new('COPY_ROTATION')
|
||||
con.name = "hinge"
|
||||
con.target = obj
|
||||
con.subtarget = mt.body
|
||||
|
||||
# add driver
|
||||
hinge_driver_path = ex.head_ctrl_p.path_from_id() + '["hinge"]'
|
||||
|
||||
fcurve = con.driver_add("influence")
|
||||
driver = fcurve.driver
|
||||
var = driver.variables.new()
|
||||
driver.type = 'AVERAGE'
|
||||
var.name = "var"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = hinge_driver_path
|
||||
|
||||
#mod = fcurve_driver.modifiers.new('GENERATOR')
|
||||
mod = fcurve.modifiers[0]
|
||||
mod.poly_order = 1
|
||||
mod.coefficients[0] = 1.0
|
||||
mod.coefficients[1] = -1.0
|
||||
|
||||
head_driver_path = ex.head_ctrl_p.path_from_id()
|
||||
|
||||
target_names = [("b%.2d" % (i + 1)) for i in range(len(neck_chain))]
|
||||
|
||||
ex.head_ctrl_p["bend_tot"] = 0.0
|
||||
fcurve = ex.head_ctrl_p.driver_add('["bend_tot"]')
|
||||
driver = fcurve.driver
|
||||
driver.type = 'SUM'
|
||||
fcurve.modifiers.remove(fcurve.modifiers[0]) # grr dont need a modifier
|
||||
|
||||
for i in range(len(neck_chain)):
|
||||
var = driver.variables.new()
|
||||
var.name = target_names[i]
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = head_driver_path + ('["bend_%.2d"]' % (i + 1))
|
||||
|
||||
|
||||
for i, attr in enumerate(ex_chain.attr_names):
|
||||
neck_p = getattr(ex_chain, attr + "_p")
|
||||
neck_p.lock_location = True, True, True
|
||||
neck_p.lock_location = True, True, True
|
||||
neck_p.lock_rotations_4d = True
|
||||
|
||||
# Add bend prop
|
||||
prop_name = "bend_%.2d" % (i + 1)
|
||||
prop = rna_idprop_ui_prop_get(ex.head_ctrl_p, prop_name, create=True)
|
||||
ex.head_ctrl_p[prop_name] = 1.0
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
# add parent constraint
|
||||
neck_p_parent = neck_p.parent
|
||||
|
||||
# add constraint
|
||||
con = neck_p_parent.constraints.new('COPY_ROTATION')
|
||||
con.name = "Copy Rotation"
|
||||
con.target = obj
|
||||
con.subtarget = ex.head
|
||||
con.owner_space = 'LOCAL'
|
||||
con.target_space = 'LOCAL'
|
||||
|
||||
fcurve = con.driver_add("influence")
|
||||
driver = fcurve.driver
|
||||
driver.type = 'SCRIPTED'
|
||||
driver.expression = "bend/bend_tot"
|
||||
|
||||
fcurve.modifiers.remove(fcurve.modifiers[0]) # grr dont need a modifier
|
||||
|
||||
|
||||
# add target
|
||||
var = driver.variables.new()
|
||||
var.name = "bend_tot"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = head_driver_path + ('["bend_tot"]')
|
||||
|
||||
var = driver.variables.new()
|
||||
var.name = "bend"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = head_driver_path + ('["%s"]' % prop_name)
|
||||
|
||||
|
||||
# finally constrain the original bone to this one
|
||||
orig_neck_p = getattr(mt_chain, attr + "_p")
|
||||
con = orig_neck_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = neck_p.name
|
||||
|
||||
|
||||
# Set the head control's custom shape to use the last
|
||||
# org neck bone for its transform
|
||||
ex.head_ctrl_p.custom_shape_transform = obj.pose.bones[bone_definition[len(bone_definition)-1]]
|
||||
|
||||
|
||||
# last step setup layers
|
||||
if "ex_layer" in options:
|
||||
layer = [n == options["ex_layer"] for n in range(0, 32)]
|
||||
else:
|
||||
layer = list(arm.bones[bone_definition[1]].layers)
|
||||
for attr in ex_chain.attr_names:
|
||||
getattr(ex_chain, attr + "_b").layers = layer
|
||||
for attr in ex.attr_names:
|
||||
getattr(ex, attr + "_b").layers = layer
|
||||
|
||||
layer = list(arm.bones[bone_definition[1]].layers)
|
||||
ex.head_ctrl_b.layers = layer
|
||||
|
||||
|
||||
# no blending the result of this
|
||||
return None
|
@ -1,270 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify_utils import copy_bone_simple, get_side_name
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
|
||||
# not used, defined for completeness
|
||||
METARIG_NAMES = tuple()
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('hand')
|
||||
bone.head[:] = 0.0004, -0.0629, 0.0000
|
||||
bone.tail[:] = 0.0021, -0.0209, 0.0000
|
||||
bone.roll = 0.0000
|
||||
bone.use_connect = False
|
||||
bone = arm.edit_bones.new('palm.03')
|
||||
bone.head[:] = -0.0000, 0.0000, 0.0000
|
||||
bone.tail[:] = 0.0025, 0.0644, -0.0065
|
||||
bone.roll = -3.1396
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand']
|
||||
bone = arm.edit_bones.new('palm.02')
|
||||
bone.head[:] = 0.0252, -0.0000, 0.0000
|
||||
bone.tail[:] = 0.0324, 0.0627, -0.0065
|
||||
bone.roll = -3.1357
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand']
|
||||
bone = arm.edit_bones.new('palm.01')
|
||||
bone.head[:] = 0.0504, 0.0000, 0.0000
|
||||
bone.tail[:] = 0.0703, 0.0508, -0.0065
|
||||
bone.roll = -3.1190
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand']
|
||||
bone = arm.edit_bones.new('palm.04')
|
||||
bone.head[:] = -0.0252, 0.0000, 0.0000
|
||||
bone.tail[:] = -0.0286, 0.0606, -0.0065
|
||||
bone.roll = 3.1386
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand']
|
||||
bone = arm.edit_bones.new('palm.05')
|
||||
bone.head[:] = -0.0504, 0.0000, 0.0000
|
||||
bone.tail[:] = -0.0669, 0.0534, -0.0065
|
||||
bone.roll = 3.1239
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand']
|
||||
bone = arm.edit_bones.new('thumb')
|
||||
bone.head[:] = 0.0682, -0.0148, 0.0000
|
||||
bone.tail[:] = 0.1063, 0.0242, -0.0065
|
||||
bone.roll = -3.0929
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand']
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['palm.01']
|
||||
pbone['type'] = 'palm_curl'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
'''
|
||||
The bone given is the first in an array of siblings with a matching basename
|
||||
sorted with pointer first, little finger last.
|
||||
eg.
|
||||
[pointer, middle, ring, pinky... ] # any number of fingers
|
||||
'''
|
||||
arm = obj.data
|
||||
|
||||
palm_bone = arm.bones[orig_bone_name]
|
||||
palm_parent = palm_bone.parent
|
||||
palm_base = palm_bone.basename
|
||||
bone_definition = [bone.name for bone in palm_parent.children if bone.basename == palm_base]
|
||||
bone_definition.sort()
|
||||
bone_definition.reverse()
|
||||
|
||||
return [palm_parent.name] + bone_definition
|
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
|
||||
for org_bone_name in definitions[1:]:
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
# Create deform bone.
|
||||
bone = copy_bone_simple(obj.data, org_bone_name, "DEF-%s" % base_names[org_bone_name], parent=True)
|
||||
|
||||
# Store name before leaving edit mode
|
||||
bone_name = bone.name
|
||||
|
||||
# Leave edit mode
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Get the pose bone
|
||||
bone = obj.pose.bones[bone_name]
|
||||
|
||||
# Constrain to the original bone
|
||||
# XXX. Todo, is this needed if the bone is connected to its parent?
|
||||
con = bone.constraints.new('COPY_TRANSFORMS')
|
||||
con.name = "copy_loc"
|
||||
con.target = obj
|
||||
con.subtarget = org_bone_name
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
arm = obj.data
|
||||
|
||||
children = bone_definition[1:]
|
||||
|
||||
# Make a copy of the pinky
|
||||
# simply assume the pinky has the lowest name
|
||||
pinky_ebone = arm.edit_bones[children[0]]
|
||||
ring_ebone = arm.edit_bones[children[1]]
|
||||
|
||||
# FIXME, why split the second one?
|
||||
base_name = base_names[pinky_ebone.name].rsplit('.', 2)[0]
|
||||
|
||||
control_ebone = copy_bone_simple(arm, pinky_ebone.name, base_name + get_side_name(base_names[pinky_ebone.name]), parent=True)
|
||||
control_name = control_ebone.name
|
||||
|
||||
offset = (pinky_ebone.head - ring_ebone.head)
|
||||
|
||||
control_ebone.translate(offset)
|
||||
|
||||
deform(obj, bone_definition, base_names, options)
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
arm = obj.data
|
||||
control_pbone = obj.pose.bones[control_name]
|
||||
pinky_pbone = obj.pose.bones[children[0]]
|
||||
|
||||
control_pbone.rotation_mode = 'YZX'
|
||||
control_pbone.lock_rotation = False, True, True
|
||||
control_pbone.lock_location = True, True, True
|
||||
|
||||
driver_fcurves = pinky_pbone.driver_add("rotation_euler")
|
||||
|
||||
|
||||
controller_path = control_pbone.path_from_id()
|
||||
|
||||
# add custom prop
|
||||
control_pbone["spread"] = 0.0
|
||||
prop = rna_idprop_ui_prop_get(control_pbone, "spread", create=True)
|
||||
prop["soft_min"] = -1.0
|
||||
prop["soft_max"] = 1.0
|
||||
prop["min"] = -1.0
|
||||
prop["max"] = 1.0
|
||||
|
||||
|
||||
# *****
|
||||
driver = driver_fcurves[0].driver
|
||||
driver.type = 'AVERAGE'
|
||||
|
||||
var = driver.variables.new()
|
||||
var.name = "x"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = controller_path + ".rotation_euler[0]"
|
||||
|
||||
|
||||
# *****
|
||||
driver = driver_fcurves[1].driver
|
||||
driver.expression = "-x/4.0"
|
||||
|
||||
var = driver.variables.new()
|
||||
var.name = "x"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = controller_path + ".rotation_euler[0]"
|
||||
|
||||
|
||||
# *****
|
||||
driver = driver_fcurves[2].driver
|
||||
driver.expression = "(1.0-cos(x))-s"
|
||||
|
||||
for fcurve in driver_fcurves:
|
||||
fcurve.modifiers.remove(fcurve.modifiers[0]) # grr dont need a modifier
|
||||
|
||||
var = driver.variables.new()
|
||||
var.name = "x"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = controller_path + ".rotation_euler[0]"
|
||||
|
||||
var = driver.variables.new()
|
||||
var.name = "s"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = controller_path + '["spread"]'
|
||||
|
||||
|
||||
for i, child_name in enumerate(children):
|
||||
child_pbone = obj.pose.bones[child_name]
|
||||
child_pbone.rotation_mode = 'YZX'
|
||||
|
||||
if child_name != children[-1] and child_name != children[0]:
|
||||
|
||||
# this is somewhat arbitrary but seems to look good
|
||||
inf = i / (len(children) + 1)
|
||||
inf = 1.0 - inf
|
||||
inf = ((inf * inf) + inf) / 2.0
|
||||
|
||||
# used for X/Y constraint
|
||||
inf_minor = inf * inf
|
||||
|
||||
con = child_pbone.constraints.new('COPY_ROTATION')
|
||||
con.name = "Copy Z Rot"
|
||||
con.target = obj
|
||||
con.subtarget = children[0] # also pinky_pbone
|
||||
con.owner_space = con.target_space = 'LOCAL'
|
||||
con.use_x, con.use_y, con.use_z = False, False, True
|
||||
con.influence = inf
|
||||
|
||||
con = child_pbone.constraints.new('COPY_ROTATION')
|
||||
con.name = "Copy XY Rot"
|
||||
con.target = obj
|
||||
con.subtarget = children[0] # also pinky_pbone
|
||||
con.owner_space = con.target_space = 'LOCAL'
|
||||
con.use_x, con.use_y, con.use_z = True, True, False
|
||||
con.influence = inf_minor
|
||||
|
||||
|
||||
child_pbone = obj.pose.bones[children[-1]]
|
||||
child_pbone.rotation_mode = 'QUATERNION'
|
||||
|
||||
# fix at the end since there is some trouble with tx info not being updated otherwise
|
||||
def x_direction():
|
||||
# NOTE: the direction of the Z rotation depends on which side the palm is on.
|
||||
# we could do a simple side-of-x test but better to work out the direction
|
||||
# the hand is facing.
|
||||
from mathutils import Vector
|
||||
from math import degrees
|
||||
child_pbone_01 = obj.pose.bones[children[0]].bone
|
||||
child_pbone_02 = obj.pose.bones[children[1]].bone
|
||||
|
||||
rel_vec = child_pbone_01.head - child_pbone_02.head
|
||||
x_vec = child_pbone_01.matrix.rotation_part() * Vector((1.0, 0.0, 0.0))
|
||||
|
||||
return degrees(rel_vec.angle(x_vec)) > 90.0
|
||||
|
||||
if x_direction(): # flip
|
||||
driver.expression = "-(%s)" % driver.expression
|
||||
|
||||
|
||||
# last step setup layers
|
||||
arm.bones[control_name].layers = list(arm.bones[bone_definition[1]].layers)
|
||||
|
||||
|
||||
# no blending the result of this
|
||||
return None
|
@ -1,320 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import copy_bone_simple
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
|
||||
#METARIG_NAMES = ("cpy",)
|
||||
RIG_TYPE = "shape_key_control"
|
||||
|
||||
|
||||
def addget_shape_key(obj, name="Key"):
|
||||
""" Fetches a shape key, or creates it if it doesn't exist
|
||||
"""
|
||||
# Create a shapekey set if it doesn't already exist
|
||||
if obj.data.shape_keys is None:
|
||||
shape = obj.add_shape_key(name="Basis", from_mix=False)
|
||||
obj.active_shape_key_index = 0
|
||||
|
||||
# Get the shapekey, or create it if it doesn't already exist
|
||||
if name in obj.data.shape_keys.keys:
|
||||
shape_key = obj.data.shape_keys.keys[name]
|
||||
else:
|
||||
shape_key = obj.add_shape_key(name=name, from_mix=False)
|
||||
|
||||
return shape_key
|
||||
|
||||
|
||||
def addget_shape_key_driver(obj, name="Key"):
|
||||
""" Fetches the driver for the shape key, or creates it if it doesn't
|
||||
already exist.
|
||||
"""
|
||||
driver_path = 'keys["' + name + '"].value'
|
||||
fcurve = None
|
||||
driver = None
|
||||
new = False
|
||||
if obj.data.shape_keys.animation_data is not None:
|
||||
for driver_s in obj.data.shape_keys.animation_data.drivers:
|
||||
if driver_s.data_path == driver_path:
|
||||
fcurve = driver_s
|
||||
if fcurve is None:
|
||||
fcurve = obj.data.shape_keys.keys[name].driver_add("value")
|
||||
fcurve.driver.type = 'AVERAGE'
|
||||
new = True
|
||||
|
||||
return fcurve, new
|
||||
|
||||
|
||||
# TODO:
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
#bpy.ops.object.mode_set(mode='EDIT')
|
||||
#obj = bpy.context.active_object
|
||||
#arm = obj.data
|
||||
#bone = arm.edit_bones.new('Bone')
|
||||
#bone.head[:] = 0.0000, 0.0000, 0.0000
|
||||
#bone.tail[:] = 0.0000, 0.0000, 1.0000
|
||||
#bone.roll = 0.0000
|
||||
#bone.use_connect = False
|
||||
#
|
||||
#bpy.ops.object.mode_set(mode='OBJECT')
|
||||
#pbone = obj.pose.bones['Bone']
|
||||
#pbone['type'] = 'copy'
|
||||
pass
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
bone = obj.data.bones[orig_bone_name]
|
||||
return [bone.name]
|
||||
|
||||
|
||||
def main(obj, definitions, base_names, options):
|
||||
""" A rig that drives shape keys with the local transforms and/or custom
|
||||
properties of a single bone.
|
||||
A different shape can be driven by the negative value of a transform as
|
||||
well by giving a comma-separated list of two shapes.
|
||||
|
||||
Required options:
|
||||
mesh: name of mesh object(s) to add/get shapekeys to/from
|
||||
(if multiple objects, make a comma-separated list)
|
||||
Optional options:
|
||||
loc_<x/y/z>: name of the shape key to tie to translation of the bone
|
||||
loc_<x/y/z>_fac: default multiplier of the bone influence on the shape key
|
||||
rot_<x/y/z>: name of the shape key to tie to rotation of the bone
|
||||
rot_<x/y/z>_fac: default multiplier of the bone influence on the shape key
|
||||
scale_<x/y/z>: name of the shape key to tie to scale of the bone
|
||||
scale_<x/y/z>_fac: default multiplier of the bone influence on the shape key
|
||||
shape_key_sliders: comma-separated list of custom properties to create sliders out of for driving shape keys
|
||||
<custom_prop>: for each property listed in shape_key_sliders, specify a shape key for it to drive
|
||||
|
||||
"""
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
eb = obj.data.edit_bones
|
||||
pb = obj.pose.bones
|
||||
|
||||
org_bone = definitions[0]
|
||||
|
||||
# Options
|
||||
req_options = ["mesh"]
|
||||
for option in req_options:
|
||||
if option not in options:
|
||||
raise RigifyError("'%s' rig type requires a '%s' option (bone: %s)" % (RIG_TYPE, option, base_names[definitions[0]]))
|
||||
|
||||
meshes = options["mesh"].replace(" ", "").split(",")
|
||||
|
||||
bone = copy_bone_simple(obj.data, org_bone, base_names[org_bone], parent=True).name
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Set rotation mode and axis locks
|
||||
pb[bone].rotation_mode = pb[org_bone].rotation_mode
|
||||
pb[bone].lock_location = tuple(pb[org_bone].lock_location)
|
||||
pb[bone].lock_rotation = tuple(pb[org_bone].lock_rotation)
|
||||
pb[bone].lock_rotation_w = pb[org_bone].lock_rotation_w
|
||||
pb[bone].lock_rotations_4d = pb[org_bone].lock_rotations_4d
|
||||
pb[bone].lock_scale = tuple(pb[org_bone].lock_scale)
|
||||
|
||||
# List of rig options for specifying shape keys
|
||||
# Append '_fac' to the end for the name of the corresponding 'factor
|
||||
# default' option for that shape
|
||||
shape_key_options = ["loc_x",
|
||||
"loc_y",
|
||||
"loc_z",
|
||||
"rot_x",
|
||||
"rot_y",
|
||||
"rot_z",
|
||||
"scale_x",
|
||||
"scale_y",
|
||||
"scale_z"]
|
||||
|
||||
driver_paths = {"loc_x":".location[0]",
|
||||
"loc_y":".location[1]",
|
||||
"loc_z":".location[2]",
|
||||
"rot_x":".rotation_euler[0]",
|
||||
"rot_y":".rotation_euler[1]",
|
||||
"rot_z":".rotation_euler[2]",
|
||||
"qrot_x":".rotation_quaternion[1]",
|
||||
"qrot_y":".rotation_quaternion[2]",
|
||||
"qrot_z":".rotation_quaternion[3]",
|
||||
"scale_x":".scale[0]",
|
||||
"scale_y":".scale[1]",
|
||||
"scale_z":".scale[2]"}
|
||||
|
||||
# Create the shape keys and drivers for transforms
|
||||
shape_info = []
|
||||
for option in shape_key_options:
|
||||
if option in options:
|
||||
shape_names = options[option].replace(" ", "").split(",")
|
||||
|
||||
var_name = bone.replace(".","").replace("-","_") + "_" + option
|
||||
# Different RNA paths for euler vs quat
|
||||
if option in (shape_key_options[3:6]+shape_key_options[12:15]) \
|
||||
and pb[bone].rotation_mode == 'QUATERNION':
|
||||
var_path = driver_paths['q' + option]
|
||||
else:
|
||||
var_path = driver_paths[option]
|
||||
|
||||
if (option+"_fac") in options:
|
||||
fac = options[option+"_fac"]
|
||||
else:
|
||||
fac = 1.0
|
||||
|
||||
# Positive
|
||||
if shape_names[0] != "":
|
||||
# Different expressions for loc/rot/scale and positive/negative
|
||||
if option in shape_key_options[:3]:
|
||||
# Location
|
||||
expression = var_name + " * " + str(fac)
|
||||
elif option in shape_key_options[3:6]:
|
||||
# Rotation
|
||||
# Different expressions for euler vs quats
|
||||
if pb[bone].rotation_mode == 'QUATERNION':
|
||||
expression = "2 * asin(" + var_name + ") * " + str(fac)
|
||||
else:
|
||||
expression = var_name + " * " + str(fac)
|
||||
elif option in shape_key_options[6:9]:
|
||||
# Scale
|
||||
expression = "(1.0 - " + var_name + ") * " + str(fac) + " * -2"
|
||||
shape_name = shape_names[0]
|
||||
create_shape_and_driver(obj, bone, meshes, shape_name, var_name, var_path, expression)
|
||||
|
||||
# Negative
|
||||
if shape_names[0] != "" and len(shape_names) > 1:
|
||||
# Different expressions for loc/rot/scale and positive/negative
|
||||
if option in shape_key_options[:3]:
|
||||
# Location
|
||||
expression = var_name + " * " + str(fac) + " * -1"
|
||||
elif option in shape_key_options[3:6]:
|
||||
# Rotation
|
||||
# Different expressions for euler vs quats
|
||||
if pb[bone].rotation_mode == 'QUATERNION':
|
||||
expression = "-2 * asin(" + var_name + ") * " + str(fac)
|
||||
else:
|
||||
expression = var_name + " * " + str(fac) + " * -1"
|
||||
elif option in shape_key_options[6:9]:
|
||||
# Scale
|
||||
expression = "(1.0 - " + var_name + ") * " + str(fac) + " * 2"
|
||||
shape_name = shape_names[1]
|
||||
create_shape_and_driver(obj, bone, meshes, shape_name, var_name, var_path, expression)
|
||||
|
||||
# Create the shape keys and drivers for custom-property sliders
|
||||
if "shape_key_sliders" in options:
|
||||
# Get the slider names
|
||||
slider_names = options["shape_key_sliders"].replace(" ", "").split(",")
|
||||
if slider_names[0] != "":
|
||||
# Loop through the slider names and check if they have
|
||||
# shape keys specified for them, and if so, set them up.
|
||||
for slider_name in slider_names:
|
||||
if slider_name in options:
|
||||
shape_names = options[slider_name].replace(" ", "").split(",")
|
||||
|
||||
# Set up the custom property on the bone
|
||||
prop = rna_idprop_ui_prop_get(pb[bone], slider_name, create=True)
|
||||
pb[bone][slider_name] = 0.0
|
||||
prop["min"] = 0.0
|
||||
prop["max"] = 1.0
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
if len(shape_names) > 1:
|
||||
prop["min"] = -1.0
|
||||
prop["soft_min"] = -1.0
|
||||
|
||||
# Add the shape drivers
|
||||
# Positive
|
||||
if shape_names[0] != "":
|
||||
# Set up the variables for creating the shape key driver
|
||||
shape_name = shape_names[0]
|
||||
var_name = slider_name.replace(".", "_").replace("-", "_")
|
||||
var_path = '["' + slider_name + '"]'
|
||||
if slider_name + "_fac" in options:
|
||||
fac = options[slider_name + "_fac"]
|
||||
else:
|
||||
fac = 1.0
|
||||
expression = var_name + " * " + str(fac)
|
||||
# Create the shape key driver
|
||||
create_shape_and_driver(obj, bone, meshes, shape_name, var_name, var_path, expression)
|
||||
# Negative
|
||||
if shape_names[0] != "" and len(shape_names) > 1:
|
||||
# Set up the variables for creating the shape key driver
|
||||
shape_name = shape_names[1]
|
||||
var_name = slider_name.replace(".", "_").replace("-", "_")
|
||||
var_path = '["' + slider_name + '"]'
|
||||
if slider_name + "_fac" in options:
|
||||
fac = options[slider_name + "_fac"]
|
||||
else:
|
||||
fac = 1.0
|
||||
expression = var_name + " * " + str(fac) + " * -1"
|
||||
# Create the shape key driver
|
||||
create_shape_and_driver(obj, bone, meshes, shape_name, var_name, var_path, expression)
|
||||
|
||||
|
||||
# Org bone copy transforms of control bone
|
||||
con = pb[org_bone].constraints.new('COPY_TRANSFORMS')
|
||||
con.target = obj
|
||||
con.subtarget = bone
|
||||
|
||||
return (None,)
|
||||
|
||||
|
||||
def create_shape_and_driver(obj, bone, meshes, shape_name, var_name, var_path, expression):
|
||||
""" Creates/gets a shape key and sets up a driver for it.
|
||||
|
||||
obj = armature object
|
||||
bone = driving bone name
|
||||
meshes = list of meshes to create the shapekey/driver on
|
||||
shape_name = name of the shape key
|
||||
var_name = name of the driving variable
|
||||
var_path = path to the property on the bone to drive with
|
||||
expression = python expression for the driver
|
||||
"""
|
||||
pb = obj.pose.bones
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
for mesh_name in meshes:
|
||||
mesh_obj = bpy.data.objects[mesh_name]
|
||||
|
||||
# Add/get the shape key
|
||||
shape = addget_shape_key(mesh_obj, name=shape_name)
|
||||
|
||||
# Add/get the shape key driver
|
||||
fcurve, a = addget_shape_key_driver(mesh_obj, name=shape_name)
|
||||
|
||||
# Set up the driver
|
||||
driver = fcurve.driver
|
||||
driver.type = 'SCRIPTED'
|
||||
driver.expression = expression
|
||||
|
||||
# Get the variable, or create it if it doesn't already exist
|
||||
if var_name in driver.variables:
|
||||
var = driver.variables[var_name]
|
||||
else:
|
||||
var = driver.variables.new()
|
||||
var.name = var_name
|
||||
|
||||
# Set up the variable
|
||||
var.type = "SINGLE_PROP"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = 'pose.bones["' + bone + '"]' + var_path
|
||||
|
||||
|
@ -1,172 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
|
||||
#METARIG_NAMES = ("cpy",)
|
||||
RIG_TYPE = "shape_key_distance"
|
||||
|
||||
|
||||
def addget_shape_key(obj, name="Key"):
|
||||
""" Fetches a shape key, or creates it if it doesn't exist
|
||||
"""
|
||||
# Create a shapekey set if it doesn't already exist
|
||||
if obj.data.shape_keys is None:
|
||||
shape = obj.add_shape_key(name="Basis", from_mix=False)
|
||||
obj.active_shape_key_index = 0
|
||||
|
||||
# Get the shapekey, or create it if it doesn't already exist
|
||||
if name in obj.data.shape_keys.keys:
|
||||
shape_key = obj.data.shape_keys.keys[name]
|
||||
else:
|
||||
shape_key = obj.add_shape_key(name=name, from_mix=False)
|
||||
|
||||
return shape_key
|
||||
|
||||
|
||||
def addget_shape_key_driver(obj, name="Key"):
|
||||
""" Fetches the driver for the shape key, or creates it if it doesn't
|
||||
already exist.
|
||||
"""
|
||||
driver_path = 'keys["' + name + '"].value'
|
||||
fcurve = None
|
||||
driver = None
|
||||
if obj.data.shape_keys.animation_data is not None:
|
||||
for driver_s in obj.data.shape_keys.animation_data.drivers:
|
||||
if driver_s.data_path == driver_path:
|
||||
fcurve = driver_s
|
||||
if fcurve is None:
|
||||
fcurve = obj.data.shape_keys.keys[name].driver_add("value")
|
||||
fcurve.driver.type = 'AVERAGE'
|
||||
|
||||
return fcurve
|
||||
|
||||
|
||||
|
||||
|
||||
def metarig_template():
    """ Builds the example metarig for this rig type: a single bone tagged
        with the 'copy' type. (Code generated by rigify.write_meta_rig.)
    """
    # generated by rigify.write_meta_rig
    bpy.ops.object.mode_set(mode='EDIT')
    obj = bpy.context.active_object
    arm = obj.data
    bone = arm.edit_bones.new('Bone')
    bone.head[:] = 0.0000, 0.0000, 0.0000
    bone.tail[:] = 0.0000, 0.0000, 1.0000
    bone.roll = 0.0000
    bone.use_connect = False

    bpy.ops.object.mode_set(mode='OBJECT')
    # Tag the pose bone so rigify dispatches it to this module.
    pbone = obj.pose.bones['Bone']
    pbone['type'] = 'copy'
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
    """ The definition for this rig type is simply the named bone itself,
        returned as a single-entry list of its canonical name.
    """
    return [obj.data.bones[orig_bone_name].name]
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
    """ Builds the deform side of the rig: for every listed mesh, adds a
        shape key plus a LOC_DIFF driver that blends the key in as the two
        bones move apart/together relative to their rest distance.

    obj: the armature object.
    definitions: bone definition list; definitions[0] is the driving bone.
    base_names: maps internal bone names to their original names.
    options: per-bone options; requires "to", "mesh", "shape_key";
             optional "dmul" scales the blend factor.
    Raises RigifyError when a required option is missing.
    """
    bpy.ops.object.mode_set(mode='EDIT')
    eb = obj.data.edit_bones

    bone_from = definitions[0]

    # Options
    req_options = ["to", "mesh", "shape_key"]
    for option in req_options:
        if option not in options:
            raise RigifyError("'%s' rig type requires a '%s' option (bone: %s)" % (RIG_TYPE, option, base_names[definitions[0]]))

    bone_to = "ORG-" + options["to"]
    # "mesh" may be a comma separated list of object names.
    meshes = options["mesh"].replace(" ", "").split(",")
    shape_key_name = options["shape_key"]

    if "dmul" in options:
        shape_blend_fac = options["dmul"]
    else:
        shape_blend_fac = 1.0

    # Calculate the distance between the bones
    distance = (eb[bone_from].head - eb[bone_to].head).length

    bpy.ops.object.mode_set(mode='OBJECT')

    # For every listed mesh object
    for mesh_name in meshes:
        mesh_obj = bpy.data.objects[mesh_name]

        # Add/get the shape key
        shape_key = addget_shape_key(mesh_obj, name=shape_key_name)

        # Add/get the shape key driver
        fcurve = addget_shape_key_driver(mesh_obj, name=shape_key_name)
        driver = fcurve.driver

        # Get the variable, or create it if it doesn't already exist
        var_name = base_names[bone_from]
        if var_name in driver.variables:
            var = driver.variables[var_name]
        else:
            var = driver.variables.new()
            var.name = var_name

        # Set up the variable
        var.type = "LOC_DIFF"
        var.targets[0].id_type = 'OBJECT'
        var.targets[0].id = obj
        var.targets[0].bone_target = bone_from
        var.targets[1].id_type = 'OBJECT'
        var.targets[1].id = obj
        var.targets[1].bone_target = bone_to

        # Set fcurve offset, so zero is at the rest distance
        # NOTE(review): assumes a generator modifier exists at index 0 —
        # true for freshly created driver fcurves; confirm for reused ones.
        mod = fcurve.modifiers[0]
        if distance > 0.00001:
            mod.coefficients[0] = -shape_blend_fac
            mod.coefficients[1] = shape_blend_fac / distance

    return (None,)
||||
|
||||
|
||||
|
||||
|
||||
def control(obj, definitions, base_names, options):
    """ No control rig is generated for this rig type; kept as a stub
        so the rig-type interface stays uniform.

    options:
        mesh: name of mesh object with the shape key
        shape_key: name of shape key
        to: name of bone to measure distance from
    """
    return None
||||
|
||||
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
    """ Entry point for this rig type: builds only the deform rig
        (the control rig is intentionally disabled).
    """
    # Create control rig
    #control(obj, bone_definition, base_names, options)
    # Create deform rig
    deform(obj, bone_definition, base_names, options)

    return (None,)
||||
|
@ -1,172 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
|
||||
#METARIG_NAMES = ("cpy",)
|
||||
RIG_TYPE = "shape_key_rotdiff"
|
||||
|
||||
|
||||
def addget_shape_key(obj, name="Key"):
    """ Fetches a shape key, or creates it if it doesn't exist.

    obj: mesh object carrying the shape keys.
    name: name of the shape key to fetch or create.
    Returns the shape key datablock.
    """
    # Create a shapekey set if it doesn't already exist
    if obj.data.shape_keys is None:
        # A "Basis" key must exist before any other keys can be added.
        shape = obj.add_shape_key(name="Basis", from_mix=False)
        obj.active_shape_key_index = 0

    # Get the shapekey, or create it if it doesn't already exist
    if name in obj.data.shape_keys.keys:
        shape_key = obj.data.shape_keys.keys[name]
    else:
        shape_key = obj.add_shape_key(name=name, from_mix=False)

    return shape_key
||||
|
||||
|
||||
def addget_shape_key_driver(obj, name="Key"):
    """ Fetches the driver for the shape key, or creates it if it doesn't
        already exist.

    obj: mesh object carrying the shape keys (assumes obj.data.shape_keys
         already exists — callers create it via addget_shape_key first).
    name: name of the shape key whose driver is wanted.
    Returns the driver F-Curve.
    """
    driver_path = 'keys["' + name + '"].value'
    fcurve = None
    driver = None  # NOTE(review): assigned but never used below
    # Scan existing drivers for one already targeting this shape key's value.
    if obj.data.shape_keys.animation_data is not None:
        for driver_s in obj.data.shape_keys.animation_data.drivers:
            if driver_s.data_path == driver_path:
                fcurve = driver_s
    # No existing driver found — create one with a simple AVERAGE type.
    if fcurve is None:
        fcurve = obj.data.shape_keys.keys[name].driver_add("value")
        fcurve.driver.type = 'AVERAGE'

    return fcurve
||||
|
||||
|
||||
|
||||
|
||||
def metarig_template():
    """ Builds the example metarig for this rig type: a single bone tagged
        with the 'copy' type. (Code generated by rigify.write_meta_rig.)
    """
    # generated by rigify.write_meta_rig
    bpy.ops.object.mode_set(mode='EDIT')
    obj = bpy.context.active_object
    arm = obj.data
    bone = arm.edit_bones.new('Bone')
    bone.head[:] = 0.0000, 0.0000, 0.0000
    bone.tail[:] = 0.0000, 0.0000, 1.0000
    bone.roll = 0.0000
    bone.use_connect = False

    bpy.ops.object.mode_set(mode='OBJECT')
    # Tag the pose bone so rigify dispatches it to this module.
    pbone = obj.pose.bones['Bone']
    pbone['type'] = 'copy'
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
    """ The definition for this rig type is simply the named bone itself,
        returned as a single-entry list of its canonical name.
    """
    return [obj.data.bones[orig_bone_name].name]
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
    """ Builds the deform side of the rig: for every listed mesh, adds a
        shape key plus a ROTATION_DIFF driver that blends the key in as the
        rotation difference between the two bones changes.

    obj: the armature object.
    definitions: bone definition list; definitions[0] is the driving bone.
    base_names: maps internal bone names to their original names.
    options: per-bone options; requires "to", "mesh", "shape_key";
             optional "dmul" scales the blend factor.
    Raises RigifyError when a required option is missing.
    """
    bpy.ops.object.mode_set(mode='EDIT')
    eb = obj.data.edit_bones

    bone_from = definitions[0]

    # Options
    req_options = ["to", "mesh", "shape_key"]
    for option in req_options:
        if option not in options:
            raise RigifyError("'%s' rig type requires a '%s' option (bone: %s)" % (RIG_TYPE, option, base_names[definitions[0]]))

    bone_to = "ORG-" + options["to"]
    # "mesh" may be a comma separated list of object names.
    meshes = options["mesh"].replace(" ", "").split(",")
    shape_key_name = options["shape_key"]

    if "dmul" in options:
        shape_blend_fac = options["dmul"]
    else:
        shape_blend_fac = 1.0

    # Calculate the rotation difference between the bones
    # NOTE(review): this multiplies two Quaternions and scales by 2, then
    # below compares/divides it as if it were a scalar angle — looks
    # suspect; confirm against mathutils Quaternion semantics.
    rotdiff = (eb[bone_from].matrix.to_quat() * eb[bone_to].matrix.to_quat()) * 2

    bpy.ops.object.mode_set(mode='OBJECT')

    # For every listed mesh object
    for mesh_name in meshes:
        mesh_obj = bpy.data.objects[mesh_name]

        # Add/get the shape key
        shape_key = addget_shape_key(mesh_obj, name=shape_key_name)

        # Add/get the shape key driver
        fcurve = addget_shape_key_driver(mesh_obj, name=shape_key_name)
        driver = fcurve.driver

        # Get the variable, or create it if it doesn't already exist
        var_name = base_names[bone_from]
        if var_name in driver.variables:
            var = driver.variables[var_name]
        else:
            var = driver.variables.new()
            var.name = var_name

        # Set up the variable
        var.type = "ROTATION_DIFF"
        var.targets[0].id_type = 'OBJECT'
        var.targets[0].id = obj
        var.targets[0].bone_target = bone_from
        var.targets[1].id_type = 'OBJECT'
        var.targets[1].id = obj
        var.targets[1].bone_target = bone_to

        # Set fcurve offset, so zero is at the rest distance
        mod = fcurve.modifiers[0]
        if rotdiff > 0.00001:
            mod.coefficients[0] = -shape_blend_fac
            mod.coefficients[1] = shape_blend_fac / rotdiff

    return (None,)
||||
|
||||
|
||||
|
||||
|
||||
def control(obj, definitions, base_names, options):
    """ No control rig is generated for this rig type; kept as a stub
        so the rig-type interface stays uniform.

    options:
        mesh: name of mesh object with the shape key
        shape_key: name of shape key
        to: name of bone to measure distance from
    """
    return None
||||
|
||||
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
    """ Entry point for this rig type: builds only the deform rig
        (the control rig is intentionally disabled).
    """
    # Create control rig
    #control(obj, bone_definition, base_names, options)
    # Create deform rig
    deform(obj, bone_definition, base_names, options)

    return (None,)
||||
|
@ -1,481 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
|
||||
# not used, defined for completeness
|
||||
METARIG_NAMES = ("pelvis", "ribcage")
|
||||
|
||||
|
||||
def metarig_template():
    """ Builds the example metarig for the spine_pivot_flex rig type:
        pelvis -> rib_cage -> spine.01 … spine.07, with 'rib_cage' tagged
        as 'spine_pivot_flex'. (Code generated by rigify.write_meta_rig.)
    """
    # generated by rigify.write_meta_rig
    bpy.ops.object.mode_set(mode='EDIT')
    obj = bpy.context.active_object
    arm = obj.data
    bone = arm.edit_bones.new('pelvis')
    bone.head[:] = 0.0000, -0.0306, 0.1039
    bone.tail[:] = 0.0000, -0.0306, -0.0159
    bone.roll = 0.0000
    bone.use_connect = False
    bone = arm.edit_bones.new('rib_cage')
    bone.head[:] = 0.0000, -0.0306, 0.1039
    bone.tail[:] = 0.0000, -0.0306, 0.2236
    bone.roll = -0.0000
    bone.use_connect = False
    bone.parent = arm.edit_bones['pelvis']
    bone = arm.edit_bones.new('spine.01')
    bone.head[:] = 0.0000, 0.0000, -0.0000
    bone.tail[:] = 0.0000, -0.0306, 0.1039
    bone.roll = -0.0000
    bone.use_connect = False
    bone.parent = arm.edit_bones['rib_cage']
    bone = arm.edit_bones.new('spine.02')
    bone.head[:] = 0.0000, -0.0306, 0.1039
    bone.tail[:] = -0.0000, -0.0398, 0.2045
    bone.roll = -0.0000
    bone.use_connect = True
    bone.parent = arm.edit_bones['spine.01']
    bone = arm.edit_bones.new('spine.03')
    bone.head[:] = -0.0000, -0.0398, 0.2045
    bone.tail[:] = -0.0000, -0.0094, 0.2893
    bone.roll = -0.0000
    bone.use_connect = True
    bone.parent = arm.edit_bones['spine.02']
    bone = arm.edit_bones.new('spine.04')
    bone.head[:] = -0.0000, -0.0094, 0.2893
    bone.tail[:] = -0.0000, 0.0335, 0.3595
    bone.roll = -0.0000
    bone.use_connect = True
    bone.parent = arm.edit_bones['spine.03']
    bone = arm.edit_bones.new('spine.05')
    bone.head[:] = -0.0000, 0.0335, 0.3595
    bone.tail[:] = -0.0000, 0.0555, 0.4327
    bone.roll = -0.0000
    bone.use_connect = True
    bone.parent = arm.edit_bones['spine.04']
    bone = arm.edit_bones.new('spine.06')
    bone.head[:] = -0.0000, 0.0555, 0.4327
    bone.tail[:] = -0.0000, 0.0440, 0.5207
    bone.roll = -0.0000
    bone.use_connect = True
    bone.parent = arm.edit_bones['spine.05']
    bone = arm.edit_bones.new('spine.07')
    bone.head[:] = -0.0000, 0.0440, 0.5207
    bone.tail[:] = -0.0000, 0.0021, 0.5992
    bone.roll = -0.0000
    bone.use_connect = True
    bone.parent = arm.edit_bones['spine.06']

    bpy.ops.object.mode_set(mode='OBJECT')
    # Tag the pose bone so rigify dispatches it to this module.
    pbone = obj.pose.bones['rib_cage']
    pbone['type'] = 'spine_pivot_flex'
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
    '''
    The bone given is the second in a chain.
    Expects at least 1 parent and a chain of children with the same basename
    eg.
        pelvis -> rib_cage -> spine.01 -> spine.02 -> spine.03

    note: same as neck.
    '''
    rib_bone = obj.data.bones[orig_bone_name]

    # The ribcage must hang off a pelvis bone.
    parent_bone = rib_bone.parent
    if parent_bone is None:
        raise RigifyError("expected the ribcage bone:'%s' to have a parent (ribcage)." % rib_bone.name)

    # Exactly one child: the root of the spine chain.
    rib_children = rib_bone.children
    if len(rib_children) != 1:
        raise RigifyError("expected the ribcage to have only 1 child.")

    first_child = rib_children[0]

    # [pelvis, ribcage, spine.01, spine.02, ...]
    names = [parent_bone.name, rib_bone.name, first_child.name]
    names.extend(b.name for b in first_child.children_recursive_basename)
    return names
||||
|
||||
|
||||
def fk(*args):
    # The FK variant of this rig is identical to the default; delegate.
    main(*args)
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
    """ Creates one DEF- bone per spine chain bone (definitions[2:]),
        each constrained to copy the transforms of its original bone.
    """
    for org_bone_name in definitions[2:]:
        bpy.ops.object.mode_set(mode='EDIT')

        # Create deform bone.
        bone = copy_bone_simple(obj.data, org_bone_name, "DEF-%s" % base_names[org_bone_name], parent=True)

        # Store name before leaving edit mode
        bone_name = bone.name

        # Leave edit mode
        bpy.ops.object.mode_set(mode='OBJECT')

        # Get the pose bone
        bone = obj.pose.bones[bone_name]

        # Constrain to the original bone
        # XXX. Todo, is this needed if the bone is connected to its parent?
        con = bone.constraints.new('COPY_TRANSFORMS')
        con.name = "copy_loc"
        con.target = obj
        con.subtarget = org_bone_name
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
    """ Builds the spine_pivot_flex rig.

    bone_definition: [pelvis, ribcage, spine.01, spine.02, ...] as returned
        by metarig_definition().
    base_names: maps internal bone names to their original names.
    options: may contain "bend_%.2d" weights and "ex_layer".

    Creates pelvis/ribcage controls, a hinge mechanism, three parallel spine
    chains (original ORG-*, copy, reversed MCH-rev_*), driver-based bend
    distribution, and a "pivot_slide" property that slides the spine pivot
    along the reversed chain. Returns None (no support for blending chains).
    """
    from mathutils import Vector, Matrix
    from math import radians, pi

    arm = obj.data

    # Initialize container classes for convenience
    mt = bone_class_instance(obj, ["pelvis", "ribcage"]) # meta
    mt.pelvis = bone_definition[0]
    mt.ribcage = bone_definition[1]
    mt.update()

    spine_chain_orig = tuple(bone_definition[2:])
    spine_chain = [arm.edit_bones[child_name] for child_name in spine_chain_orig]
    spine_chain_basename = base_names[spine_chain[0].name].rsplit(".", 1)[0] # probably 'ORG-spine.01' -> 'spine'
    spine_chain_len = len(spine_chain_orig)

    child = spine_chain[0]
    spine_chain_segment_length = child.length
    #child.parent = mt.pelvis_e # was mt.ribcage

    # The first bone in the chain happens to be the basis of others, create them now
    ex = bone_class_instance(obj, ["pelvis_copy", "ribcage_hinge", "ribcage_copy", "spine_rotate"])

    ex.pelvis_copy_e = copy_bone_simple(arm, mt.pelvis, base_names[mt.pelvis]) # no parent
    ex.pelvis_copy = ex.pelvis_copy_e.name
    ex.pelvis_copy_e.use_local_location = False

    # copy the pelvis, offset to make MCH-spine_rotate and MCH-ribcage_hinge
    ex.ribcage_hinge_e = copy_bone_simple(arm, mt.pelvis, "MCH-%s_hinge" % base_names[mt.ribcage])
    ex.ribcage_hinge = ex.ribcage_hinge_e.name
    ex.ribcage_hinge_e.translate(Vector((0.0, spine_chain_segment_length / 4.0, 0.0)))

    ex.spine_rotate_e = copy_bone_simple(arm, mt.ribcage, "MCH-%s_rotate" % spine_chain_basename)
    ex.spine_rotate = ex.spine_rotate_e.name
    ex.spine_rotate_e.translate(Vector((0.0, spine_chain_segment_length / 2.0, 0.0)))
    ex.spine_rotate_e.use_connect = False
    ex.spine_rotate_e.parent = ex.pelvis_copy_e

    # Copy the last bone now
    child = spine_chain[-1]

    ex.ribcage_copy_e = copy_bone_simple(arm, mt.ribcage, base_names[mt.ribcage])
    ex.ribcage_copy = ex.ribcage_copy_e.name
    ex.ribcage_copy_e.use_connect = False
    ex.ribcage_copy_e.parent = ex.ribcage_hinge_e

    spine_chain = [child.name for child in spine_chain]

    # We have 3 spine chains
    # - original (ORG_*)
    # - copy (*use original name*)
    # - reverse (MCH-rev_*)
    spine_chain_attrs = [("spine_%.2d" % (i + 1)) for i in range(spine_chain_len)]

    mt_chain = bone_class_instance(obj, spine_chain_attrs) # ORG_*
    rv_chain = bone_class_instance(obj, spine_chain_attrs) # *
    ex_chain = bone_class_instance(obj, spine_chain_attrs) # MCH-rev_*
    del spine_chain_attrs

    # Create the copy and reverse chains, and record the ORG names.
    for i, child_name in enumerate(spine_chain):
        child_name_orig = base_names[spine_chain_orig[i]]

        attr = mt_chain.attr_names[i] # eg. spine_04

        setattr(mt_chain, attr, spine_chain_orig[i]) # the original bone

        ebone = copy_bone_simple(arm, child_name, child_name_orig) # use the original name
        setattr(ex_chain, attr, ebone.name)

        ebone = copy_bone_simple(arm, child_name, "MCH-rev_%s" % child_name_orig)
        setattr(rv_chain, attr, ebone.name)
        ebone.use_connect = False

    mt_chain.update()
    ex_chain.update()
    rv_chain.update()

    # Now we need to re-parent these chains
    for i, child_name in enumerate(spine_chain_orig):
        attr = ex_chain.attr_names[i] + "_e"
        ebone = getattr(ex_chain, attr)
        if i == 0:
            ebone.use_connect = False
            ebone.parent = ex.pelvis_copy_e
        else:
            attr_parent = ex_chain.attr_names[i - 1] + "_e"
            ebone.parent = getattr(ex_chain, attr_parent)

        # intentional! get the parent from the other parallel chain member
        getattr(rv_chain, attr).parent = ebone

    # ex_chain needs to interlace bones!
    # Note, skip the first bone
    for i in range(1, spine_chain_len): # similar to neck
        child_name_orig = base_names[spine_chain_orig[i]]
        spine_e = getattr(mt_chain, mt_chain.attr_names[i] + "_e")

        # dont store parent names, re-reference as each chain bones parent.
        spine_e_parent = arm.edit_bones.new("MCH-rot_%s" % child_name_orig)
        spine_e_parent.head = spine_e.head
        spine_e_parent.tail = spine_e.head + (mt.ribcage_e.vector.normalize() * spine_chain_segment_length / 2.0)
        spine_e_parent.roll = mt.ribcage_e.roll

        spine_e = getattr(ex_chain, ex_chain.attr_names[i] + "_e")
        orig_parent = spine_e.parent
        spine_e.use_connect = False
        spine_e.parent = spine_e_parent
        spine_e_parent.use_connect = False

        spine_e_parent.parent = orig_parent

    # Rotate the rev chain 180 about the by the first bones center point
    pivot = (rv_chain.spine_01_e.head + rv_chain.spine_01_e.tail) * 0.5
    matrix = Matrix.Rotation(radians(180), 3, 'X')
    for i, attr in enumerate(rv_chain.attr_names): # similar to neck
        spine_e = getattr(rv_chain, attr + "_e")
        # use the first bone as the pivot

        spine_e.head = ((spine_e.head - pivot) * matrix) + pivot
        spine_e.tail = ((spine_e.tail - pivot) * matrix) + pivot
        spine_e.roll += pi # 180d roll
    del spine_e

    deform(obj, bone_definition, base_names, options)

    bpy.ops.object.mode_set(mode='OBJECT')

    # refresh pose bones
    mt.update()
    ex.update()
    mt_chain.update()
    ex_chain.update()
    rv_chain.update()

    # Axis locks
    ex.ribcage_copy_p.lock_location = True, True, True

    # Hinge: the ribcage mechanically follows the pelvis rotation unless
    # the "hinge" property (driven below) disables it.
    con = ex.ribcage_hinge_p.constraints.new('COPY_ROTATION')
    con.name = "hinge"
    con.target = obj
    con.subtarget = ex.pelvis_copy

    # add driver
    fcurve = con.driver_add("influence")
    driver = fcurve.driver
    var = driver.variables.new()
    driver.type = 'AVERAGE'
    var.name = "var"
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].data_path = ex.ribcage_copy_p.path_from_id() + '["hinge"]'

    # Linear generator: influence = 1.0 - hinge
    mod = fcurve.modifiers[0]
    mod.poly_order = 1
    mod.coefficients[0] = 1.0
    mod.coefficients[1] = -1.0

    con = ex.spine_rotate_p.constraints.new('COPY_ROTATION')
    con.target = obj
    con.subtarget = ex.ribcage_copy

    # ex.pelvis_copy_p / rib_cage
    con = ex.ribcage_copy_p.constraints.new('COPY_LOCATION')
    con.target = obj
    con.subtarget = ex.pelvis_copy
    con.head_tail = 0.0

    # This stores all important ID props
    prop = rna_idprop_ui_prop_get(ex.ribcage_copy_p, "hinge", create=True)
    ex.ribcage_copy_p["hinge"] = 1.0
    prop["soft_min"] = 0.0
    prop["soft_max"] = 1.0

    prop = rna_idprop_ui_prop_get(ex.ribcage_copy_p, "pivot_slide", create=True)
    ex.ribcage_copy_p["pivot_slide"] = 1.0 / spine_chain_len
    prop["soft_min"] = 1.0 / spine_chain_len
    prop["soft_max"] = 1.0

    # Create a fake connected parent/child relationship with bone location constraints
    # positioned at the tip.

    # reverse bones / MCH-rev_spine.##
    for i in range(1, spine_chain_len):
        spine_p = getattr(rv_chain, rv_chain.attr_names[i] + "_p")
        spine_fake_parent_name = getattr(rv_chain, rv_chain.attr_names[i - 1])

        con = spine_p.constraints.new('COPY_LOCATION')
        con.target = obj
        con.subtarget = spine_fake_parent_name
        con.head_tail = 1.0
        del spine_p, spine_fake_parent_name, con

    # Constrain 'inbetween' bones
    target_names = [("b%.2d" % (i + 1)) for i in range(spine_chain_len - 1)]
    rib_driver_path = ex.ribcage_copy_p.path_from_id()

    # bend_tot sums all the per-segment bend weights (SUM driver below).
    ex.ribcage_copy_p["bend_tot"] = 0.0
    fcurve = ex.ribcage_copy_p.driver_add('["bend_tot"]')
    driver = fcurve.driver
    driver.type = 'SUM'
    fcurve.modifiers.remove(fcurve.modifiers[0]) # grr dont need a modifier

    for i in range(spine_chain_len - 1):
        var = driver.variables.new()
        var.name = target_names[i]
        var.targets[0].id_type = 'OBJECT'
        var.targets[0].id = obj
        var.targets[0].data_path = rib_driver_path + ('["bend_%.2d"]' % (i + 1))

    for i in range(1, spine_chain_len):

        # Add bend prop
        prop_name = "bend_%.2d" % i
        prop = rna_idprop_ui_prop_get(ex.ribcage_copy_p, prop_name, create=True)
        if ("bend_%.2d" % i) in options:
            ex.ribcage_copy_p[prop_name] = options["bend_%.2d" % i]
        else:
            ex.ribcage_copy_p[prop_name] = 1.0
        prop["soft_min"] = 0.0
        prop["soft_max"] = 1.0

        spine_p = getattr(ex_chain, ex_chain.attr_names[i] + "_p")
        spine_p_parent = spine_p.parent # interlaced bone

        con = spine_p_parent.constraints.new('COPY_ROTATION')
        con.target = obj
        con.subtarget = ex.spine_rotate
        con.owner_space = 'LOCAL'
        con.target_space = 'LOCAL'
        del spine_p

        # add driver
        fcurve = con.driver_add("influence")
        driver = fcurve.driver
        driver.type = 'SCRIPTED'
        driver.expression = "bend/bend_tot"

        fcurve.modifiers.remove(fcurve.modifiers[0]) # grr dont need a modifier

        # add target
        var = driver.variables.new()
        var.name = "bend_tot"
        var.targets[0].id_type = 'OBJECT'
        var.targets[0].id = obj
        var.targets[0].data_path = rib_driver_path + ('["bend_tot"]')

        var = driver.variables.new()
        var.name = "bend"
        var.targets[0].id_type = 'OBJECT'
        var.targets[0].id = obj
        var.targets[0].data_path = rib_driver_path + ('["%s"]' % prop_name)

    # original bone drivers
    # note: the first bone has a lot more constraints, but also this simple one is first.
    for i, attr in enumerate(mt_chain.attr_names):
        spine_p = getattr(mt_chain, attr + "_p")

        con = spine_p.constraints.new('COPY_ROTATION')
        con.target = obj
        con.subtarget = getattr(ex_chain, attr) # lock to the copy's rotation
    del spine_p

    # pivot slide: - lots of copy location constraints.

    con = mt_chain.spine_01_p.constraints.new('COPY_LOCATION')
    con.name = "base"
    con.target = obj
    con.subtarget = rv_chain.spine_01 # lock to the reverse location

    for i in range(1, spine_chain_len + 1):
        con = mt_chain.spine_01_p.constraints.new('COPY_LOCATION')
        con.name = "slide_%d" % i
        con.target = obj

        if i == spine_chain_len:
            attr = mt_chain.attr_names[i - 1]
        else:
            attr = mt_chain.attr_names[i]

        con.subtarget = getattr(rv_chain, attr) # lock to the reverse location

        if i == spine_chain_len:
            con.head_tail = 1.0

        fcurve = con.driver_add("influence")
        driver = fcurve.driver
        var = driver.variables.new()
        driver.type = 'AVERAGE'
        var.name = "var"
        var.targets[0].id_type = 'OBJECT'
        var.targets[0].id = obj
        var.targets[0].data_path = rib_driver_path + '["pivot_slide"]'

        mod = fcurve.modifiers[0]
        mod.poly_order = 1
        mod.coefficients[0] = - (i - 1)
        mod.coefficients[1] = spine_chain_len

    # Set pelvis and ribcage controls to use the first and last bone in the
    # spine respectively for their custom shape transform
    ex.ribcage_copy_p.custom_shape_transform = obj.pose.bones[bone_definition[len(bone_definition)-1]]
    ex.pelvis_copy_p.custom_shape_transform = obj.pose.bones[bone_definition[2]]

    # last step setup layers
    if "ex_layer" in options:
        layer = [n == options["ex_layer"] for n in range(0, 32)]
    else:
        layer = list(arm.bones[bone_definition[1]].layers)
    for attr in ex.attr_names:
        getattr(ex, attr + "_b").layers = layer
    for attr in ex_chain.attr_names:
        getattr(ex_chain, attr + "_b").layers = layer
    for attr in rv_chain.attr_names:
        getattr(rv_chain, attr + "_b").layers = layer

    layer = list(arm.bones[bone_definition[1]].layers)
    arm.bones[ex.pelvis_copy].layers = layer
    arm.bones[ex.ribcage_copy].layers = layer

    # no support for blending chains
    return None
@ -1,109 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import copy_bone_simple
|
||||
|
||||
METARIG_NAMES = tuple()
|
||||
RIG_TYPE = "stretch"
|
||||
|
||||
# TODO
|
||||
#def metarig_template():
|
||||
# # generated by rigify.write_meta_rig
|
||||
# bpy.ops.object.mode_set(mode='EDIT')
|
||||
# obj = bpy.context.active_object
|
||||
# arm = obj.data
|
||||
# bone = arm.edit_bones.new('Bone')
|
||||
# bone.head[:] = 0.0000, 0.0000, 0.0000
|
||||
# bone.tail[:] = 0.0000, 0.0000, 1.0000
|
||||
# bone.roll = 0.0000
|
||||
# bone.use_connect = False
|
||||
#
|
||||
# bpy.ops.object.mode_set(mode='OBJECT')
|
||||
# pbone = obj.pose.bones['Bone']
|
||||
# pbone['type'] = 'copy'
|
||||
|
||||
# Map loose user-supplied option values (numbers and yes/no style strings)
# onto real booleans.
bool_map = dict.fromkeys((0, 0.0, "false", "False", "no", "No"), False)
bool_map.update(dict.fromkeys((1, 1.0, "true", "True", "yes", "Yes"), True))
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
    """ The definition for this rig type is just the bone itself,
        as a single-element tuple.
    """
    bone_names = (orig_bone_name,)
    return bone_names
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
    """ A stretchy bone from one bone to another.
        Deformation only (no controls).

    obj: the armature object.
    bone_definition: (bone_name,) as returned by metarig_definition().
    base_names: maps internal bone names to their original names.
    options: requires "to" (target bone name, resolved as ORG-<to>);
             optional "preserve_volume" (parsed through bool_map).
    Raises RigifyError on a missing/invalid "to" option.
    """
    # Verify required parameter
    if "to" not in options:
        raise RigifyError("'%s' rig type requires a 'to' parameter (bone: %s)" % (RIG_TYPE, base_names[bone_definition[0]]))
    if type(options["to"]) is not str:
        raise RigifyError("'%s' rig type 'to' parameter must be a string (bone: %s)" % (RIG_TYPE, base_names[bone_definition[0]]))
    if ("ORG-" + options["to"]) not in obj.data.bones:
        raise RigifyError("'%s' rig type 'to' parameter must name a bone in the metarig (bone: %s)" % (RIG_TYPE, base_names[bone_definition[0]]))

    preserve_volume = None
    # Check optional parameter
    if "preserve_volume" in options:
        try:
            preserve_volume = bool_map[options["preserve_volume"]]
        except KeyError:
            # Unrecognised value: fall back to no volume preservation.
            preserve_volume = False

    eb = obj.data.edit_bones
    bb = obj.data.bones
    pb = obj.pose.bones

    bpy.ops.object.mode_set(mode='EDIT')
    arm = obj.data  # NOTE(review): assigned but never used below

    mbone1 = bone_definition[0]
    mbone2 = "ORG-" + options["to"]

    # Deform bone spanning from mbone1's head to mbone2's head.
    bone_e = copy_bone_simple(obj.data, mbone1, "DEF-%s" % base_names[bone_definition[0]])
    bone_e.use_connect = False
    bone_e.parent = eb[mbone1]
    bone_e.tail = eb[mbone2].head
    # Store name before leaving edit mode.
    bone = bone_e.name

    bpy.ops.object.mode_set(mode='OBJECT')

    # Constraints
    con = pb[bone].constraints.new('DAMPED_TRACK')
    con.target = obj
    con.subtarget = mbone2

    con = pb[bone].constraints.new('STRETCH_TO')
    con.target = obj
    con.subtarget = mbone2
    con.rest_length = bb[bone].length
    if preserve_volume:
        con.volume = 'VOLUME_XZX'
    else:
        con.volume = 'NO_VOLUME'

    return tuple()
@ -1,152 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import copy_bone_simple
|
||||
|
||||
METARIG_NAMES = tuple()
|
||||
RIG_TYPE = "stretch_twist"
|
||||
|
||||
# TODO
|
||||
#def metarig_template():
|
||||
# # generated by rigify.write_meta_rig
|
||||
# bpy.ops.object.mode_set(mode='EDIT')
|
||||
# obj = bpy.context.active_object
|
||||
# arm = obj.data
|
||||
# bone = arm.edit_bones.new('Bone')
|
||||
# bone.head[:] = 0.0000, 0.0000, 0.0000
|
||||
# bone.tail[:] = 0.0000, 0.0000, 1.0000
|
||||
# bone.roll = 0.0000
|
||||
# bone.use_connect = False
|
||||
#
|
||||
# bpy.ops.object.mode_set(mode='OBJECT')
|
||||
# pbone = obj.pose.bones['Bone']
|
||||
# pbone['type'] = 'copy'
|
||||
|
||||
bool_map = {0:False, 1:True,
|
||||
0.0:False, 1.0:True,
|
||||
"false":False, "true":True,
|
||||
"False":False, "True":True,
|
||||
"no":False, "yes":True,
|
||||
"No":False, "Yes":True}
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
return (orig_bone_name,)
|
||||
|
||||
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
""" A dual-bone stretchy bone setup. Each half follows the twist of the
|
||||
bone on its side.
|
||||
Deformation only (no controls).
|
||||
"""
|
||||
# Verify required parameter
|
||||
if "to" not in options:
|
||||
raise RigifyError("'%s' rig type requires a 'to' parameter (bone: %s)" % (RIG_TYPE, base_names[0]))
|
||||
if type(options["to"]) is not str:
|
||||
raise RigifyError("'%s' rig type 'to' parameter must be a string (bone: %s)" % (RIG_TYPE, base_names[0]))
|
||||
if ("ORG-" + options["to"]) not in obj.data.bones:
|
||||
raise RigifyError("'%s' rig type 'to' parameter must name a bone in the metarig (bone: %s)" % (RIG_TYPE, base_names[0]))
|
||||
|
||||
preserve_volume = None
|
||||
# Check optional parameter
|
||||
if "preserve_volume" in options:
|
||||
try:
|
||||
preserve_volume = bool_map[options["preserve_volume"]]
|
||||
except KeyError:
|
||||
preserve_volume = False
|
||||
|
||||
eb = obj.data.edit_bones
|
||||
bb = obj.data.bones
|
||||
pb = obj.pose.bones
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
arm = obj.data
|
||||
|
||||
mbone1 = bone_definition[0]
|
||||
mbone2 = "ORG-" + options["to"]
|
||||
|
||||
bone_e = copy_bone_simple(obj.data, mbone1, "MCH-%s" % base_names[bone_definition[0]])
|
||||
bone_e.use_connect = False
|
||||
bone_e.parent = None
|
||||
bone_e.head = (eb[mbone1].head + eb[mbone2].head) / 2
|
||||
bone_e.tail = (bone_e.head[0], bone_e.head[1], bone_e.head[2]+0.1)
|
||||
mid_bone = bone_e.name
|
||||
|
||||
bone_e = copy_bone_simple(obj.data, mbone1, "DEF-%s.01" % base_names[bone_definition[0]])
|
||||
bone_e.use_connect = False
|
||||
bone_e.parent = eb[mbone1]
|
||||
bone_e.tail = eb[mid_bone].head
|
||||
bone1 = bone_e.name
|
||||
|
||||
bone_e = copy_bone_simple(obj.data, mbone2, "DEF-%s.02" % base_names[bone_definition[0]])
|
||||
bone_e.use_connect = False
|
||||
bone_e.parent = eb[mbone2]
|
||||
bone_e.tail = eb[mid_bone].head
|
||||
bone2 = bone_e.name
|
||||
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Constraints
|
||||
|
||||
# Mid bone
|
||||
con = pb[mid_bone].constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = mbone1
|
||||
|
||||
con = pb[mid_bone].constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = mbone2
|
||||
con.influence = 0.5
|
||||
|
||||
# Bone 1
|
||||
con = pb[bone1].constraints.new('DAMPED_TRACK')
|
||||
con.target = obj
|
||||
con.subtarget = mid_bone
|
||||
|
||||
con = pb[bone1].constraints.new('STRETCH_TO')
|
||||
con.target = obj
|
||||
con.subtarget = mid_bone
|
||||
con.rest_length = bb[bone1].length
|
||||
if preserve_volume:
|
||||
con.volume = 'VOLUME_XZX'
|
||||
else:
|
||||
con.volume = 'NO_VOLUME'
|
||||
|
||||
# Bone 2
|
||||
con = pb[bone2].constraints.new('DAMPED_TRACK')
|
||||
con.target = obj
|
||||
con.subtarget = mid_bone
|
||||
|
||||
con = pb[bone2].constraints.new('STRETCH_TO')
|
||||
con.target = obj
|
||||
con.subtarget = mid_bone
|
||||
con.rest_length = bb[bone2].length
|
||||
if preserve_volume:
|
||||
con.volume = 'VOLUME_XZX'
|
||||
else:
|
||||
con.volume = 'NO_VOLUME'
|
||||
|
||||
return tuple()
|
||||
|
@ -1,165 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
from mathutils import Vector, Matrix
|
||||
from math import radians, pi
|
||||
|
||||
# not used, defined for completeness
|
||||
METARIG_NAMES = ("pelvis", "ribcage")
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# TODO
|
||||
pass
|
||||
# generated by rigify.write_meta_rig
|
||||
#bpy.ops.object.mode_set(mode='EDIT')
|
||||
#obj = bpy.context.active_object
|
||||
#arm = obj.data
|
||||
#bone = arm.edit_bones.new('tail.01')
|
||||
#bone.head[:] = 0.0000, -0.0306, 0.1039
|
||||
#bone.tail[:] = 0.0000, -0.0306, -0.0159
|
||||
#bone.roll = 0.0000
|
||||
#bone.use_connect = False
|
||||
|
||||
#bpy.ops.object.mode_set(mode='OBJECT')
|
||||
#pbone = obj.pose.bones['tail.01']
|
||||
#pbone['type'] = 'tail_spline_ik'
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
""" Collects and returns the relevent bones for the rig.
|
||||
The bone given is the first in the chain of tail bones.
|
||||
It includes bones in the chain up until it hits a bone that doesn't
|
||||
have the same name base.
|
||||
|
||||
tail.01 -> tail.02 -> tail.03 -> ... -> tail.n
|
||||
"""
|
||||
arm = obj.data
|
||||
tail_base = arm.bones[orig_bone_name]
|
||||
|
||||
if tail_base.parent is None:
|
||||
raise RigifyError("'tail_control' rig type on bone '%s' requires a parent." % orig_bone_name)
|
||||
|
||||
bone_definitions = [tail_base.name]
|
||||
bone_definitions.extend([child.name for child in tail_base.children_recursive_basename])
|
||||
return bone_definitions
|
||||
|
||||
|
||||
def main(obj, bone_definitions, base_names, options):
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
arm = obj.data
|
||||
bb = obj.data.bones
|
||||
eb = obj.data.edit_bones
|
||||
pb = obj.pose.bones
|
||||
|
||||
# Create bones for hinge/free
|
||||
# hinge 1 sticks with the parent
|
||||
# hinge 2 is the parent of the tail controls
|
||||
hinge1 = copy_bone_simple(arm, bone_definitions[0], "MCH-%s.hinge1" % base_names[bone_definitions[0]], parent=True).name
|
||||
hinge2 = copy_bone_simple(arm, bone_definitions[0], "MCH-%s.hinge2" % base_names[bone_definitions[0]], parent=False).name
|
||||
|
||||
# Create tail control bones
|
||||
bones = []
|
||||
i = 0
|
||||
for bone_def in bone_definitions:
|
||||
bone = copy_bone_simple(arm, bone_def, base_names[bone_def], parent=True).name
|
||||
if i == 1: # Don't change parent of first tail bone
|
||||
eb[bone].use_connect = False
|
||||
eb[bone].parent = eb[hinge2]
|
||||
eb[bone].use_local_location = False
|
||||
i = 1
|
||||
bones += [bone]
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Rotation mode and axis locks
|
||||
for bone, org_bone in zip(bones, bone_definitions):
|
||||
pb[bone].rotation_mode = pb[org_bone].rotation_mode
|
||||
pb[bone].lock_location = tuple(pb[org_bone].lock_location)
|
||||
pb[bone].lock_rotations_4d = pb[org_bone].lock_rotations_4d
|
||||
pb[bone].lock_rotation = tuple(pb[org_bone].lock_rotation)
|
||||
pb[bone].lock_rotation_w = pb[org_bone].lock_rotation_w
|
||||
pb[bone].lock_scale = tuple(pb[org_bone].lock_scale)
|
||||
|
||||
# Add custom properties
|
||||
pb[bones[0]]["hinge"] = 0.0
|
||||
prop = rna_idprop_ui_prop_get(pb[bones[0]], "hinge", create=True)
|
||||
prop["min"] = 0.0
|
||||
prop["max"] = 1.0
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
pb[bones[0]]["free"] = 0.0
|
||||
prop = rna_idprop_ui_prop_get(pb[bones[0]], "free", create=True)
|
||||
prop["min"] = 0.0
|
||||
prop["max"] = 1.0
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
# Add constraints
|
||||
for bone, org_bone in zip(bones, bone_definitions):
|
||||
con = pb[org_bone].constraints.new('COPY_TRANSFORMS')
|
||||
con.target = obj
|
||||
con.subtarget = bone
|
||||
|
||||
con_f = pb[hinge2].constraints.new('COPY_LOCATION')
|
||||
con_f.target = obj
|
||||
con_f.subtarget = hinge1
|
||||
|
||||
con_h = pb[hinge2].constraints.new('COPY_TRANSFORMS')
|
||||
con_h.target = obj
|
||||
con_h.subtarget = hinge1
|
||||
|
||||
# Add drivers
|
||||
bone_path = pb[bones[0]].path_from_id()
|
||||
|
||||
driver_fcurve = con_f.driver_add("influence")
|
||||
driver = driver_fcurve.driver
|
||||
driver.type = 'AVERAGE'
|
||||
var = driver.variables.new()
|
||||
var.name = "free"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = bone_path + '["free"]'
|
||||
mod = driver_fcurve.modifiers[0]
|
||||
mod.poly_order = 1
|
||||
mod.coefficients[0] = 1.0
|
||||
mod.coefficients[1] = -1.0
|
||||
|
||||
driver_fcurve = con_h.driver_add("influence")
|
||||
driver = driver_fcurve.driver
|
||||
driver.type = 'AVERAGE'
|
||||
var = driver.variables.new()
|
||||
var.name = "hinge"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = bone_path + '["hinge"]'
|
||||
mod = driver_fcurve.modifiers[0]
|
||||
mod.poly_order = 1
|
||||
mod.coefficients[0] = 1.0
|
||||
mod.coefficients[1] = -1.0
|
||||
|
||||
|
||||
return None
|
@ -1,361 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import bone_class_instance, copy_bone_simple
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
|
||||
# not used, defined for completeness
|
||||
METARIG_NAMES = ("body", "head")
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# TODO:
|
||||
## generated by rigify.write_meta_rig
|
||||
#bpy.ops.object.mode_set(mode='EDIT')
|
||||
#obj = bpy.context.active_object
|
||||
#arm = obj.data
|
||||
#bone = arm.edit_bones.new('body')
|
||||
#bone.head[:] = 0.0000, -0.0276, -0.1328
|
||||
#bone.tail[:] = 0.0000, -0.0170, -0.0197
|
||||
#bone.roll = 0.0000
|
||||
#bone.use_connect = False
|
||||
#bone = arm.edit_bones.new('head')
|
||||
#bone.head[:] = 0.0000, -0.0170, -0.0197
|
||||
#bone.tail[:] = 0.0000, 0.0726, 0.1354
|
||||
#bone.roll = 0.0000
|
||||
#bone.use_connect = True
|
||||
#bone.parent = arm.edit_bones['body']
|
||||
#bone = arm.edit_bones.new('neck.01')
|
||||
#bone.head[:] = 0.0000, -0.0170, -0.0197
|
||||
#bone.tail[:] = 0.0000, -0.0099, 0.0146
|
||||
#bone.roll = 0.0000
|
||||
#bone.use_connect = False
|
||||
#bone.parent = arm.edit_bones['head']
|
||||
#bone = arm.edit_bones.new('neck.02')
|
||||
#bone.head[:] = 0.0000, -0.0099, 0.0146
|
||||
#bone.tail[:] = 0.0000, -0.0242, 0.0514
|
||||
#bone.roll = 0.0000
|
||||
#bone.use_connect = True
|
||||
#bone.parent = arm.edit_bones['neck.01']
|
||||
#bone = arm.edit_bones.new('neck.03')
|
||||
#bone.head[:] = 0.0000, -0.0242, 0.0514
|
||||
#bone.tail[:] = 0.0000, -0.0417, 0.0868
|
||||
#bone.roll = 0.0000
|
||||
#bone.use_connect = True
|
||||
#bone.parent = arm.edit_bones['neck.02']
|
||||
#bone = arm.edit_bones.new('neck.04')
|
||||
#bone.head[:] = 0.0000, -0.0417, 0.0868
|
||||
#bone.tail[:] = 0.0000, -0.0509, 0.1190
|
||||
#bone.roll = 0.0000
|
||||
#bone.use_connect = True
|
||||
#bone.parent = arm.edit_bones['neck.03']
|
||||
#bone = arm.edit_bones.new('neck.05')
|
||||
#bone.head[:] = 0.0000, -0.0509, 0.1190
|
||||
#bone.tail[:] = 0.0000, -0.0537, 0.1600
|
||||
#bone.roll = 0.0000
|
||||
#bone.use_connect = True
|
||||
#bone.parent = arm.edit_bones['neck.04']
|
||||
#
|
||||
#bpy.ops.object.mode_set(mode='OBJECT')
|
||||
#pbone = obj.pose.bones['head']
|
||||
#pbone['type'] = 'neck_flex'
|
||||
pass
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
'''
|
||||
The bone given is the tongue control, its parent is the body,
|
||||
# its only child the first of a chain with matching basenames.
|
||||
eg.
|
||||
body -> tongue_control -> tongue_01 -> tongue_02 -> tongue_03.... etc
|
||||
'''
|
||||
arm = obj.data
|
||||
tongue = arm.bones[orig_bone_name]
|
||||
body = tongue.parent
|
||||
|
||||
children = tongue.children
|
||||
if len(children) != 1:
|
||||
raise RigifyError("expected the tongue bone '%s' to have only 1 child." % orig_bone_name)
|
||||
|
||||
child = children[0]
|
||||
bone_definition = [body.name, tongue.name, child.name]
|
||||
bone_definition.extend([child.name for child in child.children_recursive_basename])
|
||||
return bone_definition
|
||||
|
||||
|
||||
def deform(obj, definitions, base_names, options):
|
||||
for org_bone_name in definitions[2:]:
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
# Create deform bone.
|
||||
bone = copy_bone_simple(obj.data, org_bone_name, "DEF-%s" % base_names[org_bone_name], parent=True)
|
||||
|
||||
# Store name before leaving edit mode
|
||||
bone_name = bone.name
|
||||
|
||||
# Leave edit mode
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Get the pose bone
|
||||
bone = obj.pose.bones[bone_name]
|
||||
|
||||
# Constrain to the original bone
|
||||
# XXX. Todo, is this needed if the bone is connected to its parent?
|
||||
con = bone.constraints.new('COPY_TRANSFORMS')
|
||||
con.name = "copy_loc"
|
||||
con.target = obj
|
||||
con.subtarget = org_bone_name
|
||||
|
||||
|
||||
# TODO: rename all of the head/neck references to tongue
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
from mathutils import Vector
|
||||
|
||||
arm = obj.data
|
||||
|
||||
# Initialize container classes for convenience
|
||||
mt = bone_class_instance(obj, ["body", "head"]) # meta
|
||||
mt.body = bone_definition[0]
|
||||
mt.head = bone_definition[1]
|
||||
mt.update()
|
||||
|
||||
neck_chain = bone_definition[2:]
|
||||
|
||||
mt_chain = bone_class_instance(obj, [("neck_%.2d" % (i + 1)) for i in range(len(neck_chain))]) # 99 bones enough eh?
|
||||
for i, attr in enumerate(mt_chain.attr_names):
|
||||
setattr(mt_chain, attr, neck_chain[i])
|
||||
mt_chain.update()
|
||||
|
||||
neck_chain_basename = base_names[mt_chain.neck_01_e.name].split(".")[0]
|
||||
neck_chain_segment_length = mt_chain.neck_01_e.length
|
||||
|
||||
ex = bone_class_instance(obj, ["head", "head_hinge", "neck_socket", "head_ctrl"]) # hinge & extras
|
||||
|
||||
# Add the head hinge at the bodys location, becomes the parent of the original head
|
||||
|
||||
# apply everything to this copy of the chain
|
||||
ex_chain = mt_chain.copy(base_names=base_names)
|
||||
ex_chain.neck_01_e.parent = mt_chain.neck_01_e.parent
|
||||
|
||||
|
||||
# Copy the head bone and offset
|
||||
ex.head_e = copy_bone_simple(arm, mt.head, "MCH-%s" % base_names[mt.head], parent=True)
|
||||
ex.head_e.use_connect = False
|
||||
ex.head = ex.head_e.name
|
||||
# offset
|
||||
head_length = ex.head_e.length
|
||||
ex.head_e.head.y += head_length / 2.0
|
||||
ex.head_e.tail.y += head_length / 2.0
|
||||
|
||||
# Yes, use the body bone but call it a head hinge
|
||||
ex.head_hinge_e = copy_bone_simple(arm, mt.body, "MCH-%s_hinge" % base_names[mt.head], parent=False)
|
||||
ex.head_hinge_e.use_connect = False
|
||||
ex.head_hinge = ex.head_hinge_e.name
|
||||
ex.head_hinge_e.head.y += head_length / 4.0
|
||||
ex.head_hinge_e.tail.y += head_length / 4.0
|
||||
|
||||
# Insert the neck socket, the head copys this loation
|
||||
ex.neck_socket_e = arm.edit_bones.new("MCH-%s_socked" % neck_chain_basename)
|
||||
ex.neck_socket = ex.neck_socket_e.name
|
||||
ex.neck_socket_e.use_connect = False
|
||||
ex.neck_socket_e.parent = mt.body_e
|
||||
ex.neck_socket_e.head = mt.head_e.head
|
||||
ex.neck_socket_e.tail = mt.head_e.head - Vector((0.0, neck_chain_segment_length / 2.0, 0.0))
|
||||
ex.neck_socket_e.roll = 0.0
|
||||
|
||||
|
||||
# copy of the head for controling
|
||||
ex.head_ctrl_e = copy_bone_simple(arm, mt.head, base_names[mt.head])
|
||||
ex.head_ctrl = ex.head_ctrl_e.name
|
||||
ex.head_ctrl_e.parent = ex.head_hinge_e
|
||||
|
||||
for i, attr in enumerate(ex_chain.attr_names):
|
||||
neck_e = getattr(ex_chain, attr + "_e")
|
||||
|
||||
# dont store parent names, re-reference as each chain bones parent.
|
||||
neck_e_parent = arm.edit_bones.new("MCH-rot_%s" % base_names[getattr(mt_chain, attr)])
|
||||
neck_e_parent.head = neck_e.head
|
||||
neck_e_parent.tail = neck_e.head + (mt.head_e.vector.normalize() * neck_chain_segment_length / 2.0)
|
||||
neck_e_parent.roll = mt.head_e.roll
|
||||
|
||||
orig_parent = neck_e.parent
|
||||
neck_e.use_connect = False
|
||||
neck_e.parent = neck_e_parent
|
||||
neck_e_parent.use_connect = False
|
||||
|
||||
if i == 0:
|
||||
neck_e_parent.parent = mt.body_e
|
||||
else:
|
||||
neck_e_parent.parent = orig_parent
|
||||
|
||||
deform(obj, bone_definition, base_names, options)
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
mt.update()
|
||||
mt_chain.update()
|
||||
ex_chain.update()
|
||||
ex.update()
|
||||
|
||||
# Axis locks
|
||||
ex.head_ctrl_p.lock_location = True, True, True
|
||||
ex.head_ctrl_p.lock_scale = True, False, True
|
||||
|
||||
# Simple one off constraints, no drivers
|
||||
con = ex.head_ctrl_p.constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = ex.neck_socket
|
||||
|
||||
con = ex.head_p.constraints.new('COPY_ROTATION')
|
||||
con.target = obj
|
||||
con.subtarget = ex.head_ctrl
|
||||
|
||||
# driven hinge
|
||||
prop = rna_idprop_ui_prop_get(ex.head_ctrl_p, "hinge", create=True)
|
||||
ex.head_ctrl_p["hinge"] = 0.0
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
con = ex.head_hinge_p.constraints.new('COPY_ROTATION')
|
||||
con.name = "hinge"
|
||||
con.target = obj
|
||||
con.subtarget = mt.body
|
||||
|
||||
# add driver
|
||||
hinge_driver_path = ex.head_ctrl_p.path_to_id() + '["hinge"]'
|
||||
|
||||
fcurve = con.driver_add("influence")
|
||||
driver = fcurve.driver
|
||||
var = driver.variables.new()
|
||||
driver.type = 'AVERAGE'
|
||||
var.name = "var"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = hinge_driver_path
|
||||
|
||||
#mod = fcurve_driver.modifiers.new('GENERATOR')
|
||||
mod = fcurve.modifiers[0]
|
||||
mod.poly_order = 1
|
||||
mod.coefficients[0] = 1.0
|
||||
mod.coefficients[1] = -1.0
|
||||
|
||||
head_driver_path = ex.head_ctrl_p.path_to_id()
|
||||
|
||||
target_names = [("b%.2d" % (i + 1)) for i in range(len(neck_chain))]
|
||||
|
||||
ex.head_ctrl_p["bend_tot"] = 0.0
|
||||
fcurve = ex.head_ctrl_p.driver_add('["bend_tot"]')
|
||||
driver = fcurve.driver
|
||||
driver.type = 'SUM'
|
||||
fcurve.modifiers.remove(fcurve.modifiers[0]) # grr dont need a modifier
|
||||
|
||||
for i in range(len(neck_chain)):
|
||||
var = driver.variables.new()
|
||||
var.name = target_names[i]
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = head_driver_path + ('["bend_%.2d"]' % (i + 1))
|
||||
|
||||
|
||||
for i, attr in enumerate(ex_chain.attr_names):
|
||||
neck_p = getattr(ex_chain, attr + "_p")
|
||||
neck_p.lock_location = True, True, True
|
||||
neck_p.lock_location = True, True, True
|
||||
neck_p.lock_rotations_4d = True
|
||||
|
||||
# Add bend prop
|
||||
prop_name = "bend_%.2d" % (i + 1)
|
||||
prop = rna_idprop_ui_prop_get(ex.head_ctrl_p, prop_name, create=True)
|
||||
ex.head_ctrl_p[prop_name] = 1.0
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
# add parent constraint
|
||||
neck_p_parent = neck_p.parent
|
||||
|
||||
# add constraints
|
||||
if i == 0:
|
||||
con = neck_p.constraints.new('COPY_SCALE')
|
||||
con.name = "Copy Scale"
|
||||
con.target = obj
|
||||
con.subtarget = ex.head_ctrl
|
||||
con.owner_space = 'LOCAL'
|
||||
con.target_space = 'LOCAL'
|
||||
|
||||
con = neck_p_parent.constraints.new('COPY_ROTATION')
|
||||
con.name = "Copy Rotation"
|
||||
con.target = obj
|
||||
con.subtarget = ex.head
|
||||
con.owner_space = 'LOCAL'
|
||||
con.target_space = 'LOCAL'
|
||||
|
||||
fcurve = con.driver_add("influence")
|
||||
driver = fcurve.driver
|
||||
driver.type = 'SCRIPTED'
|
||||
driver.expression = "bend/bend_tot"
|
||||
|
||||
fcurve.modifiers.remove(fcurve.modifiers[0]) # grr dont need a modifier
|
||||
|
||||
|
||||
# add target
|
||||
var = driver.variables.new()
|
||||
var.name = "bend_tot"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = head_driver_path + ('["bend_tot"]')
|
||||
|
||||
var = driver.variables.new()
|
||||
var.name = "bend"
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = head_driver_path + ('["%s"]' % prop_name)
|
||||
|
||||
|
||||
# finally constrain the original bone to this one
|
||||
orig_neck_p = getattr(mt_chain, attr + "_p")
|
||||
con = orig_neck_p.constraints.new('COPY_TRANSFORMS')
|
||||
con.target = obj
|
||||
con.subtarget = neck_p.name
|
||||
|
||||
|
||||
# Set the head control's custom shape to use the last
|
||||
# org neck bone for its transform
|
||||
ex.head_ctrl_p.custom_shape_transform = obj.pose.bones[bone_definition[len(bone_definition)-1]]
|
||||
|
||||
|
||||
# last step setup layers
|
||||
if "ex_layer" in options:
|
||||
layer = [n==options["ex_layer"] for n in range(0,32)]
|
||||
else:
|
||||
layer = list(arm.bones[bone_definition[1]].layers)
|
||||
for attr in ex_chain.attr_names:
|
||||
getattr(ex_chain, attr + "_b").layers = layer
|
||||
for attr in ex.attr_names:
|
||||
getattr(ex, attr + "_b").layers = layer
|
||||
|
||||
layer = list(arm.bones[bone_definition[1]].layers)
|
||||
ex.head_ctrl_b.layers = layer
|
||||
|
||||
|
||||
# no blending the result of this
|
||||
return None
|
||||
|
@ -1,110 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import copy_bone_simple
|
||||
|
||||
METARIG_NAMES = tuple()
|
||||
RIG_TYPE = "track_dual"
|
||||
|
||||
# TODO
|
||||
#def metarig_template():
|
||||
# # generated by rigify.write_meta_rig
|
||||
# bpy.ops.object.mode_set(mode='EDIT')
|
||||
# obj = bpy.context.active_object
|
||||
# arm = obj.data
|
||||
# bone = arm.edit_bones.new('Bone')
|
||||
# bone.head[:] = 0.0000, 0.0000, 0.0000
|
||||
# bone.tail[:] = 0.0000, 0.0000, 1.0000
|
||||
# bone.roll = 0.0000
|
||||
# bone.use_connect = False
|
||||
#
|
||||
# bpy.ops.object.mode_set(mode='OBJECT')
|
||||
# pbone = obj.pose.bones['Bone']
|
||||
# pbone['type'] = 'copy'
|
||||
|
||||
bool_map = {0: False, 1: True,
|
||||
0.0: False, 1.0: True,
|
||||
"false": False, "true": True,
|
||||
"False": False, "True": True,
|
||||
"no": False, "yes": True,
|
||||
"No": False, "Yes": True}
|
||||
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
return (orig_bone_name,)
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
""" A dual-bone track setup.
|
||||
Deformation only (no controls).
|
||||
"""
|
||||
# Verify required parameter
|
||||
if "to" not in options:
|
||||
raise RigifyError("'%s' rig type requires a 'to' parameter (bone: %s)" % (RIG_TYPE, base_names[0]))
|
||||
if type(options["to"]) is not str:
|
||||
raise RigifyError("'%s' rig type 'to' parameter must be a string (bone: %s)" % (RIG_TYPE, base_names[0]))
|
||||
if ("ORG-" + options["to"]) not in obj.data.bones:
|
||||
raise RigifyError("'%s' rig type 'to' parameter must name a bone in the metarig (bone: %s)" % (RIG_TYPE, base_names[0]))
|
||||
|
||||
eb = obj.data.edit_bones
|
||||
bb = obj.data.bones
|
||||
pb = obj.pose.bones
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
arm = obj.data
|
||||
|
||||
mbone1 = bone_definition[0]
|
||||
mbone2 = "ORG-" + options["to"]
|
||||
|
||||
bone_e = copy_bone_simple(obj.data, mbone1, "DEF-%s.01" % base_names[bone_definition[0]])
|
||||
bone_e.use_connect = False
|
||||
bone_e.parent = eb[mbone1]
|
||||
bone_e.tail = (eb[mbone1].head + eb[mbone2].head) / 2
|
||||
bone1 = bone_e.name
|
||||
|
||||
bone_e = copy_bone_simple(obj.data, mbone2, "DEF-%s.02" % base_names[bone_definition[0]])
|
||||
bone_e.use_connect = False
|
||||
bone_e.parent = eb[mbone1]
|
||||
bone_e.tail = (eb[mbone1].head + eb[mbone2].head) / 2
|
||||
bone2 = bone_e.name
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Constraints
|
||||
# Bone 1
|
||||
con = pb[bone1].constraints.new('DAMPED_TRACK')
|
||||
con.target = obj
|
||||
con.subtarget = mbone2
|
||||
|
||||
|
||||
# Bone 2
|
||||
con = pb[bone2].constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = mbone2
|
||||
|
||||
con = pb[bone2].constraints.new('DAMPED_TRACK')
|
||||
con.target = obj
|
||||
con.subtarget = mbone1
|
||||
|
||||
|
||||
return tuple()
|
@ -1,100 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import bpy
|
||||
from rigify import RigifyError
|
||||
from rigify_utils import copy_bone_simple
|
||||
|
||||
METARIG_NAMES = tuple()
|
||||
RIG_TYPE = "track_reverse"
|
||||
|
||||
# TODO
|
||||
#def metarig_template():
|
||||
# # generated by rigify.write_meta_rig
|
||||
# bpy.ops.object.mode_set(mode='EDIT')
|
||||
# obj = bpy.context.active_object
|
||||
# arm = obj.data
|
||||
# bone = arm.edit_bones.new('Bone')
|
||||
# bone.head[:] = 0.0000, 0.0000, 0.0000
|
||||
# bone.tail[:] = 0.0000, 0.0000, 1.0000
|
||||
# bone.roll = 0.0000
|
||||
# bone.use_connect = False
|
||||
#
|
||||
# bpy.ops.object.mode_set(mode='OBJECT')
|
||||
# pbone = obj.pose.bones['Bone']
|
||||
# pbone['type'] = 'copy'
|
||||
|
||||
bool_map = {0:False, 1:True,
|
||||
0.0:False, 1.0:True,
|
||||
"false":False, "true":True,
|
||||
"False":False, "True":True,
|
||||
"no":False, "yes":True,
|
||||
"No":False, "Yes":True}
|
||||
|
||||
def metarig_definition(obj, orig_bone_name):
|
||||
return (orig_bone_name,)
|
||||
|
||||
|
||||
|
||||
|
||||
def main(obj, bone_definition, base_names, options):
|
||||
""" A bone that tracks bakwards towards its parent, while copying the
|
||||
location of it's target.
|
||||
Deformation only (no controls).
|
||||
"""
|
||||
# Verify required parameter
|
||||
if "to" not in options:
|
||||
raise RigifyError("'%s' rig type requires a 'to' parameter (bone: %s)" % (RIG_TYPE, base_names[0]))
|
||||
if type(options["to"]) is not str:
|
||||
raise RigifyError("'%s' rig type 'to' parameter must be a string (bone: %s)" % (RIG_TYPE, base_names[0]))
|
||||
if ("ORG-" + options["to"]) not in obj.data.bones:
|
||||
raise RigifyError("'%s' rig type 'to' parameter must name a bone in the metarig (bone: %s)" % (RIG_TYPE, base_names[0]))
|
||||
|
||||
eb = obj.data.edit_bones
|
||||
bb = obj.data.bones
|
||||
pb = obj.pose.bones
|
||||
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
arm = obj.data
|
||||
|
||||
mbone1 = bone_definition[0]
|
||||
mbone2 = "ORG-" + options["to"]
|
||||
|
||||
bone_e = copy_bone_simple(obj.data, mbone2, "DEF-%s.02" % base_names[bone_definition[0]])
|
||||
bone_e.use_connect = False
|
||||
bone_e.parent = eb[mbone1]
|
||||
bone_e.tail = eb[mbone1].head
|
||||
bone = bone_e.name
|
||||
|
||||
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# Constraints
|
||||
con = pb[bone].constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = mbone2
|
||||
|
||||
con = pb[bone].constraints.new('DAMPED_TRACK')
|
||||
con.target = obj
|
||||
con.subtarget = mbone1
|
||||
|
||||
|
||||
return tuple()
|
@ -1,467 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# rigify its self does not depend on this module, however some of the
|
||||
# rigify templates use these utility functions.
|
||||
#
|
||||
# So even though this can be for general purpose use, this module was created
|
||||
# for rigify so in some cases seemingly generic functions make assumptions
|
||||
# that a generic function would need to check for.
|
||||
|
||||
import bpy
|
||||
from mathutils import Vector
|
||||
from rna_prop_ui import rna_idprop_ui_prop_get
|
||||
|
||||
DELIMITER = '-._'
|
||||
EMPTY_LAYER = [False] * 32
|
||||
|
||||
|
||||
def add_stretch_to(obj, from_name, to_name, name):
|
||||
'''
|
||||
Adds a bone that stretches from one to another
|
||||
'''
|
||||
|
||||
mode_orig = obj.mode
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
arm = obj.data
|
||||
stretch_ebone = arm.edit_bones.new(name)
|
||||
stretch_name = stretch_ebone.name
|
||||
del name
|
||||
|
||||
head = stretch_ebone.head = arm.edit_bones[from_name].head.copy()
|
||||
#tail = stretch_ebone.tail = arm.edit_bones[to_name].head.copy()
|
||||
|
||||
# annoying exception for zero length bones, since its using stretch_to the rest pose doesnt really matter
|
||||
#if (head - tail).length < 0.1:
|
||||
if 1:
|
||||
tail = stretch_ebone.tail = arm.edit_bones[from_name].tail.copy()
|
||||
|
||||
|
||||
# Now for the constraint
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
stretch_pbone = obj.pose.bones[stretch_name]
|
||||
|
||||
con = stretch_pbone.constraints.new('COPY_LOCATION')
|
||||
con.target = obj
|
||||
con.subtarget = from_name
|
||||
|
||||
con = stretch_pbone.constraints.new('STRETCH_TO')
|
||||
con.target = obj
|
||||
con.subtarget = to_name
|
||||
con.rest_length = (head - tail).length
|
||||
con.keep_axis = 'PLANE_X'
|
||||
con.volume = 'NO_VOLUME'
|
||||
|
||||
bpy.ops.object.mode_set(mode=mode_orig)
|
||||
|
||||
return stretch_name
|
||||
|
||||
|
||||
def copy_bone_simple(arm, from_bone, name, parent=False):
|
||||
ebone = arm.edit_bones[from_bone]
|
||||
ebone_new = arm.edit_bones.new(name)
|
||||
|
||||
if parent:
|
||||
ebone_new.use_connect = ebone.use_connect
|
||||
ebone_new.parent = ebone.parent
|
||||
|
||||
ebone_new.head = ebone.head
|
||||
ebone_new.tail = ebone.tail
|
||||
ebone_new.roll = ebone.roll
|
||||
ebone_new.layers = list(ebone.layers)
|
||||
return ebone_new
|
||||
|
||||
|
||||
def copy_bone_simple_list(arm, from_bones, to_bones, parent=False):
|
||||
|
||||
if len(from_bones) != len(to_bones):
|
||||
raise Exception("bone list sizes must match")
|
||||
|
||||
copy_bones = [copy_bone_simple(arm, bone_name, to_bones[i], True) for i, bone_name in enumerate(from_bones)]
|
||||
|
||||
# now we need to re-parent
|
||||
for ebone in copy_bones:
|
||||
parent = ebone.parent
|
||||
if parent:
|
||||
try:
|
||||
i = from_bones.index(parent.name)
|
||||
except:
|
||||
i = -1
|
||||
|
||||
if i == -1:
|
||||
ebone.parent = None
|
||||
else:
|
||||
ebone.parent = copy_bones[i]
|
||||
|
||||
return copy_bones
|
||||
|
||||
|
||||
def blend_bone_list(obj, apply_bones, from_bones, to_bones, target_bone=None, target_prop="blend", blend_default=0.5):
|
||||
|
||||
if obj.mode == 'EDIT':
|
||||
raise Exception("blending cant be called in editmode")
|
||||
|
||||
if len(apply_bones) != len(from_bones):
|
||||
raise Exception("lists differ in length (from -> apply): \n\t%s\n\t%s" % (from_bones, apply_bones))
|
||||
if len(apply_bones) != len(to_bones):
|
||||
raise Exception("lists differ in length (to -> apply): \n\t%s\n\t%s" % (to_bones, apply_bones))
|
||||
|
||||
# setup the blend property
|
||||
if target_bone is None:
|
||||
target_bone = apply_bones[-1] # default to the last bone
|
||||
|
||||
prop_pbone = obj.pose.bones[target_bone]
|
||||
if prop_pbone.get(target_bone) is None:
|
||||
prop = rna_idprop_ui_prop_get(prop_pbone, target_prop, create=True)
|
||||
prop_pbone[target_prop] = blend_default
|
||||
prop["soft_min"] = 0.0
|
||||
prop["soft_max"] = 1.0
|
||||
|
||||
driver_path = prop_pbone.path_from_id() + ('["%s"]' % target_prop)
|
||||
|
||||
def blend_target(driver):
|
||||
var = driver.variables.new()
|
||||
var.name = target_bone
|
||||
var.targets[0].id_type = 'OBJECT'
|
||||
var.targets[0].id = obj
|
||||
var.targets[0].data_path = driver_path
|
||||
|
||||
def blend_transforms(new_pbone, from_bone_name, to_bone_name):
|
||||
con = new_pbone.constraints.new('COPY_TRANSFORMS')
|
||||
con.target = obj
|
||||
con.subtarget = from_bone_name
|
||||
|
||||
con = new_pbone.constraints.new('COPY_TRANSFORMS')
|
||||
con.target = obj
|
||||
con.subtarget = to_bone_name
|
||||
|
||||
fcurve = con.driver_add("influence")
|
||||
driver = fcurve.driver
|
||||
driver.type = 'AVERAGE'
|
||||
fcurve.modifiers.remove(fcurve.modifiers[0]) # grr dont need a modifier
|
||||
|
||||
blend_target(driver)
|
||||
|
||||
for i, new_bone_name in enumerate(apply_bones):
|
||||
from_bone_name = from_bones[i]
|
||||
to_bone_name = to_bones[i]
|
||||
|
||||
# allow skipping some bones by having None in the list
|
||||
if None in (new_bone_name, from_bone_name, to_bone_name):
|
||||
continue
|
||||
|
||||
new_pbone = obj.pose.bones[new_bone_name]
|
||||
|
||||
blend_transforms(new_pbone, from_bone_name, to_bone_name)
|
||||
|
||||
|
||||
def add_pole_target_bone(obj, base_bone_name, name, mode='CROSS'):
|
||||
'''
|
||||
Does not actually create a poll target, just the bone to use as a poll target
|
||||
'''
|
||||
mode_orig = obj.mode
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
|
||||
arm = obj.data
|
||||
|
||||
poll_ebone = arm.edit_bones.new(name)
|
||||
base_ebone = arm.edit_bones[base_bone_name]
|
||||
poll_name = poll_ebone.name
|
||||
parent_ebone = base_ebone.parent
|
||||
|
||||
base_head = base_ebone.head.copy()
|
||||
base_tail = base_ebone.tail.copy()
|
||||
base_dir = base_head - base_tail
|
||||
|
||||
parent_head = parent_ebone.head.copy()
|
||||
parent_tail = parent_ebone.tail.copy()
|
||||
parent_dir = parent_head - parent_tail
|
||||
|
||||
distance = (base_dir.length + parent_dir.length)
|
||||
|
||||
if mode == 'CROSS':
|
||||
# direction from the angle of the joint
|
||||
offset = base_dir.copy().normalize() - parent_dir.copy().normalize()
|
||||
offset.length = distance
|
||||
elif mode == 'ZAVERAGE':
|
||||
# between both bones Z axis
|
||||
z_axis_a = base_ebone.matrix.copy().rotation_part() * Vector((0.0, 0.0, -1.0))
|
||||
z_axis_b = parent_ebone.matrix.copy().rotation_part() * Vector((0.0, 0.0, -1.0))
|
||||
offset = (z_axis_a + z_axis_b).normalize() * distance
|
||||
else:
|
||||
# preset axis
|
||||
offset = Vector((0.0, 0.0, 0.0))
|
||||
if mode[0] == "+":
|
||||
val = distance
|
||||
else:
|
||||
val = - distance
|
||||
|
||||
setattr(offset, mode[1].lower(), val)
|
||||
|
||||
poll_ebone.head = base_head + offset
|
||||
poll_ebone.tail = base_head + (offset * (1.0 - (1.0 / 4.0)))
|
||||
|
||||
bpy.ops.object.mode_set(mode=mode_orig)
|
||||
|
||||
return poll_name
|
||||
|
||||
|
||||
def get_side_name(name):
|
||||
'''
|
||||
Returns the last part of a string (typically a bone's name) indicating
|
||||
whether it is a a left or right (or center, or whatever) bone.
|
||||
Returns an empty string if nothing is found.
|
||||
'''
|
||||
if name[-2] in DELIMITER:
|
||||
return name[-2:]
|
||||
else:
|
||||
return ""
|
||||
|
||||
|
||||
def get_base_name(name):
|
||||
'''
|
||||
Returns the part of a string (typically a bone's name) corresponding to it's
|
||||
base name (no sidedness, no ORG prefix).
|
||||
'''
|
||||
if name[-2] in DELIMITER:
|
||||
return name[:-2]
|
||||
else:
|
||||
return name
|
||||
|
||||
|
||||
def write_meta_rig(obj, func_name="metarig_template"):
|
||||
'''
|
||||
Write a metarig as a python script, this rig is to have all info needed for
|
||||
generating the real rig with rigify.
|
||||
'''
|
||||
code = []
|
||||
|
||||
code.append("def %s():" % func_name)
|
||||
code.append(" # generated by rigify.write_meta_rig")
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
code.append(" bpy.ops.object.mode_set(mode='EDIT')")
|
||||
|
||||
code.append(" obj = bpy.context.active_object")
|
||||
code.append(" arm = obj.data")
|
||||
|
||||
arm = obj.data
|
||||
# write parents first
|
||||
bones = [(len(bone.parent_recursive), bone.name) for bone in arm.edit_bones]
|
||||
bones.sort(key=lambda item: item[0])
|
||||
bones = [item[1] for item in bones]
|
||||
|
||||
|
||||
for bone_name in bones:
|
||||
bone = arm.edit_bones[bone_name]
|
||||
code.append(" bone = arm.edit_bones.new('%s')" % bone.name)
|
||||
code.append(" bone.head[:] = %.4f, %.4f, %.4f" % bone.head.to_tuple(4))
|
||||
code.append(" bone.tail[:] = %.4f, %.4f, %.4f" % bone.tail.to_tuple(4))
|
||||
code.append(" bone.roll = %.4f" % bone.roll)
|
||||
code.append(" bone.use_connect = %s" % str(bone.use_connect))
|
||||
if bone.parent:
|
||||
code.append(" bone.parent = arm.edit_bones['%s']" % bone.parent.name)
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
code.append("")
|
||||
code.append(" bpy.ops.object.mode_set(mode='OBJECT')")
|
||||
|
||||
for bone_name in bones:
|
||||
pbone = obj.pose.bones[bone_name]
|
||||
pbone_written = False
|
||||
|
||||
# Only 1 level of props, simple types supported
|
||||
for key, value in pbone.items():
|
||||
if key.startswith("_"):
|
||||
continue
|
||||
|
||||
if type(value) not in (float, str, int):
|
||||
print("Unsupported ID Prop:", str((key, value)))
|
||||
continue
|
||||
|
||||
if type(value) == str:
|
||||
value = "'" + value + "'"
|
||||
|
||||
if not pbone_written: # only write bones we need
|
||||
code.append(" pbone = obj.pose.bones['%s']" % bone_name)
|
||||
|
||||
code.append(" pbone['%s'] = %s" % (key, value))
|
||||
|
||||
return "\n".join(code)
|
||||
|
||||
|
||||
# *** bone class collection ***
|
||||
|
||||
|
||||
def bone_class_instance(obj, slots, name="BoneContainer"):
|
||||
'''
|
||||
bone collection utility class to help manage cases with
|
||||
edit/pose/bone bones where switching modes can invalidate some of the members.
|
||||
|
||||
there are also utility functions for manipulating all members.
|
||||
'''
|
||||
|
||||
attr_names = tuple(slots) # dont modify the original
|
||||
|
||||
if len(slots) != len(set(slots)):
|
||||
raise Exception("duplicate entries found %s" % attr_names)
|
||||
|
||||
slots = list(slots) # dont modify the original
|
||||
for i in range(len(slots)):
|
||||
member = slots[i]
|
||||
slots.append(member + "_b") # bone bone
|
||||
slots.append(member + "_p") # pose bone
|
||||
slots.append(member + "_e") # edit bone
|
||||
|
||||
class_dict = { \
|
||||
"obj": obj, \
|
||||
"attr_names": attr_names, \
|
||||
"attr_initialize": _bone_class_instance_attr_initialize, \
|
||||
"update": _bone_class_instance_update, \
|
||||
"rename": _bone_class_instance_rename, \
|
||||
"names": _bone_class_instance_names, \
|
||||
"copy": _bone_class_instance_copy, \
|
||||
"blend": _bone_class_instance_blend, \
|
||||
}
|
||||
|
||||
instance = auto_class_instance(slots, name, class_dict)
|
||||
return instance
|
||||
|
||||
|
||||
def auto_class(slots, name="ContainerClass", class_dict=None):
|
||||
|
||||
if class_dict:
|
||||
class_dict = class_dict.copy()
|
||||
else:
|
||||
class_dict = {}
|
||||
|
||||
class_dict["__slots__"] = tuple(slots)
|
||||
|
||||
return type(name, (object,), class_dict)
|
||||
|
||||
|
||||
def auto_class_instance(slots, name="ContainerClass", class_dict=None):
|
||||
return auto_class(slots, name, class_dict)()
|
||||
|
||||
|
||||
def _bone_class_instance_attr_initialize(self, attr_names, bone_names):
|
||||
''' Initializes attributes, both lists must be aligned
|
||||
'''
|
||||
for attr in self.attr_names:
|
||||
i = attr_names.index(attr)
|
||||
setattr(self, attr, bone_names[i])
|
||||
|
||||
self.update()
|
||||
|
||||
|
||||
def _bone_class_instance_update(self):
|
||||
''' Re-Assigns bones from the blender data
|
||||
'''
|
||||
arm = self.obj.data
|
||||
bbones = arm.bones
|
||||
pbones = self.obj.pose.bones
|
||||
ebones = arm.edit_bones
|
||||
|
||||
for member in self.attr_names:
|
||||
name = getattr(self, member, None)
|
||||
if name is not None:
|
||||
setattr(self, member + "_b", bbones.get(name))
|
||||
setattr(self, member + "_p", pbones.get(name))
|
||||
setattr(self, member + "_e", ebones.get(name))
|
||||
|
||||
|
||||
def _bone_class_instance_rename(self, attr, new_name):
|
||||
''' Rename bones, editmode only
|
||||
'''
|
||||
|
||||
if self.obj.mode != 'EDIT':
|
||||
raise Exception("Only rename in editmode supported")
|
||||
|
||||
ebone = getattr(self, attr + "_e")
|
||||
ebone.name = new_name
|
||||
|
||||
# we may not get what is asked for so get the name from the editbone
|
||||
setattr(self, attr, ebone.name)
|
||||
|
||||
|
||||
def _bone_class_instance_copy(self, from_fmt="%s", to_fmt="%s", exclude_attrs=(), base_names=None):
|
||||
from_name_ls = []
|
||||
new_name_ls = []
|
||||
new_slot_ls = []
|
||||
|
||||
for attr in self.attr_names:
|
||||
|
||||
if attr in exclude_attrs:
|
||||
continue
|
||||
|
||||
bone_name_orig = getattr(self, attr)
|
||||
ebone = getattr(self, attr + "_e")
|
||||
# orig_names[attr] = bone_name_orig
|
||||
|
||||
# insert formatting
|
||||
if from_fmt != "%s":
|
||||
bone_name = from_fmt % bone_name_orig
|
||||
ebone.name = bone_name
|
||||
bone_name = ebone.name # cant be sure we get what we ask for
|
||||
else:
|
||||
bone_name = bone_name_orig
|
||||
|
||||
setattr(self, attr, bone_name)
|
||||
|
||||
new_slot_ls.append(attr)
|
||||
from_name_ls.append(bone_name)
|
||||
if base_names:
|
||||
bone_name_orig = base_names[bone_name_orig]
|
||||
new_name_ls.append(to_fmt % bone_name_orig)
|
||||
|
||||
new_bones = copy_bone_simple_list(self.obj.data, from_name_ls, new_name_ls, True)
|
||||
new_bc = bone_class_instance(self.obj, new_slot_ls)
|
||||
|
||||
for i, attr in enumerate(new_slot_ls):
|
||||
ebone = new_bones[i]
|
||||
setattr(new_bc, attr + "_e", ebone)
|
||||
setattr(new_bc, attr, ebone.name)
|
||||
|
||||
return new_bc
|
||||
|
||||
|
||||
def _bone_class_instance_names(self):
|
||||
return [getattr(self, attr) for attr in self.attr_names]
|
||||
|
||||
|
||||
def _bone_class_instance_blend(self, from_bc, to_bc, target_bone=None, target_prop="blend"):
|
||||
'''
|
||||
Use for blending bone chains.
|
||||
|
||||
blend_target = (bone_name, bone_property)
|
||||
default to the last bone, blend prop
|
||||
|
||||
XXX - toggles editmode, need to re-validate all editbones :(
|
||||
'''
|
||||
|
||||
if self.attr_names != from_bc.attr_names or self.attr_names != to_bc.attr_names:
|
||||
raise Exception("can only blend between matching chains")
|
||||
|
||||
apply_bones = [getattr(self, attr) for attr in self.attr_names]
|
||||
from_bones = [getattr(from_bc, attr) for attr in from_bc.attr_names]
|
||||
to_bones = [getattr(to_bc, attr) for attr in to_bc.attr_names]
|
||||
|
||||
blend_bone_list(self.obj, apply_bones, from_bones, to_bones, target_bone, target_prop)
|
@ -152,7 +152,7 @@ class InfoStructRNA:
|
||||
functions.append((identifier, attr))
|
||||
return functions
|
||||
|
||||
def __repr__(self):
|
||||
def __str__(self):
|
||||
|
||||
txt = ""
|
||||
txt += self.identifier
|
||||
@ -285,7 +285,7 @@ class InfoPropertyRNA:
|
||||
|
||||
return type_str
|
||||
|
||||
def __repr__(self):
|
||||
def __str__(self):
|
||||
txt = ''
|
||||
txt += ' * ' + self.identifier + ': ' + self.description
|
||||
|
||||
@ -319,7 +319,7 @@ class InfoFunctionRNA:
|
||||
|
||||
self.return_values = tuple(self.return_values)
|
||||
|
||||
def __repr__(self):
|
||||
def __str__(self):
|
||||
txt = ''
|
||||
txt += ' * ' + self.identifier + '('
|
||||
|
||||
|
@ -58,7 +58,24 @@ def rna_idprop_ui_prop_clear(item, prop):
|
||||
pass
|
||||
|
||||
|
||||
def draw(layout, context, context_member, use_edit=True):
|
||||
def rna_idprop_context_value(context, context_member, property_type):
|
||||
space = context.space_data
|
||||
|
||||
if space is None or isinstance(space, bpy.types.SpaceProperties):
|
||||
pin_id = space.pin_id
|
||||
else:
|
||||
pin_id = None
|
||||
|
||||
if pin_id and isinstance(pin_id, property_type):
|
||||
rna_item = pin_id
|
||||
context_member = "space_data.pin_id"
|
||||
else:
|
||||
rna_item = eval("context." + context_member)
|
||||
|
||||
return rna_item, context_member
|
||||
|
||||
|
||||
def draw(layout, context, context_member, property_type, use_edit=True):
|
||||
|
||||
def assign_props(prop, val, key):
|
||||
prop.data_path = context_member
|
||||
@ -69,12 +86,14 @@ def draw(layout, context, context_member, use_edit=True):
|
||||
except:
|
||||
pass
|
||||
|
||||
rna_item = eval("context." + context_member)
|
||||
rna_item, context_member = rna_idprop_context_value(context, context_member, property_type)
|
||||
|
||||
# poll should really get this...
|
||||
if not rna_item:
|
||||
return
|
||||
|
||||
assert(isinstance(rna_item, property_type))
|
||||
|
||||
items = rna_item.items()
|
||||
items.sort()
|
||||
|
||||
@ -139,7 +158,16 @@ class PropertyPanel():
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return bool(eval("context.%s" % cls._context_path))
|
||||
rna_item, context_member = rna_idprop_context_value(context, cls._context_path, cls._property_type)
|
||||
return bool(rna_item)
|
||||
|
||||
"""
|
||||
def draw_header(self, context):
|
||||
rna_item, context_member = rna_idprop_context_value(context, self._context_path, self._property_type)
|
||||
tot = len(rna_item.keys())
|
||||
if tot:
|
||||
self.layout().label("%d:" % tot)
|
||||
"""
|
||||
|
||||
def draw(self, context):
|
||||
draw(self.layout, context, self._context_path)
|
||||
draw(self.layout, context, self._context_path, self._property_type)
|
||||
|
111
release/scripts/modules/sys_info.py
Normal file
@ -0,0 +1,111 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# classes for extracting info from blenders internal classes
|
||||
|
||||
import bpy
|
||||
import bgl
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
def cutPoint(text, length):
|
||||
"Returns position of the last space found before 'length' chars"
|
||||
l = length
|
||||
c = text[l]
|
||||
while c != ' ':
|
||||
l -= 1
|
||||
if l == 0:
|
||||
return length # no space found
|
||||
c = text[l]
|
||||
return l
|
||||
|
||||
|
||||
def textWrap(text, length=70):
|
||||
lines = []
|
||||
while len(text) > 70:
|
||||
cpt = cutPoint(text, length)
|
||||
line, text = text[:cpt], text[cpt + 1:]
|
||||
lines.append(line)
|
||||
lines.append(text)
|
||||
return lines
|
||||
|
||||
|
||||
def write_sysinfo(op):
|
||||
output_filename = "system-info.txt"
|
||||
warnings = 0
|
||||
notices = 0
|
||||
|
||||
if output_filename in bpy.data.texts.keys():
|
||||
output = bpy.data.texts[output_filename]
|
||||
output.clear()
|
||||
else:
|
||||
output = bpy.data.texts.new(name=output_filename)
|
||||
|
||||
header = '= Blender {} System Information =\n'.format(bpy.app.version_string)
|
||||
lilies = '{}\n\n'.format(len(header) * '=')
|
||||
firstlilies = '{}\n'.format(len(header) * '=')
|
||||
output.write(firstlilies)
|
||||
output.write(header)
|
||||
output.write(lilies)
|
||||
|
||||
# build info
|
||||
output.write('\nBlender:\n')
|
||||
output.write(lilies)
|
||||
output.write('version {}, revision {}. {}\n'.format(bpy.app.version_string, bpy.app.build_revision, bpy.app.build_type))
|
||||
output.write('build date: {}, {}\n'.format(bpy.app.build_date, bpy.app.build_time))
|
||||
output.write('platform: {}\n'.format(bpy.app.build_platform))
|
||||
output.write('binary path: {}\n'.format(bpy.app.binary_path))
|
||||
output.write('build cflags: {}\n'.format(bpy.app.build_cflags))
|
||||
output.write('build cxxflags: {}\n'.format(bpy.app.build_cxxflags))
|
||||
output.write('build linkflags: {}\n'.format(bpy.app.build_linkflags))
|
||||
output.write('build system: {}\n'.format(bpy.app.build_system))
|
||||
|
||||
# python info
|
||||
output.write('\nPython:\n')
|
||||
output.write(lilies)
|
||||
output.write('version: {}\n'.format(sys.version))
|
||||
output.write('paths:\n')
|
||||
for p in sys.path:
|
||||
output.write('\t{}\n'.format(p))
|
||||
|
||||
output.write('\nDirectories:\n')
|
||||
output.write(lilies)
|
||||
output.write('scripts: {}\n'.format(bpy.utils.script_paths()))
|
||||
output.write('user scripts: {}\n'.format(bpy.utils.user_script_path()))
|
||||
output.write('datafiles: {}\n'.format(bpy.utils.user_resource('DATAFILES')))
|
||||
output.write('config: {}\n'.format(bpy.utils.user_resource('CONFIG')))
|
||||
output.write('scripts : {}\n'.format(bpy.utils.user_resource('SCRIPTS')))
|
||||
output.write('autosave: {}\n'.format(bpy.utils.user_resource('AUTOSAVE')))
|
||||
output.write('tempdir: {}\n'.format(bpy.app.tempdir))
|
||||
|
||||
output.write('\nOpenGL\n')
|
||||
output.write(lilies)
|
||||
output.write('renderer:\t{}\n'.format(bgl.glGetString(bgl.GL_RENDERER)))
|
||||
output.write('vendor:\t\t{}\n'.format(bgl.glGetString(bgl.GL_VENDOR)))
|
||||
output.write('version:\t{}\n'.format(bgl.glGetString(bgl.GL_VERSION)))
|
||||
output.write('extensions:\n')
|
||||
|
||||
glext = bgl.glGetString(bgl.GL_EXTENSIONS)
|
||||
glext = textWrap(glext, 70)
|
||||
for l in glext:
|
||||
output.write('\t\t{}\n'.format(l))
|
||||
|
||||
op.report({'INFO'}, "System information generated in 'system-info.txt'")
|
@ -1,616 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8-80 compliant>
|
||||
import bpy
|
||||
import mathutils
|
||||
from math import cos, sin, pi
|
||||
|
||||
# could this be stored elsewhere?
|
||||
|
||||
|
||||
def metarig_template():
|
||||
# generated by rigify.write_meta_rig
|
||||
bpy.ops.object.mode_set(mode='EDIT')
|
||||
obj = bpy.context.active_object
|
||||
arm = obj.data
|
||||
bone = arm.edit_bones.new('pelvis')
|
||||
bone.head[:] = -0.0000, -0.0145, 1.1263
|
||||
bone.tail[:] = -0.0000, -0.0145, 0.9563
|
||||
bone.roll = 3.1416
|
||||
bone.use_connect = False
|
||||
bone = arm.edit_bones.new('torso')
|
||||
bone.head[:] = -0.0000, -0.0145, 1.1263
|
||||
bone.tail[:] = -0.0000, -0.0145, 1.2863
|
||||
bone.roll = 3.1416
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['pelvis']
|
||||
bone = arm.edit_bones.new('spine.01')
|
||||
bone.head[:] = 0.0000, 0.0394, 0.9688
|
||||
bone.tail[:] = -0.0000, -0.0145, 1.1263
|
||||
bone.roll = 0.0000
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['torso']
|
||||
bone = arm.edit_bones.new('spine.02')
|
||||
bone.head[:] = -0.0000, -0.0145, 1.1263
|
||||
bone.tail[:] = -0.0000, -0.0213, 1.2884
|
||||
bone.roll = -0.0000
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['spine.01']
|
||||
bone = arm.edit_bones.new('thigh.L')
|
||||
bone.head[:] = 0.0933, -0.0421, 1.0434
|
||||
bone.tail[:] = 0.0933, -0.0516, 0.5848
|
||||
bone.roll = 0.0000
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['spine.01']
|
||||
bone = arm.edit_bones.new('thigh.R')
|
||||
bone.head[:] = -0.0933, -0.0421, 1.0434
|
||||
bone.tail[:] = -0.0933, -0.0516, 0.5848
|
||||
bone.roll = -0.0000
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['spine.01']
|
||||
bone = arm.edit_bones.new('spine.03')
|
||||
bone.head[:] = -0.0000, -0.0213, 1.2884
|
||||
bone.tail[:] = -0.0000, 0.0160, 1.3705
|
||||
bone.roll = -0.0000
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['spine.02']
|
||||
bone = arm.edit_bones.new('shin.L')
|
||||
bone.head[:] = 0.0933, -0.0516, 0.5848
|
||||
bone.tail[:] = 0.0915, 0.0100, 0.1374
|
||||
bone.roll = 0.0034
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['thigh.L']
|
||||
bone = arm.edit_bones.new('shin.R')
|
||||
bone.head[:] = -0.0933, -0.0516, 0.5848
|
||||
bone.tail[:] = -0.0915, 0.0100, 0.1374
|
||||
bone.roll = -0.0034
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['thigh.R']
|
||||
bone = arm.edit_bones.new('spine.04')
|
||||
bone.head[:] = -0.0000, 0.0160, 1.3705
|
||||
bone.tail[:] = -0.0000, 0.0590, 1.4497
|
||||
bone.roll = -0.0000
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['spine.03']
|
||||
bone = arm.edit_bones.new('foot.L')
|
||||
bone.head[:] = 0.0915, 0.0100, 0.1374
|
||||
bone.tail[:] = 0.1033, -0.0968, 0.0510
|
||||
bone.roll = 2.8964
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['shin.L']
|
||||
bone = arm.edit_bones.new('foot.R')
|
||||
bone.head[:] = -0.0915, 0.0100, 0.1374
|
||||
bone.tail[:] = -0.1033, -0.0968, 0.0510
|
||||
bone.roll = -2.8793
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['shin.R']
|
||||
bone = arm.edit_bones.new('neck_base')
|
||||
bone.head[:] = -0.0000, 0.0590, 1.4497
|
||||
bone.tail[:] = -0.0000, 0.0401, 1.5389
|
||||
bone.roll = -0.0000
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['spine.04']
|
||||
bone = arm.edit_bones.new('toe.L')
|
||||
bone.head[:] = 0.1033, -0.0968, 0.0510
|
||||
bone.tail[:] = 0.1136, -0.1848, 0.0510
|
||||
bone.roll = 0.0001
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['foot.L']
|
||||
bone = arm.edit_bones.new('heel.L')
|
||||
bone.head[:] = 0.0809, 0.0969, -0.0000
|
||||
bone.tail[:] = 0.1020, -0.0846, -0.0000
|
||||
bone.roll = -0.0001
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['foot.L']
|
||||
bone = arm.edit_bones.new('toe.R')
|
||||
bone.head[:] = -0.1033, -0.0968, 0.0510
|
||||
bone.tail[:] = -0.1136, -0.1848, 0.0510
|
||||
bone.roll = -0.0002
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['foot.R']
|
||||
bone = arm.edit_bones.new('heel.R')
|
||||
bone.head[:] = -0.0809, 0.0969, -0.0000
|
||||
bone.tail[:] = -0.1020, -0.0846, -0.0000
|
||||
bone.roll = -0.0000
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['foot.R']
|
||||
bone = arm.edit_bones.new('head')
|
||||
bone.head[:] = -0.0000, 0.0401, 1.5389
|
||||
bone.tail[:] = -0.0000, 0.0401, 1.5979
|
||||
bone.roll = 3.1416
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['neck_base']
|
||||
bone = arm.edit_bones.new('DLT-shoulder.L')
|
||||
bone.head[:] = 0.0141, -0.0346, 1.4991
|
||||
bone.tail[:] = 0.1226, 0.0054, 1.4991
|
||||
bone.roll = 0.0005
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['neck_base']
|
||||
bone = arm.edit_bones.new('DLT-shoulder.R')
|
||||
bone.head[:] = -0.0141, -0.0346, 1.4991
|
||||
bone.tail[:] = -0.1226, 0.0054, 1.4991
|
||||
bone.roll = -0.0005
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['neck_base']
|
||||
bone = arm.edit_bones.new('neck.01')
|
||||
bone.head[:] = -0.0000, 0.0401, 1.5389
|
||||
bone.tail[:] = -0.0000, 0.0176, 1.5916
|
||||
bone.roll = 0.0000
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['head']
|
||||
bone = arm.edit_bones.new('shoulder.L')
|
||||
bone.head[:] = 0.0141, -0.0346, 1.4991
|
||||
bone.tail[:] = 0.1226, 0.0216, 1.5270
|
||||
bone.roll = -0.1225
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['DLT-shoulder.L']
|
||||
bone = arm.edit_bones.new('shoulder.R')
|
||||
bone.head[:] = -0.0141, -0.0346, 1.4991
|
||||
bone.tail[:] = -0.1226, 0.0216, 1.5270
|
||||
bone.roll = 0.0849
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['DLT-shoulder.R']
|
||||
bone = arm.edit_bones.new('neck.02')
|
||||
bone.head[:] = -0.0000, 0.0176, 1.5916
|
||||
bone.tail[:] = -0.0000, 0.0001, 1.6499
|
||||
bone.roll = 0.0000
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['neck.01']
|
||||
bone = arm.edit_bones.new('DLT-upper_arm.L')
|
||||
bone.head[:] = 0.1482, 0.0483, 1.4943
|
||||
bone.tail[:] = 0.2586, 0.1057, 1.5124
|
||||
bone.roll = 1.4969
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['shoulder.L']
|
||||
bone = arm.edit_bones.new('DLT-upper_arm.R')
|
||||
bone.head[:] = -0.1482, 0.0483, 1.4943
|
||||
bone.tail[:] = -0.2586, 0.1057, 1.5124
|
||||
bone.roll = -1.4482
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['shoulder.R']
|
||||
bone = arm.edit_bones.new('neck.03')
|
||||
bone.head[:] = -0.0000, 0.0001, 1.6499
|
||||
bone.tail[:] = -0.0000, 0.0001, 1.8522
|
||||
bone.roll = 0.0000
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['neck.02']
|
||||
bone = arm.edit_bones.new('upper_arm.L')
|
||||
bone.head[:] = 0.1482, 0.0483, 1.4943
|
||||
bone.tail[:] = 0.3929, 0.0522, 1.4801
|
||||
bone.roll = 1.6281
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['DLT-upper_arm.L']
|
||||
bone = arm.edit_bones.new('upper_arm.R')
|
||||
bone.head[:] = -0.1482, 0.0483, 1.4943
|
||||
bone.tail[:] = -0.3929, 0.0522, 1.4801
|
||||
bone.roll = -1.6281
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['DLT-upper_arm.R']
|
||||
bone = arm.edit_bones.new('forearm.L')
|
||||
bone.head[:] = 0.3929, 0.0522, 1.4801
|
||||
bone.tail[:] = 0.6198, 0.0364, 1.4906
|
||||
bone.roll = 1.5240
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['upper_arm.L']
|
||||
bone = arm.edit_bones.new('forearm.R')
|
||||
bone.head[:] = -0.3929, 0.0522, 1.4801
|
||||
bone.tail[:] = -0.6198, 0.0364, 1.4906
|
||||
bone.roll = -1.5219
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['upper_arm.R']
|
||||
bone = arm.edit_bones.new('hand.L')
|
||||
bone.head[:] = 0.6198, 0.0364, 1.4906
|
||||
bone.tail[:] = 0.6592, 0.0364, 1.4853
|
||||
bone.roll = -3.0065
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['forearm.L']
|
||||
bone = arm.edit_bones.new('hand.R')
|
||||
bone.head[:] = -0.6198, 0.0364, 1.4906
|
||||
bone.tail[:] = -0.6592, 0.0364, 1.4853
|
||||
bone.roll = 3.0065
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['forearm.R']
|
||||
bone = arm.edit_bones.new('palm.04.L')
|
||||
bone.head[:] = 0.6514, 0.0658, 1.4906
|
||||
bone.tail[:] = 0.7287, 0.0810, 1.4747
|
||||
bone.roll = -3.0715
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand.L']
|
||||
bone = arm.edit_bones.new('palm.03.L')
|
||||
bone.head[:] = 0.6533, 0.0481, 1.4943
|
||||
bone.tail[:] = 0.7386, 0.0553, 1.4781
|
||||
bone.roll = -3.0290
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand.L']
|
||||
bone = arm.edit_bones.new('palm.02.L')
|
||||
bone.head[:] = 0.6539, 0.0305, 1.4967
|
||||
bone.tail[:] = 0.7420, 0.0250, 1.4835
|
||||
bone.roll = -3.0669
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand.L']
|
||||
bone = arm.edit_bones.new('palm.01.L')
|
||||
bone.head[:] = 0.6514, 0.0116, 1.4961
|
||||
bone.tail[:] = 0.7361, -0.0074, 1.4823
|
||||
bone.roll = -2.9422
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand.L']
|
||||
bone = arm.edit_bones.new('thumb.01.L')
|
||||
bone.head[:] = 0.6380, -0.0005, 1.4848
|
||||
bone.tail[:] = 0.6757, -0.0408, 1.4538
|
||||
bone.roll = -0.7041
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand.L']
|
||||
bone = arm.edit_bones.new('palm.04.R')
|
||||
bone.head[:] = -0.6514, 0.0658, 1.4906
|
||||
bone.tail[:] = -0.7287, 0.0810, 1.4747
|
||||
bone.roll = 3.0715
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand.R']
|
||||
bone = arm.edit_bones.new('palm.03.R')
|
||||
bone.head[:] = -0.6533, 0.0481, 1.4943
|
||||
bone.tail[:] = -0.7386, 0.0553, 1.4781
|
||||
bone.roll = 3.0290
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand.R']
|
||||
bone = arm.edit_bones.new('palm.02.R')
|
||||
bone.head[:] = -0.6539, 0.0305, 1.4967
|
||||
bone.tail[:] = -0.7420, 0.0250, 1.4835
|
||||
bone.roll = 3.0669
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand.R']
|
||||
bone = arm.edit_bones.new('thumb.01.R')
|
||||
bone.head[:] = -0.6380, -0.0005, 1.4848
|
||||
bone.tail[:] = -0.6757, -0.0408, 1.4538
|
||||
bone.roll = 0.7041
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand.R']
|
||||
bone = arm.edit_bones.new('palm.01.R')
|
||||
bone.head[:] = -0.6514, 0.0116, 1.4961
|
||||
bone.tail[:] = -0.7361, -0.0074, 1.4823
|
||||
bone.roll = 2.9332
|
||||
bone.use_connect = False
|
||||
bone.parent = arm.edit_bones['hand.R']
|
||||
bone = arm.edit_bones.new('finger_pinky.01.L')
|
||||
bone.head[:] = 0.7287, 0.0810, 1.4747
|
||||
bone.tail[:] = 0.7698, 0.0947, 1.4635
|
||||
bone.roll = -3.0949
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['palm.04.L']
|
||||
bone = arm.edit_bones.new('finger_ring.01.L')
|
||||
bone.head[:] = 0.7386, 0.0553, 1.4781
|
||||
bone.tail[:] = 0.7890, 0.0615, 1.4667
|
||||
bone.roll = -3.0081
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['palm.03.L']
|
||||
bone = arm.edit_bones.new('finger_middle.01.L')
|
||||
bone.head[:] = 0.7420, 0.0250, 1.4835
|
||||
bone.tail[:] = 0.7975, 0.0221, 1.4712
|
||||
bone.roll = -2.9982
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['palm.02.L']
|
||||
bone = arm.edit_bones.new('finger_index.01.L')
|
||||
bone.head[:] = 0.7361, -0.0074, 1.4823
|
||||
bone.tail[:] = 0.7843, -0.0204, 1.4718
|
||||
bone.roll = -3.0021
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['palm.01.L']
|
||||
bone = arm.edit_bones.new('thumb.02.L')
|
||||
bone.head[:] = 0.6757, -0.0408, 1.4538
|
||||
bone.tail[:] = 0.6958, -0.0568, 1.4376
|
||||
bone.roll = -0.6963
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['thumb.01.L']
|
||||
bone = arm.edit_bones.new('finger_pinky.01.R')
|
||||
bone.head[:] = -0.7287, 0.0810, 1.4747
|
||||
bone.tail[:] = -0.7698, 0.0947, 1.4635
|
||||
bone.roll = 3.0949
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['palm.04.R']
|
||||
bone = arm.edit_bones.new('finger_ring.01.R')
|
||||
bone.head[:] = -0.7386, 0.0553, 1.4781
|
||||
bone.tail[:] = -0.7890, 0.0615, 1.4667
|
||||
bone.roll = 2.9892
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['palm.03.R']
|
||||
bone = arm.edit_bones.new('finger_middle.01.R')
|
||||
bone.head[:] = -0.7420, 0.0250, 1.4835
|
||||
bone.tail[:] = -0.7975, 0.0221, 1.4712
|
||||
bone.roll = 2.9816
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['palm.02.R']
|
||||
bone = arm.edit_bones.new('thumb.02.R')
|
||||
bone.head[:] = -0.6757, -0.0408, 1.4538
|
||||
bone.tail[:] = -0.6958, -0.0568, 1.4376
|
||||
bone.roll = 0.6963
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['thumb.01.R']
|
||||
bone = arm.edit_bones.new('finger_index.01.R')
|
||||
bone.head[:] = -0.7361, -0.0074, 1.4823
|
||||
bone.tail[:] = -0.7843, -0.0204, 1.4718
|
||||
bone.roll = 2.9498
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['palm.01.R']
|
||||
bone = arm.edit_bones.new('finger_pinky.02.L')
|
||||
bone.head[:] = 0.7698, 0.0947, 1.4635
|
||||
bone.tail[:] = 0.7910, 0.1018, 1.4577
|
||||
bone.roll = -3.0949
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_pinky.01.L']
|
||||
bone = arm.edit_bones.new('finger_ring.02.L')
|
||||
bone.head[:] = 0.7890, 0.0615, 1.4667
|
||||
bone.tail[:] = 0.8177, 0.0650, 1.4600
|
||||
bone.roll = -3.0006
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_ring.01.L']
|
||||
bone = arm.edit_bones.new('finger_middle.02.L')
|
||||
bone.head[:] = 0.7975, 0.0221, 1.4712
|
||||
bone.tail[:] = 0.8289, 0.0206, 1.4643
|
||||
bone.roll = -2.9995
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_middle.01.L']
|
||||
bone = arm.edit_bones.new('finger_index.02.L')
|
||||
bone.head[:] = 0.7843, -0.0204, 1.4718
|
||||
bone.tail[:] = 0.8117, -0.0275, 1.4660
|
||||
bone.roll = -3.0064
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_index.01.L']
|
||||
bone = arm.edit_bones.new('thumb.03.L')
|
||||
bone.head[:] = 0.6958, -0.0568, 1.4376
|
||||
bone.tail[:] = 0.7196, -0.0671, 1.4210
|
||||
bone.roll = -0.8072
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['thumb.02.L']
|
||||
bone = arm.edit_bones.new('finger_pinky.02.R')
|
||||
bone.head[:] = -0.7698, 0.0947, 1.4635
|
||||
bone.tail[:] = -0.7910, 0.1018, 1.4577
|
||||
bone.roll = 3.0949
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_pinky.01.R']
|
||||
bone = arm.edit_bones.new('finger_ring.02.R')
|
||||
bone.head[:] = -0.7890, 0.0615, 1.4667
|
||||
bone.tail[:] = -0.8177, 0.0650, 1.4600
|
||||
bone.roll = 3.0341
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_ring.01.R']
|
||||
bone = arm.edit_bones.new('finger_middle.02.R')
|
||||
bone.head[:] = -0.7975, 0.0221, 1.4712
|
||||
bone.tail[:] = -0.8289, 0.0206, 1.4643
|
||||
bone.roll = 3.0291
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_middle.01.R']
|
||||
bone = arm.edit_bones.new('thumb.03.R')
|
||||
bone.head[:] = -0.6958, -0.0568, 1.4376
|
||||
bone.tail[:] = -0.7196, -0.0671, 1.4210
|
||||
bone.roll = 0.8072
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['thumb.02.R']
|
||||
bone = arm.edit_bones.new('finger_index.02.R')
|
||||
bone.head[:] = -0.7843, -0.0204, 1.4718
|
||||
bone.tail[:] = -0.8117, -0.0275, 1.4660
|
||||
bone.roll = 3.0705
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_index.01.R']
|
||||
bone = arm.edit_bones.new('finger_pinky.03.L')
|
||||
bone.head[:] = 0.7910, 0.1018, 1.4577
|
||||
bone.tail[:] = 0.8109, 0.1085, 1.4523
|
||||
bone.roll = -3.0949
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_pinky.02.L']
|
||||
bone = arm.edit_bones.new('finger_ring.03.L')
|
||||
bone.head[:] = 0.8177, 0.0650, 1.4600
|
||||
bone.tail[:] = 0.8396, 0.0677, 1.4544
|
||||
bone.roll = -2.9819
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_ring.02.L']
|
||||
bone = arm.edit_bones.new('finger_middle.03.L')
|
||||
bone.head[:] = 0.8289, 0.0206, 1.4643
|
||||
bone.tail[:] = 0.8534, 0.0193, 1.4589
|
||||
bone.roll = -3.0004
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_middle.02.L']
|
||||
bone = arm.edit_bones.new('finger_index.03.L')
|
||||
bone.head[:] = 0.8117, -0.0275, 1.4660
|
||||
bone.tail[:] = 0.8331, -0.0333, 1.4615
|
||||
bone.roll = -3.0103
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_index.02.L']
|
||||
bone = arm.edit_bones.new('finger_pinky.03.R')
|
||||
bone.head[:] = -0.7910, 0.1018, 1.4577
|
||||
bone.tail[:] = -0.8109, 0.1085, 1.4523
|
||||
bone.roll = 3.0949
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_pinky.02.R']
|
||||
bone = arm.edit_bones.new('finger_ring.03.R')
|
||||
bone.head[:] = -0.8177, 0.0650, 1.4600
|
||||
bone.tail[:] = -0.8396, 0.0677, 1.4544
|
||||
bone.roll = 2.9819
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_ring.02.R']
|
||||
bone = arm.edit_bones.new('finger_middle.03.R')
|
||||
bone.head[:] = -0.8289, 0.0206, 1.4643
|
||||
bone.tail[:] = -0.8534, 0.0193, 1.4589
|
||||
bone.roll = 3.0004
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_middle.02.R']
|
||||
bone = arm.edit_bones.new('finger_index.03.R')
|
||||
bone.head[:] = -0.8117, -0.0275, 1.4660
|
||||
bone.tail[:] = -0.8331, -0.0333, 1.4615
|
||||
bone.roll = 2.9917
|
||||
bone.use_connect = True
|
||||
bone.parent = arm.edit_bones['finger_index.02.R']
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
pbone = obj.pose.bones['torso']
|
||||
pbone['type'] = 'spine_pivot_flex'
|
||||
pbone = obj.pose.bones['torso']
|
||||
pbone['spine_pivot_flex.later_main'] = 1
|
||||
pbone = obj.pose.bones['torso']
|
||||
pbone['spine_pivot_flex.layer_extra'] = 2
|
||||
pbone = obj.pose.bones['thigh.L']
|
||||
pbone['type'] = 'leg_biped'
|
||||
pbone = obj.pose.bones['thigh.L']
|
||||
pbone['leg_biped_generic.layer_ik'] = 12
|
||||
pbone = obj.pose.bones['thigh.L']
|
||||
pbone['leg_biped_generic.layer_fk'] = 11
|
||||
pbone = obj.pose.bones['thigh.R']
|
||||
pbone['type'] = 'leg_biped'
|
||||
pbone = obj.pose.bones['thigh.R']
|
||||
pbone['leg_biped_generic.layer_ik'] = 14
|
||||
pbone = obj.pose.bones['thigh.R']
|
||||
pbone['leg_biped_generic.layer_fk'] = 13
|
||||
pbone = obj.pose.bones['head']
|
||||
pbone['type'] = 'neck_flex'
|
||||
pbone = obj.pose.bones['head']
|
||||
pbone['neck_flex.layer_extra'] = 4
|
||||
pbone = obj.pose.bones['head']
|
||||
pbone['neck_flex.layer_main'] = 3
|
||||
pbone = obj.pose.bones['DLT-shoulder.L']
|
||||
pbone['type'] = 'delta'
|
||||
pbone = obj.pose.bones['DLT-shoulder.R']
|
||||
pbone['type'] = 'delta'
|
||||
pbone = obj.pose.bones['shoulder.L']
|
||||
pbone['type'] = 'copy'
|
||||
pbone = obj.pose.bones['shoulder.L']
|
||||
pbone['copy.layers'] = 1
|
||||
pbone = obj.pose.bones['shoulder.R']
|
||||
pbone['type'] = 'copy'
|
||||
pbone = obj.pose.bones['shoulder.R']
|
||||
pbone['copy.layers'] = 1
|
||||
pbone = obj.pose.bones['DLT-upper_arm.L']
|
||||
pbone['type'] = 'delta'
|
||||
pbone = obj.pose.bones['DLT-upper_arm.R']
|
||||
pbone['type'] = 'delta'
|
||||
pbone = obj.pose.bones['upper_arm.L']
|
||||
pbone['type'] = 'arm_biped'
|
||||
pbone = obj.pose.bones['upper_arm.L']
|
||||
pbone['arm_biped_generic.elbow_parent'] = 'spine.04'
|
||||
pbone = obj.pose.bones['upper_arm.L']
|
||||
pbone['arm_biped_generic.layer_fk'] = 7
|
||||
pbone = obj.pose.bones['upper_arm.L']
|
||||
pbone['arm_biped_generic.layer_ik'] = 8
|
||||
pbone = obj.pose.bones['upper_arm.R']
|
||||
pbone['type'] = 'arm_biped'
|
||||
pbone = obj.pose.bones['upper_arm.R']
|
||||
pbone['arm_biped_generic.layer_fk'] = 9
|
||||
pbone = obj.pose.bones['upper_arm.R']
|
||||
pbone['arm_biped_generic.layer_ik'] = 10
|
||||
pbone = obj.pose.bones['upper_arm.R']
|
||||
pbone['arm_biped_generic.elbow_parent'] = 'spine.04'
|
||||
pbone = obj.pose.bones['palm.01.L']
|
||||
pbone['type'] = 'palm_curl'
|
||||
pbone = obj.pose.bones['palm.01.L']
|
||||
pbone['palm_curl.layers'] = 5
|
||||
pbone = obj.pose.bones['thumb.01.L']
|
||||
pbone['type'] = 'finger_curl'
|
||||
pbone = obj.pose.bones['thumb.01.L']
|
||||
pbone['finger_curl.layer_main'] = 5
|
||||
pbone = obj.pose.bones['thumb.01.L']
|
||||
pbone['finger_curl.layer_extra'] = 6
|
||||
pbone = obj.pose.bones['thumb.01.R']
|
||||
pbone['type'] = 'finger_curl'
|
||||
pbone = obj.pose.bones['thumb.01.R']
|
||||
pbone['finger_curl.layer_main'] = 5
|
||||
pbone = obj.pose.bones['thumb.01.R']
|
||||
pbone['finger_curl.layer_extra'] = 6
|
||||
pbone = obj.pose.bones['palm.01.R']
|
||||
pbone['type'] = 'palm_curl'
|
||||
pbone = obj.pose.bones['palm.01.R']
|
||||
pbone['palm_curl.layers'] = 5
|
||||
pbone = obj.pose.bones['finger_pinky.01.L']
|
||||
pbone['type'] = 'finger_curl'
|
||||
pbone = obj.pose.bones['finger_pinky.01.L']
|
||||
pbone['finger_curl.layer_main'] = 5
|
||||
pbone = obj.pose.bones['finger_pinky.01.L']
|
||||
pbone['finger_curl.layer_extra'] = 6
|
||||
pbone = obj.pose.bones['finger_ring.01.L']
|
||||
pbone['type'] = 'finger_curl'
|
||||
pbone = obj.pose.bones['finger_ring.01.L']
|
||||
pbone['finger_curl.layer_main'] = 5
|
||||
pbone = obj.pose.bones['finger_ring.01.L']
|
||||
pbone['finger_curl.layer_extra'] = 6
|
||||
pbone = obj.pose.bones['finger_middle.01.L']
|
||||
pbone['type'] = 'finger_curl'
|
||||
pbone = obj.pose.bones['finger_middle.01.L']
|
||||
pbone['finger_curl.layer_main'] = 5
|
||||
pbone = obj.pose.bones['finger_middle.01.L']
|
||||
pbone['finger_curl.layer_extra'] = 6
|
||||
pbone = obj.pose.bones['finger_index.01.L']
|
||||
pbone['type'] = 'finger_curl'
|
||||
pbone = obj.pose.bones['finger_index.01.L']
|
||||
pbone['finger_curl.layer_main'] = 5
|
||||
pbone = obj.pose.bones['finger_index.01.L']
|
||||
pbone['finger_curl.layer_extra'] = 6
|
||||
pbone = obj.pose.bones['finger_pinky.01.R']
|
||||
pbone['type'] = 'finger_curl'
|
||||
pbone = obj.pose.bones['finger_pinky.01.R']
|
||||
pbone['finger_curl.layer_main'] = 5
|
||||
pbone = obj.pose.bones['finger_pinky.01.R']
|
||||
pbone['finger_curl.layer_extra'] = 6
|
||||
pbone = obj.pose.bones['finger_ring.01.R']
|
||||
pbone['type'] = 'finger_curl'
|
||||
pbone = obj.pose.bones['finger_ring.01.R']
|
||||
pbone['finger_curl.layer_main'] = 5
|
||||
pbone = obj.pose.bones['finger_ring.01.R']
|
||||
pbone['finger_curl.layer_extra'] = 6
|
||||
pbone = obj.pose.bones['finger_middle.01.R']
|
||||
pbone['type'] = 'finger_curl'
|
||||
pbone = obj.pose.bones['finger_middle.01.R']
|
||||
pbone['finger_curl.layer_main'] = 5
|
||||
pbone = obj.pose.bones['finger_middle.01.R']
|
||||
pbone['finger_curl.layer_extra'] = 6
|
||||
pbone = obj.pose.bones['finger_index.01.R']
|
||||
pbone['type'] = 'finger_curl'
|
||||
pbone = obj.pose.bones['finger_index.01.R']
|
||||
pbone['finger_curl.layer_main'] = 5
|
||||
pbone = obj.pose.bones['finger_index.01.R']
|
||||
pbone['finger_curl.layer_extra'] = 6
|
||||
|
||||
|
||||
class AddHuman(bpy.types.Operator):
|
||||
'''Add an advanced human metarig base'''
|
||||
bl_idname = "object.armature_human_advanced_add"
|
||||
bl_label = "Add Humanoid (advanced metarig)"
|
||||
bl_options = {'REGISTER', 'UNDO'}
|
||||
|
||||
def execute(self, context):
|
||||
bpy.ops.object.armature_add()
|
||||
obj = context.active_object
|
||||
mode_orig = obj.mode
|
||||
bpy.ops.object.mode_set(mode='EDIT') # grr, remove bone
|
||||
bones = context.active_object.data.edit_bones
|
||||
bones.remove(bones[0])
|
||||
metarig_template()
|
||||
bpy.ops.object.mode_set(mode=mode_orig)
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
# Add to a menu
|
||||
menu_func = (lambda self, context: self.layout.operator(AddHuman.bl_idname,
|
||||
icon='OUTLINER_OB_ARMATURE', text="Human (Meta-Rig)"))
|
||||
|
||||
|
||||
def register():
|
||||
bpy.types.INFO_MT_armature_add.append(menu_func)
|
||||
|
||||
|
||||
def unregister():
|
||||
bpy.types.INFO_MT_armature_add.remove(menu_func)
|
||||
|
||||
if __name__ == "__main__":
|
||||
register()
|
@ -26,8 +26,8 @@ def add_torus(major_rad, minor_rad, major_seg, minor_seg):
|
||||
Vector = mathutils.Vector
|
||||
Quaternion = mathutils.Quaternion
|
||||
|
||||
PI_2 = pi * 2
|
||||
z_axis = (0, 0, 1)
|
||||
PI_2 = pi * 2.0
|
||||
z_axis = 0.0, 0.0, 1.0
|
||||
|
||||
verts = []
|
||||
faces = []
|
||||
@ -103,8 +103,12 @@ class AddTorus(bpy.types.Operator):
|
||||
default=0.5, min=0.01, max=100.0)
|
||||
|
||||
# generic transform props
|
||||
location = FloatVectorProperty(name="Location")
|
||||
rotation = FloatVectorProperty(name="Rotation")
|
||||
view_align = BoolProperty(name="Align to View",
|
||||
default=False)
|
||||
location = FloatVectorProperty(name="Location",
|
||||
subtype='TRANSLATION')
|
||||
rotation = FloatVectorProperty(name="Rotation",
|
||||
subtype='EULER')
|
||||
|
||||
def execute(self, context):
|
||||
|
||||
@ -128,7 +132,7 @@ class AddTorus(bpy.types.Operator):
|
||||
mesh.update()
|
||||
|
||||
import add_object_utils
|
||||
add_object_utils.add_object_data(context, mesh, operator=self)
|
||||
add_object_utils.object_data_add(context, mesh, operator=self)
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
@ -138,10 +142,12 @@ def menu_func(self, context):
|
||||
|
||||
|
||||
def register():
|
||||
bpy.utils.register_class(AddTorus)
|
||||
bpy.types.INFO_MT_mesh_add.append(menu_func)
|
||||
|
||||
|
||||
def unregister():
|
||||
bpy.utils.unregister_class(AddTorus)
|
||||
bpy.types.INFO_MT_mesh_add.remove(menu_func)
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
@ -367,6 +367,7 @@ data_path_update = [
|
||||
("PoseBone", "ik_limit_z", "use_ik_limit_z"),
|
||||
("PoseBone", "ik_lin_control", "use_ik_linear_control"),
|
||||
("PoseBone", "ik_rot_control", "use_ik_rotation_control"),
|
||||
("Bone", "use_hinge", "use_inherit_rotation"),
|
||||
("SPHFluidSettings", "spring_k", "spring_force"),
|
||||
("SPHFluidSettings", "stiffness_k", "stiffness"),
|
||||
("SPHFluidSettings", "stiffness_knear", "stiffness_near"),
|
||||
@ -674,6 +675,10 @@ data_path_update = [
|
||||
("SequenceCrop", "top", "max_y"),
|
||||
("SequenceCrop", "bottom", "min_x"),
|
||||
("SequenceCrop", "left", "min_y"),
|
||||
("Sequence", "speed_fader", "speed_factor"),
|
||||
("SpeedControlSequence", "global_speed", "multiply_speed"),
|
||||
("SpeedControlSequence", "use_curve_velocity", "use_as_speed"),
|
||||
("SpeedControlSequence", "use_curve_compress_y", "scale_to_length"),
|
||||
]
|
||||
|
||||
|
||||
@ -687,7 +692,6 @@ class UpdateAnimData(bpy.types.Operator):
|
||||
|
||||
def execute(self, context):
|
||||
import animsys_refactor
|
||||
reload(animsys_refactor)
|
||||
animsys_refactor.update_data_paths(data_path_update)
|
||||
return {'FINISHED'}
|
||||
|
||||
@ -695,5 +699,10 @@ class UpdateAnimData(bpy.types.Operator):
|
||||
if __name__ == "__main__":
|
||||
bpy.ops.anim.update_data_paths()
|
||||
|
||||
|
||||
def register():
|
||||
pass
|
||||
bpy.utils.register_module(__name__)
|
||||
|
||||
|
||||
def unregister():
|
||||
bpy.utils.unregister_module(__name__)
|
||||
|
@ -83,6 +83,9 @@ def get_console(console_id):
|
||||
namespace["bpy"] = bpy
|
||||
namespace["C"] = bpy.context
|
||||
|
||||
namespace.update(__import__("mathutils").__dict__) # from mathutils import *
|
||||
namespace.update(__import__("math").__dict__) # from math import *
|
||||
|
||||
console = InteractiveConsole(locals=namespace, filename="<blender_console>")
|
||||
|
||||
if _BPY_MAIN_OWN:
|
||||
@ -110,9 +113,6 @@ def execute(context):
|
||||
except:
|
||||
return {'CANCELLED'}
|
||||
|
||||
if sc.console_type != 'PYTHON':
|
||||
return {'CANCELLED'}
|
||||
|
||||
console, stdout, stderr = get_console(hash(context.region))
|
||||
|
||||
# redirect output
|
||||
@ -161,6 +161,10 @@ def execute(context):
|
||||
stdout.truncate(0)
|
||||
stderr.truncate(0)
|
||||
|
||||
# special exception. its possible the command loaded a new user interface
|
||||
if hash(sc) != hash(context.space_data):
|
||||
return
|
||||
|
||||
bpy.ops.console.scrollback_append(text=sc.prompt + line, type='INPUT')
|
||||
|
||||
if is_multiline:
|
||||
@ -183,8 +187,14 @@ def execute(context):
|
||||
# restore the stdin
|
||||
sys.stdin = stdin_backup
|
||||
|
||||
# execute any hooks
|
||||
for func, args in execute.hooks:
|
||||
func(*args)
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
execute.hooks = []
|
||||
|
||||
|
||||
def autocomplete(context):
|
||||
from console import intellisense
|
||||
@ -196,9 +206,6 @@ def autocomplete(context):
|
||||
if not console:
|
||||
return {'CANCELLED'}
|
||||
|
||||
if sc.console_type != 'PYTHON':
|
||||
return {'CANCELLED'}
|
||||
|
||||
# dont allow the stdin to be used, can lock blender.
|
||||
# note: unlikely stdin would be used for autocomp. but its possible.
|
||||
stdin_backup = sys.stdin
|
||||
@ -257,14 +264,16 @@ def banner(context):
|
||||
sc = context.space_data
|
||||
version_string = sys.version.strip().replace('\n', ' ')
|
||||
|
||||
add_scrollback(" * Python Interactive Console %s *" % version_string, 'OUTPUT')
|
||||
add_scrollback("Command History: Up/Down Arrow", 'OUTPUT')
|
||||
add_scrollback("Cursor: Left/Right Home/End", 'OUTPUT')
|
||||
add_scrollback("Remove: Backspace/Delete", 'OUTPUT')
|
||||
add_scrollback("Execute: Enter", 'OUTPUT')
|
||||
add_scrollback("Autocomplete: Ctrl+Space", 'OUTPUT')
|
||||
add_scrollback("Ctrl +/- Wheel: Zoom", 'OUTPUT')
|
||||
add_scrollback("Builtin Modules: bpy, bpy.data, bpy.ops, bpy.props, bpy.types, bpy.context, bgl, blf, mathutils, geometry", 'OUTPUT')
|
||||
add_scrollback("PYTHON INTERACTIVE CONSOLE %s" % version_string, 'OUTPUT')
|
||||
add_scrollback("", 'OUTPUT')
|
||||
add_scrollback("Command History: Up/Down Arrow", 'OUTPUT')
|
||||
add_scrollback("Cursor: Left/Right Home/End", 'OUTPUT')
|
||||
add_scrollback("Remove: Backspace/Delete", 'OUTPUT')
|
||||
add_scrollback("Execute: Enter", 'OUTPUT')
|
||||
add_scrollback("Autocomplete: Ctrl+Space", 'OUTPUT')
|
||||
add_scrollback("Ctrl +/- Wheel: Zoom", 'OUTPUT')
|
||||
add_scrollback("Builtin Modules: bpy, bpy.data, bpy.ops, bpy.props, bpy.types, bpy.context, bgl, blf, mathutils", 'OUTPUT')
|
||||
add_scrollback("Convenience Imports: from mathutils import *; from math import *", 'OUTPUT')
|
||||
add_scrollback("", 'OUTPUT')
|
||||
add_scrollback(" WARNING!!! Blender 2.5 API is subject to change, see API reference for more info.", 'ERROR')
|
||||
add_scrollback("", 'OUTPUT')
|
||||
|
@ -1,7 +1,28 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
from math import *
|
||||
import bpy
|
||||
from mathutils import *
|
||||
|
||||
|
||||
def main(context):
|
||||
def cleanupEulCurve(fcv):
|
||||
keys = []
|
||||
@ -12,37 +33,38 @@ def main(context):
|
||||
|
||||
for i in range(len(keys)):
|
||||
cur = keys[i]
|
||||
prev = keys[i-1] if i > 0 else None
|
||||
next = keys[i+1] if i < len(keys)-1 else None
|
||||
prev = keys[i - 1] if i > 0 else None
|
||||
next = keys[i + 1] if i < len(keys) - 1 else None
|
||||
|
||||
if prev is None:
|
||||
continue
|
||||
|
||||
th = pi
|
||||
if abs(prev[1][1] - cur[1][1]) >= th: # more than 180 degree jump
|
||||
fac = pi*2
|
||||
if abs(prev[1][1] - cur[1][1]) >= th: # more than 180 degree jump
|
||||
fac = pi * 2.0
|
||||
if prev[1][1] > cur[1][1]:
|
||||
while abs(cur[1][1]-prev[1][1]) >= th: # < prev[1][1]:
|
||||
while abs(cur[1][1] - prev[1][1]) >= th: # < prev[1][1]:
|
||||
cur[0][1] += fac
|
||||
cur[1][1] += fac
|
||||
cur[2][1] += fac
|
||||
elif prev[1][1] < cur[1][1]:
|
||||
while abs(cur[1][1]-prev[1][1]) >= th:
|
||||
while abs(cur[1][1] - prev[1][1]) >= th:
|
||||
cur[0][1] -= fac
|
||||
cur[1][1] -= fac
|
||||
cur[2][1] -= fac
|
||||
|
||||
for i in range(len(keys)):
|
||||
for x in range(2):
|
||||
fcv.keyframe_points[i].handle_left[x] = keys[i][0][x]
|
||||
fcv.keyframe_points[i].co[x] = keys[i][1][x]
|
||||
fcv.keyframe_points[i].handle_right[x] = keys[i][2][x]
|
||||
fcv.keyframe_points[i].handle_left[x] = keys[i][0][x]
|
||||
fcv.keyframe_points[i].co[x] = keys[i][1][x]
|
||||
fcv.keyframe_points[i].handle_right[x] = keys[i][2][x]
|
||||
|
||||
flist = bpy.context.active_object.animation_data.action.fcurves
|
||||
for f in flist:
|
||||
if f.select and f.data_path.endswith("rotation_euler"):
|
||||
cleanupEulCurve(f)
|
||||
|
||||
|
||||
class DiscontFilterOp(bpy.types.Operator):
|
||||
"""Fixes the most common causes of gimbal lock in the fcurves of the active bone"""
|
||||
bl_idname = "graph.euler_filter"
|
||||
@ -57,10 +79,11 @@ class DiscontFilterOp(bpy.types.Operator):
|
||||
return {'FINISHED'}
|
||||
|
||||
def register():
|
||||
pass
|
||||
bpy.utils.register_module(__name__)
|
||||
|
||||
|
||||
def unregister():
|
||||
pass
|
||||
bpy.utils.unregister_module(__name__)
|
||||
|
||||
if __name__ == "__main__":
|
||||
register()
|
||||
|
@ -32,13 +32,17 @@ class EditExternally(bpy.types.Operator):
|
||||
|
||||
def _editor_guess(self, context):
|
||||
import platform
|
||||
system = platform.system()
|
||||
try:
|
||||
system = platform.system()
|
||||
except UnicodeDecodeError:
|
||||
import sys
|
||||
system = sys.platform
|
||||
|
||||
image_editor = context.user_preferences.filepaths.image_editor
|
||||
|
||||
# use image editor in the preferences when available.
|
||||
if not image_editor:
|
||||
if system == 'Windows':
|
||||
if system in ('Windows', 'win32'):
|
||||
image_editor = ["start"] # not tested!
|
||||
elif system == 'Darwin':
|
||||
image_editor = ["open"]
|
||||
@ -84,7 +88,7 @@ class EditExternally(bpy.types.Operator):
|
||||
|
||||
|
||||
class SaveDirty(bpy.types.Operator):
|
||||
'''Select object matching a naming pattern'''
|
||||
"""Save all modified textures"""
|
||||
bl_idname = "image.save_dirty"
|
||||
bl_label = "Save Dirty"
|
||||
bl_options = {'REGISTER', 'UNDO'}
|
||||
@ -105,7 +109,7 @@ class SaveDirty(bpy.types.Operator):
|
||||
|
||||
|
||||
class ProjectEdit(bpy.types.Operator):
|
||||
'''Select object matching a naming pattern'''
|
||||
"""Edit a snapshot if the viewport in an external image editor"""
|
||||
bl_idname = "image.project_edit"
|
||||
bl_label = "Project Edit"
|
||||
bl_options = {'REGISTER'}
|
||||
@ -139,7 +143,7 @@ class ProjectEdit(bpy.types.Operator):
|
||||
# filepath = bpy.path.clean_name(filepath) # fixes <memory> rubbish, needs checking
|
||||
|
||||
if filepath.startswith(".") or filepath == "":
|
||||
# TODO, have a way to check if the file is saved, assume .B25.blend
|
||||
# TODO, have a way to check if the file is saved, assume startup.blend
|
||||
tmpdir = context.user_preferences.filepaths.temporary_directory
|
||||
filepath = os.path.join(tmpdir, "project_edit")
|
||||
else:
|
||||
@ -170,7 +174,7 @@ class ProjectEdit(bpy.types.Operator):
|
||||
|
||||
|
||||
class ProjectApply(bpy.types.Operator):
|
||||
'''Select object matching a naming pattern'''
|
||||
"""Project edited image back onto the object"""
|
||||
bl_idname = "image.project_apply"
|
||||
bl_label = "Project Apply"
|
||||
bl_options = {'REGISTER'}
|
||||
@ -191,11 +195,11 @@ class ProjectApply(bpy.types.Operator):
|
||||
|
||||
|
||||
def register():
|
||||
pass
|
||||
bpy.utils.register_module(__name__)
|
||||
|
||||
|
||||
def unregister():
|
||||
pass
|
||||
bpy.utils.unregister_module(__name__)
|
||||
|
||||
if __name__ == "__main__":
|
||||
register()
|
||||
|
@ -1,81 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# To support reload properly, try to access a package var, if it's there, reload everything
|
||||
if "bpy" in locals():
|
||||
# only reload if we alredy loaded, highly annoying
|
||||
import sys
|
||||
reload(sys.modules.get("io_anim_bvh.import_bvh", sys))
|
||||
|
||||
|
||||
import bpy
|
||||
from bpy.props import *
|
||||
from io_utils import ImportHelper
|
||||
|
||||
|
||||
class BvhImporter(bpy.types.Operator, ImportHelper):
|
||||
'''Load a OBJ Motion Capture File'''
|
||||
bl_idname = "import_anim.bvh"
|
||||
bl_label = "Import BVH"
|
||||
|
||||
filename_ext = ".bvh"
|
||||
filter_glob = StringProperty(default="*.bvh", options={'HIDDEN'})
|
||||
|
||||
scale = FloatProperty(name="Scale", description="Scale the BVH by this value", min=0.0001, max=1000000.0, soft_min=0.001, soft_max=100.0, default=0.1)
|
||||
frame_start = IntProperty(name="Start Frame", description="Starting frame for the animation", default=1)
|
||||
use_cyclic = BoolProperty(name="Loop", description="Loop the animation playback", default=False)
|
||||
rotate_mode = EnumProperty(items=(
|
||||
('QUATERNION', "Quaternion", "Convert rotations to quaternions"),
|
||||
('NATIVE', "Euler (Native)", "Use the rotation order defined in the BVH file"),
|
||||
('XYZ', "Euler (XYZ)", "Convert rotations to euler XYZ"),
|
||||
('XZY', "Euler (XZY)", "Convert rotations to euler XZY"),
|
||||
('YXZ', "Euler (YXZ)", "Convert rotations to euler YXZ"),
|
||||
('YZX', "Euler (YZX)", "Convert rotations to euler YZX"),
|
||||
('ZXY', "Euler (ZXY)", "Convert rotations to euler ZXY"),
|
||||
('ZYX', "Euler (ZYX)", "Convert rotations to euler ZYX"),
|
||||
),
|
||||
name="Rotation",
|
||||
description="Rotation conversion.",
|
||||
default='NATIVE')
|
||||
|
||||
def execute(self, context):
|
||||
import io_anim_bvh.import_bvh
|
||||
return io_anim_bvh.import_bvh.load(self, context,
|
||||
filepath=self.filepath,
|
||||
rotate_mode=self.rotate_mode,
|
||||
scale=self.scale,
|
||||
use_cyclic=self.use_cyclic,
|
||||
frame_start=self.frame_start,
|
||||
)
|
||||
|
||||
|
||||
def menu_func(self, context):
|
||||
self.layout.operator(BvhImporter.bl_idname, text="Motion Capture (.bvh)")
|
||||
|
||||
|
||||
def register():
|
||||
bpy.types.INFO_MT_file_import.append(menu_func)
|
||||
|
||||
|
||||
def unregister():
|
||||
bpy.types.INFO_MT_file_import.remove(menu_func)
|
||||
|
||||
if __name__ == "__main__":
|
||||
register()
|
@ -1,568 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Script copyright (C) Campbell Barton
|
||||
|
||||
import math
|
||||
from math import radians
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
from mathutils import Vector, Euler, Matrix
|
||||
|
||||
|
||||
class bvh_node_class(object):
|
||||
__slots__ = (
|
||||
'name',# bvh joint name
|
||||
'parent',# bvh_node_class type or None for no parent
|
||||
'children',# a list of children of this type.
|
||||
'rest_head_world',# worldspace rest location for the head of this node
|
||||
'rest_head_local',# localspace rest location for the head of this node
|
||||
'rest_tail_world',# # worldspace rest location for the tail of this node
|
||||
'rest_tail_local',# # worldspace rest location for the tail of this node
|
||||
'channels',# list of 6 ints, -1 for an unused channel, otherwise an index for the BVH motion data lines, lock triple then rot triple
|
||||
'rot_order',# a triple of indicies as to the order rotation is applied. [0,1,2] is x/y/z - [None, None, None] if no rotation.
|
||||
'anim_data',# a list one tuple's one for each frame. (locx, locy, locz, rotx, roty, rotz)
|
||||
'has_loc',# Conveinience function, bool, same as (channels[0]!=-1 or channels[1]!=-1 channels[2]!=-1)
|
||||
'has_rot',# Conveinience function, bool, same as (channels[3]!=-1 or channels[4]!=-1 channels[5]!=-1)
|
||||
'temp')# use this for whatever you want
|
||||
|
||||
def __init__(self, name, rest_head_world, rest_head_local, parent, channels, rot_order):
|
||||
self.name = name
|
||||
self.rest_head_world = rest_head_world
|
||||
self.rest_head_local = rest_head_local
|
||||
self.rest_tail_world = None
|
||||
self.rest_tail_local = None
|
||||
self.parent = parent
|
||||
self.channels = channels
|
||||
self.rot_order = rot_order
|
||||
|
||||
# convenience functions
|
||||
self.has_loc = channels[0] != -1 or channels[1] != -1 or channels[2] != -1
|
||||
self.has_rot = channels[3] != -1 or channels[4] != -1 or channels[5] != -1
|
||||
|
||||
|
||||
self.children = []
|
||||
|
||||
# list of 6 length tuples: (lx,ly,lz, rx,ry,rz)
|
||||
# even if the channels arnt used they will just be zero
|
||||
#
|
||||
self.anim_data = [(0, 0, 0, 0, 0, 0)]
|
||||
|
||||
def __repr__(self):
|
||||
return 'BVH name:"%s", rest_loc:(%.3f,%.3f,%.3f), rest_tail:(%.3f,%.3f,%.3f)' %\
|
||||
(self.name,\
|
||||
self.rest_head_world.x, self.rest_head_world.y, self.rest_head_world.z,\
|
||||
self.rest_head_world.x, self.rest_head_world.y, self.rest_head_world.z)
|
||||
|
||||
|
||||
# Change the order rotation is applied.
|
||||
MATRIX_IDENTITY_3x3 = Matrix([1, 0, 0], [0, 1, 0], [0, 0, 1])
|
||||
MATRIX_IDENTITY_4x4 = Matrix([1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1])
|
||||
|
||||
|
||||
def eulerRotate(x, y, z, rot_order):
|
||||
# Clamp all values between 0 and 360, values outside this raise an error.
|
||||
mats = [Matrix.Rotation(x, 3, 'X'), Matrix.Rotation(y, 3, 'Y'), Matrix.Rotation(z, 3, 'Z')]
|
||||
return (MATRIX_IDENTITY_3x3 * mats[rot_order[0]] * (mats[rot_order[1]] * (mats[rot_order[2]]))).to_euler()
|
||||
|
||||
# Should work but doesnt!
|
||||
'''
|
||||
eul = Euler((x, y, z))
|
||||
eul.order = "XYZ"[rot_order[0]] + "XYZ"[rot_order[1]] + "XYZ"[rot_order[2]]
|
||||
return tuple(eul.to_matrix().to_euler())
|
||||
'''
|
||||
|
||||
|
||||
def read_bvh(context, file_path, ROT_MODE='XYZ', GLOBAL_SCALE=1.0):
|
||||
# File loading stuff
|
||||
# Open the file for importing
|
||||
file = open(file_path, 'rU')
|
||||
|
||||
# Seperate into a list of lists, each line a list of words.
|
||||
file_lines = file.readlines()
|
||||
# Non standard carrage returns?
|
||||
if len(file_lines) == 1:
|
||||
file_lines = file_lines[0].split('\r')
|
||||
|
||||
# Split by whitespace.
|
||||
file_lines = [ll for ll in [l.split() for l in file_lines] if ll]
|
||||
|
||||
|
||||
# Create Hirachy as empties
|
||||
|
||||
if file_lines[0][0].lower() == 'hierarchy':
|
||||
#print 'Importing the BVH Hierarchy for:', file_path
|
||||
pass
|
||||
else:
|
||||
raise 'ERROR: This is not a BVH file'
|
||||
|
||||
bvh_nodes = {None: None}
|
||||
bvh_nodes_serial = [None]
|
||||
|
||||
channelIndex = -1
|
||||
|
||||
|
||||
lineIdx = 0 # An index for the file.
|
||||
while lineIdx < len(file_lines) -1:
|
||||
#...
|
||||
if file_lines[lineIdx][0].lower() == 'root' or file_lines[lineIdx][0].lower() == 'joint':
|
||||
|
||||
# Join spaces into 1 word with underscores joining it.
|
||||
if len(file_lines[lineIdx]) > 2:
|
||||
file_lines[lineIdx][1] = '_'.join(file_lines[lineIdx][1:])
|
||||
file_lines[lineIdx] = file_lines[lineIdx][:2]
|
||||
|
||||
# MAY NEED TO SUPPORT MULTIPLE ROOT's HERE!!!, Still unsure weather multiple roots are possible.??
|
||||
|
||||
# Make sure the names are unique- Object names will match joint names exactly and both will be unique.
|
||||
name = file_lines[lineIdx][1]
|
||||
|
||||
#print '%snode: %s, parent: %s' % (len(bvh_nodes_serial) * ' ', name, bvh_nodes_serial[-1])
|
||||
|
||||
lineIdx += 2 # Incriment to the next line (Offset)
|
||||
rest_head_local = Vector((float(file_lines[lineIdx][1]), float(file_lines[lineIdx][2]), float(file_lines[lineIdx][3]))) * GLOBAL_SCALE
|
||||
lineIdx += 1 # Incriment to the next line (Channels)
|
||||
|
||||
# newChannel[Xposition, Yposition, Zposition, Xrotation, Yrotation, Zrotation]
|
||||
# newChannel references indecies to the motiondata,
|
||||
# if not assigned then -1 refers to the last value that will be added on loading at a value of zero, this is appended
|
||||
# We'll add a zero value onto the end of the MotionDATA so this is always refers to a value.
|
||||
my_channel = [-1, -1, -1, -1, -1, -1]
|
||||
my_rot_order = [None, None, None]
|
||||
rot_count = 0
|
||||
for channel in file_lines[lineIdx][2:]:
|
||||
channel = channel.lower()
|
||||
channelIndex += 1 # So the index points to the right channel
|
||||
if channel == 'xposition':
|
||||
my_channel[0] = channelIndex
|
||||
elif channel == 'yposition':
|
||||
my_channel[1] = channelIndex
|
||||
elif channel == 'zposition':
|
||||
my_channel[2] = channelIndex
|
||||
|
||||
elif channel == 'xrotation':
|
||||
my_channel[3] = channelIndex
|
||||
my_rot_order[rot_count] = 0
|
||||
rot_count += 1
|
||||
elif channel == 'yrotation':
|
||||
my_channel[4] = channelIndex
|
||||
my_rot_order[rot_count] = 1
|
||||
rot_count += 1
|
||||
elif channel == 'zrotation':
|
||||
my_channel[5] = channelIndex
|
||||
my_rot_order[rot_count] = 2
|
||||
rot_count += 1
|
||||
|
||||
channels = file_lines[lineIdx][2:]
|
||||
|
||||
my_parent = bvh_nodes_serial[-1] # account for none
|
||||
|
||||
|
||||
# Apply the parents offset accumletivly
|
||||
if my_parent is None:
|
||||
rest_head_world = Vector(rest_head_local)
|
||||
else:
|
||||
rest_head_world = my_parent.rest_head_world + rest_head_local
|
||||
|
||||
bvh_node = bvh_nodes[name] = bvh_node_class(name, rest_head_world, rest_head_local, my_parent, my_channel, my_rot_order)
|
||||
|
||||
# If we have another child then we can call ourselves a parent, else
|
||||
bvh_nodes_serial.append(bvh_node)
|
||||
|
||||
# Account for an end node
|
||||
if file_lines[lineIdx][0].lower() == 'end' and file_lines[lineIdx][1].lower() == 'site': # There is somtimes a name after 'End Site' but we will ignore it.
|
||||
lineIdx += 2 # Incriment to the next line (Offset)
|
||||
rest_tail = Vector((float(file_lines[lineIdx][1]), float(file_lines[lineIdx][2]), float(file_lines[lineIdx][3]))) * GLOBAL_SCALE
|
||||
|
||||
bvh_nodes_serial[-1].rest_tail_world = bvh_nodes_serial[-1].rest_head_world + rest_tail
|
||||
bvh_nodes_serial[-1].rest_tail_local = bvh_nodes_serial[-1].rest_head_local + rest_tail
|
||||
|
||||
|
||||
# Just so we can remove the Parents in a uniform way- End end never has kids
|
||||
# so this is a placeholder
|
||||
bvh_nodes_serial.append(None)
|
||||
|
||||
if len(file_lines[lineIdx]) == 1 and file_lines[lineIdx][0] == '}': # == ['}']
|
||||
bvh_nodes_serial.pop() # Remove the last item
|
||||
|
||||
if len(file_lines[lineIdx]) == 1 and file_lines[lineIdx][0].lower() == 'motion':
|
||||
#print '\nImporting motion data'
|
||||
lineIdx += 3 # Set the cursor to the first frame
|
||||
break
|
||||
|
||||
lineIdx += 1
|
||||
|
||||
|
||||
# Remove the None value used for easy parent reference
|
||||
del bvh_nodes[None]
|
||||
# Dont use anymore
|
||||
del bvh_nodes_serial
|
||||
|
||||
bvh_nodes_list = bvh_nodes.values()
|
||||
|
||||
while lineIdx < len(file_lines):
|
||||
line = file_lines[lineIdx]
|
||||
for bvh_node in bvh_nodes_list:
|
||||
#for bvh_node in bvh_nodes_serial:
|
||||
lx = ly = lz = rx = ry = rz = 0.0
|
||||
channels = bvh_node.channels
|
||||
anim_data = bvh_node.anim_data
|
||||
if channels[0] != -1:
|
||||
lx = GLOBAL_SCALE * float(line[channels[0]])
|
||||
|
||||
if channels[1] != -1:
|
||||
ly = GLOBAL_SCALE * float(line[channels[1]])
|
||||
|
||||
if channels[2] != -1:
|
||||
lz = GLOBAL_SCALE * float(line[channels[2]])
|
||||
|
||||
if channels[3] != -1 or channels[4] != -1 or channels[5] != -1:
|
||||
rx, ry, rz = float(line[channels[3]]), float(line[channels[4]]), float(line[channels[5]])
|
||||
|
||||
if ROT_MODE != 'NATIVE':
|
||||
rx, ry, rz = eulerRotate(radians(rx), radians(ry), radians(rz), bvh_node.rot_order)
|
||||
else:
|
||||
rx, ry, rz = radians(rx), radians(ry), radians(rz)
|
||||
|
||||
# Done importing motion data #
|
||||
anim_data.append((lx, ly, lz, rx, ry, rz))
|
||||
lineIdx += 1
|
||||
|
||||
# Assign children
|
||||
for bvh_node in bvh_nodes.values():
|
||||
bvh_node_parent = bvh_node.parent
|
||||
if bvh_node_parent:
|
||||
bvh_node_parent.children.append(bvh_node)
|
||||
|
||||
# Now set the tip of each bvh_node
|
||||
for bvh_node in bvh_nodes.values():
|
||||
|
||||
if not bvh_node.rest_tail_world:
|
||||
if len(bvh_node.children) == 0:
|
||||
# could just fail here, but rare BVH files have childless nodes
|
||||
bvh_node.rest_tail_world = Vector(bvh_node.rest_head_world)
|
||||
bvh_node.rest_tail_local = Vector(bvh_node.rest_head_local)
|
||||
elif len(bvh_node.children) == 1:
|
||||
bvh_node.rest_tail_world = Vector(bvh_node.children[0].rest_head_world)
|
||||
bvh_node.rest_tail_local = bvh_node.rest_head_local + bvh_node.children[0].rest_head_local
|
||||
else:
|
||||
# allow this, see above
|
||||
#if not bvh_node.children:
|
||||
# raise 'error, bvh node has no end and no children. bad file'
|
||||
|
||||
# Removed temp for now
|
||||
rest_tail_world = Vector((0.0, 0.0, 0.0))
|
||||
rest_tail_local = Vector((0.0, 0.0, 0.0))
|
||||
for bvh_node_child in bvh_node.children:
|
||||
rest_tail_world += bvh_node_child.rest_head_world
|
||||
rest_tail_local += bvh_node_child.rest_head_local
|
||||
|
||||
bvh_node.rest_tail_world = rest_tail_world * (1.0 / len(bvh_node.children))
|
||||
bvh_node.rest_tail_local = rest_tail_local * (1.0 / len(bvh_node.children))
|
||||
|
||||
# Make sure tail isnt the same location as the head.
|
||||
if (bvh_node.rest_tail_local - bvh_node.rest_head_local).length <= 0.001 * GLOBAL_SCALE:
|
||||
bvh_node.rest_tail_local.y = bvh_node.rest_tail_local.y + GLOBAL_SCALE / 10
|
||||
bvh_node.rest_tail_world.y = bvh_node.rest_tail_world.y + GLOBAL_SCALE / 10
|
||||
|
||||
return bvh_nodes
|
||||
|
||||
|
||||
def bvh_node_dict2objects(context, bvh_nodes, IMPORT_START_FRAME=1, IMPORT_LOOP=False):
|
||||
|
||||
if IMPORT_START_FRAME < 1:
|
||||
IMPORT_START_FRAME = 1
|
||||
|
||||
scn = context.scene
|
||||
scn.objects.selected = []
|
||||
|
||||
objects = []
|
||||
|
||||
def add_ob(name):
|
||||
ob = scn.objects.new('Empty', None)
|
||||
objects.append(ob)
|
||||
return ob
|
||||
|
||||
# Add objects
|
||||
for name, bvh_node in bvh_nodes.items():
|
||||
bvh_node.temp = add_ob(name)
|
||||
|
||||
# Parent the objects
|
||||
for bvh_node in bvh_nodes.values():
|
||||
bvh_node.temp.makeParent([bvh_node_child.temp for bvh_node_child in bvh_node.children], 1, 0) # ojbs, noninverse, 1 = not fast.
|
||||
|
||||
# Offset
|
||||
for bvh_node in bvh_nodes.values():
|
||||
# Make relative to parents offset
|
||||
bvh_node.temp.loc = bvh_node.rest_head_local
|
||||
|
||||
# Add tail objects
|
||||
for name, bvh_node in bvh_nodes.items():
|
||||
if not bvh_node.children:
|
||||
ob_end = add_ob(name + '_end')
|
||||
bvh_node.temp.makeParent([ob_end], 1, 0) # ojbs, noninverse, 1 = not fast.
|
||||
ob_end.loc = bvh_node.rest_tail_local
|
||||
|
||||
|
||||
# Animate the data, the last used bvh_node will do since they all have the same number of frames
|
||||
for frame_current in range(len(bvh_node.anim_data)):
|
||||
Blender.Set('curframe', frame_current + IMPORT_START_FRAME)
|
||||
|
||||
for bvh_node in bvh_nodes.values():
|
||||
lx, ly, lz, rx, ry, rz = bvh_node.anim_data[frame_current]
|
||||
|
||||
rest_head_local = bvh_node.rest_head_local
|
||||
bvh_node.temp.loc = rest_head_local + Vector((lx, ly, lz))
|
||||
|
||||
bvh_node.temp.rot = rx, ry, rz
|
||||
|
||||
bvh_node.temp.insertIpoKey(Blender.Object.IpoKeyTypes.LOCROT) # XXX invalid
|
||||
|
||||
scn.update(1)
|
||||
return objects
|
||||
|
||||
|
||||
def bvh_node_dict2armature(context, bvh_nodes, ROT_MODE='XYZ', IMPORT_START_FRAME=1, IMPORT_LOOP=False):
|
||||
|
||||
if IMPORT_START_FRAME < 1:
|
||||
IMPORT_START_FRAME = 1
|
||||
|
||||
# Add the new armature,
|
||||
scn = context.scene
|
||||
#XXX scn.objects.selected = []
|
||||
for ob in scn.objects:
|
||||
ob.select = False
|
||||
|
||||
scn.frame_set(IMPORT_START_FRAME)
|
||||
|
||||
arm_data = bpy.data.armatures.new("MyBVH")
|
||||
arm_ob = bpy.data.objects.new("MyBVH", arm_data)
|
||||
|
||||
scn.objects.link(arm_ob)
|
||||
|
||||
arm_ob.select = True
|
||||
scn.objects.active = arm_ob
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
|
||||
bpy.ops.object.mode_set(mode='EDIT', toggle=False)
|
||||
|
||||
# Get the average bone length for zero length bones, we may not use this.
|
||||
average_bone_length = 0.0
|
||||
nonzero_count = 0
|
||||
for bvh_node in bvh_nodes.values():
|
||||
l = (bvh_node.rest_head_local - bvh_node.rest_tail_local).length
|
||||
if l:
|
||||
average_bone_length += l
|
||||
nonzero_count += 1
|
||||
|
||||
# Very rare cases all bones couldbe zero length???
|
||||
if not average_bone_length:
|
||||
average_bone_length = 0.1
|
||||
else:
|
||||
# Normal operation
|
||||
average_bone_length = average_bone_length / nonzero_count
|
||||
|
||||
# XXX, annoying, remove bone.
|
||||
while arm_data.edit_bones:
|
||||
arm_ob.edit_bones.remove(arm_data.edit_bones[-1])
|
||||
|
||||
ZERO_AREA_BONES = []
|
||||
for name, bvh_node in bvh_nodes.items():
|
||||
# New editbone
|
||||
bone = bvh_node.temp = arm_data.edit_bones.new(name)
|
||||
|
||||
bone.head = bvh_node.rest_head_world
|
||||
bone.tail = bvh_node.rest_tail_world
|
||||
|
||||
# ZERO AREA BONES.
|
||||
if (bone.head - bone.tail).length < 0.001:
|
||||
if bvh_node.parent:
|
||||
ofs = bvh_node.parent.rest_head_local - bvh_node.parent.rest_tail_local
|
||||
if ofs.length: # is our parent zero length also?? unlikely
|
||||
bone.tail = bone.tail + ofs
|
||||
else:
|
||||
bone.tail.y = bone.tail.y + average_bone_length
|
||||
else:
|
||||
bone.tail.y = bone.tail.y + average_bone_length
|
||||
|
||||
ZERO_AREA_BONES.append(bone.name)
|
||||
|
||||
|
||||
for bvh_node in bvh_nodes.values():
|
||||
if bvh_node.parent:
|
||||
# bvh_node.temp is the Editbone
|
||||
|
||||
# Set the bone parent
|
||||
bvh_node.temp.parent = bvh_node.parent.temp
|
||||
|
||||
# Set the connection state
|
||||
if not bvh_node.has_loc and\
|
||||
bvh_node.parent and\
|
||||
bvh_node.parent.temp.name not in ZERO_AREA_BONES and\
|
||||
bvh_node.parent.rest_tail_local == bvh_node.rest_head_local:
|
||||
bvh_node.temp.use_connect = True
|
||||
|
||||
# Replace the editbone with the editbone name,
|
||||
# to avoid memory errors accessing the editbone outside editmode
|
||||
for bvh_node in bvh_nodes.values():
|
||||
bvh_node.temp = bvh_node.temp.name
|
||||
|
||||
#XXX arm_data.update()
|
||||
|
||||
# Now Apply the animation to the armature
|
||||
|
||||
# Get armature animation data
|
||||
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
|
||||
bpy.ops.object.mode_set(mode='POSE', toggle=False)
|
||||
|
||||
pose = arm_ob.pose
|
||||
pose_bones = pose.bones
|
||||
|
||||
if ROT_MODE == 'NATIVE':
|
||||
eul_order_lookup = {\
|
||||
(0, 1, 2): 'XYZ',
|
||||
(0, 2, 1): 'XZY',
|
||||
(1, 0, 2): 'YXZ',
|
||||
(1, 2, 0): 'YZX',
|
||||
(2, 0, 1): 'ZXY',
|
||||
(2, 1, 0): 'ZYX'}
|
||||
|
||||
for bvh_node in bvh_nodes.values():
|
||||
bone_name = bvh_node.temp # may not be the same name as the bvh_node, could have been shortened.
|
||||
pose_bone = pose_bones[bone_name]
|
||||
pose_bone.rotation_mode = eul_order_lookup[tuple(bvh_node.rot_order)]
|
||||
|
||||
elif ROT_MODE != 'QUATERNION':
|
||||
for pose_bone in pose_bones:
|
||||
pose_bone.rotation_mode = ROT_MODE
|
||||
else:
|
||||
# Quats default
|
||||
pass
|
||||
|
||||
context.scene.update()
|
||||
|
||||
bpy.ops.pose.select_all() # set
|
||||
bpy.ops.anim.keyframe_insert_menu(type=-4) # XXX - -4 ???
|
||||
|
||||
|
||||
#XXX action = Blender.Armature.NLA.NewAction("Action")
|
||||
#XXX action.setActive(arm_ob)
|
||||
|
||||
#bpy.ops.action.new()
|
||||
#action = bpy.data.actions[-1]
|
||||
|
||||
# arm_ob.animation_data.action = action
|
||||
action = arm_ob.animation_data.action
|
||||
|
||||
# Replace the bvh_node.temp (currently an editbone)
|
||||
# With a tuple (pose_bone, armature_bone, bone_rest_matrix, bone_rest_matrix_inv)
|
||||
for bvh_node in bvh_nodes.values():
|
||||
bone_name = bvh_node.temp # may not be the same name as the bvh_node, could have been shortened.
|
||||
pose_bone = pose_bones[bone_name]
|
||||
rest_bone = arm_data.bones[bone_name]
|
||||
bone_rest_matrix = rest_bone.matrix_local.rotation_part()
|
||||
|
||||
|
||||
bone_rest_matrix_inv = Matrix(bone_rest_matrix)
|
||||
bone_rest_matrix_inv.invert()
|
||||
|
||||
bone_rest_matrix_inv.resize4x4()
|
||||
bone_rest_matrix.resize4x4()
|
||||
bvh_node.temp = (pose_bone, bone, bone_rest_matrix, bone_rest_matrix_inv)
|
||||
|
||||
|
||||
# Make a dict for fast access without rebuilding a list all the time.
|
||||
|
||||
# KEYFRAME METHOD, SLOW, USE IPOS DIRECT
|
||||
# TODO: use f-point samples instead (Aligorith)
|
||||
|
||||
if ROT_MODE != 'QUATERNION':
|
||||
prev_euler = [Euler() for i in range(len(bvh_nodes))]
|
||||
|
||||
# Animate the data, the last used bvh_node will do since they all have the same number of frames
|
||||
for frame_current in range(len(bvh_node.anim_data)-1): # skip the first frame (rest frame)
|
||||
# print frame_current
|
||||
|
||||
# if frame_current==40: # debugging
|
||||
# break
|
||||
|
||||
# Dont neet to set the current frame
|
||||
for i, bvh_node in enumerate(bvh_nodes.values()):
|
||||
pose_bone, bone, bone_rest_matrix, bone_rest_matrix_inv = bvh_node.temp
|
||||
lx, ly, lz, rx, ry, rz = bvh_node.anim_data[frame_current + 1]
|
||||
|
||||
if bvh_node.has_rot:
|
||||
bone_rotation_matrix = Euler((rx, ry, rz)).to_matrix().resize4x4()
|
||||
bone_rotation_matrix = bone_rest_matrix_inv * bone_rotation_matrix * bone_rest_matrix
|
||||
|
||||
if ROT_MODE == 'QUATERNION':
|
||||
pose_bone.rotation_quaternion = bone_rotation_matrix.to_quat()
|
||||
else:
|
||||
euler = bone_rotation_matrix.to_euler(pose_bone.rotation_mode, prev_euler[i])
|
||||
pose_bone.rotation_euler = euler
|
||||
prev_euler[i] = euler
|
||||
|
||||
if bvh_node.has_loc:
|
||||
pose_bone.location = (bone_rest_matrix_inv * Matrix.Translation(Vector((lx, ly, lz)) - bvh_node.rest_head_local)).translation_part()
|
||||
|
||||
if bvh_node.has_loc:
|
||||
pose_bone.keyframe_insert("location")
|
||||
if bvh_node.has_rot:
|
||||
if ROT_MODE == 'QUATERNION':
|
||||
pose_bone.keyframe_insert("rotation_quaternion")
|
||||
else:
|
||||
pose_bone.keyframe_insert("rotation_euler")
|
||||
|
||||
|
||||
# bpy.ops.anim.keyframe_insert_menu(type=-4) # XXX - -4 ???
|
||||
bpy.ops.screen.frame_offset(delta=1)
|
||||
|
||||
for cu in action.fcurves:
|
||||
if IMPORT_LOOP:
|
||||
pass # 2.5 doenst have cyclic now?
|
||||
|
||||
for bez in cu.keyframe_points:
|
||||
bez.interpolation = 'LINEAR'
|
||||
|
||||
return arm_ob
|
||||
|
||||
|
||||
def load(operator, context, filepath="", rotate_mode='NATIVE', scale=1.0, use_cyclic=False, frame_start=1):
|
||||
import time
|
||||
t1 = time.time()
|
||||
print('\tparsing bvh %r...' % filepath, end="")
|
||||
|
||||
bvh_nodes = read_bvh(context, filepath,
|
||||
ROT_MODE=rotate_mode,
|
||||
GLOBAL_SCALE=scale)
|
||||
|
||||
print('%.4f' % (time.time() - t1))
|
||||
t1 = time.time()
|
||||
print('\timporting to blender...', end="")
|
||||
|
||||
bvh_node_dict2armature(context, bvh_nodes,
|
||||
ROT_MODE=rotate_mode,
|
||||
IMPORT_START_FRAME=frame_start,
|
||||
IMPORT_LOOP=use_cyclic)
|
||||
|
||||
print('Done in %.4f\n' % (time.time() - t1))
|
||||
|
||||
return {'FINISHED'}
|
@ -1,75 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# To support reload properly, try to access a package var, if it's there, reload everything
|
||||
if "bpy" in locals():
|
||||
import sys
|
||||
reload(sys.modules.get("io_mesh_ply.export_ply", sys))
|
||||
|
||||
|
||||
import bpy
|
||||
from bpy.props import *
|
||||
from io_utils import ExportHelper
|
||||
|
||||
|
||||
class ExportPLY(bpy.types.Operator, ExportHelper):
|
||||
'''Export a single object as a stanford PLY with normals, colours and texture coordinates.'''
|
||||
bl_idname = "export.ply"
|
||||
bl_label = "Export PLY"
|
||||
|
||||
filename_ext = ".ply"
|
||||
|
||||
use_modifiers = BoolProperty(name="Apply Modifiers", description="Apply Modifiers to the exported mesh", default=True)
|
||||
use_normals = BoolProperty(name="Normals", description="Export Normals for smooth and hard shaded faces", default=True)
|
||||
use_uv_coords = BoolProperty(name="UVs", description="Exort the active UV layer", default=True)
|
||||
use_colors = BoolProperty(name="Vertex Colors", description="Exort the active vertex color layer", default=True)
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
return context.active_object != None
|
||||
|
||||
def execute(self, context):
|
||||
filepath = self.filepath
|
||||
filepath = bpy.path.ensure_ext(filepath, self.filename_ext)
|
||||
import io_mesh_ply.export_ply
|
||||
return io_mesh_ply.export_ply.save(self, context, **self.properties)
|
||||
|
||||
def draw(self, context):
|
||||
layout = self.layout
|
||||
|
||||
row = layout.row()
|
||||
row.prop(self, "use_modifiers")
|
||||
row.prop(self, "use_normals")
|
||||
row = layout.row()
|
||||
row.prop(self, "use_uv_coords")
|
||||
row.prop(self, "use_colors")
|
||||
|
||||
|
||||
def menu_func(self, context):
|
||||
self.layout.operator(ExportPLY.bl_idname, text="Stanford (.ply)")
|
||||
|
||||
|
||||
def register():
|
||||
bpy.types.INFO_MT_file_export.append(menu_func)
|
||||
|
||||
|
||||
def unregister():
|
||||
bpy.types.INFO_MT_file_export.remove(menu_func)
|
||||
|
||||
if __name__ == "__main__":
|
||||
register()
|
@ -1,206 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Copyright (C) 2004, 2005: Bruce Merry, bmerry@cs.uct.ac.za
|
||||
# Contributors: Bruce Merry, Campbell Barton
|
||||
|
||||
"""
|
||||
This script exports Stanford PLY files from Blender. It supports normals,
|
||||
colours, and texture coordinates per face or per vertex.
|
||||
Only one mesh can be exported at a time.
|
||||
"""
|
||||
|
||||
import bpy
|
||||
import os
|
||||
|
||||
|
||||
def save(operator, context, filepath="", use_modifiers=True, use_normals=True, use_uv_coords=True, use_colors=True):
|
||||
|
||||
def rvec3d(v):
|
||||
return round(v[0], 6), round(v[1], 6), round(v[2], 6)
|
||||
|
||||
|
||||
def rvec2d(v):
|
||||
return round(v[0], 6), round(v[1], 6)
|
||||
|
||||
scene = context.scene
|
||||
obj = context.object
|
||||
|
||||
if not obj:
|
||||
raise Exception("Error, Select 1 active object")
|
||||
|
||||
file = open(filepath, 'w')
|
||||
|
||||
if scene.objects.active:
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
if use_modifiers:
|
||||
mesh = obj.create_mesh(scene, True, 'PREVIEW')
|
||||
else:
|
||||
mesh = obj.data
|
||||
|
||||
if not mesh:
|
||||
raise Exception("Error, could not get mesh data from active object")
|
||||
|
||||
# mesh.transform(obj.matrix_world) # XXX
|
||||
|
||||
faceUV = (len(mesh.uv_textures) > 0)
|
||||
vertexUV = (len(mesh.sticky) > 0)
|
||||
vertexColors = len(mesh.vertex_colors) > 0
|
||||
|
||||
if (not faceUV) and (not vertexUV):
|
||||
use_uv_coords = False
|
||||
if not vertexColors:
|
||||
use_colors = False
|
||||
|
||||
if not use_uv_coords:
|
||||
faceUV = vertexUV = False
|
||||
if not use_colors:
|
||||
vertexColors = False
|
||||
|
||||
if faceUV:
|
||||
active_uv_layer = mesh.uv_textures.active
|
||||
if not active_uv_layer:
|
||||
use_uv_coords = False
|
||||
faceUV = None
|
||||
else:
|
||||
active_uv_layer = active_uv_layer.data
|
||||
|
||||
if vertexColors:
|
||||
active_col_layer = mesh.vertex_colors.active
|
||||
if not active_col_layer:
|
||||
use_colors = False
|
||||
vertexColors = None
|
||||
else:
|
||||
active_col_layer = active_col_layer.data
|
||||
|
||||
# incase
|
||||
color = uvcoord = uvcoord_key = normal = normal_key = None
|
||||
|
||||
mesh_verts = mesh.vertices # save a lookup
|
||||
ply_verts = [] # list of dictionaries
|
||||
# vdict = {} # (index, normal, uv) -> new index
|
||||
vdict = [{} for i in range(len(mesh_verts))]
|
||||
ply_faces = [[] for f in range(len(mesh.faces))]
|
||||
vert_count = 0
|
||||
for i, f in enumerate(mesh.faces):
|
||||
|
||||
|
||||
smooth = f.use_smooth
|
||||
if not smooth:
|
||||
normal = tuple(f.normal)
|
||||
normal_key = rvec3d(normal)
|
||||
|
||||
if faceUV:
|
||||
uv = active_uv_layer[i]
|
||||
uv = uv.uv1, uv.uv2, uv.uv3, uv.uv4 # XXX - crufty :/
|
||||
if vertexColors:
|
||||
col = active_col_layer[i]
|
||||
col = col.color1, col.color2, col.color3, col.color4
|
||||
|
||||
f_verts = f.vertices
|
||||
|
||||
pf = ply_faces[i]
|
||||
for j, vidx in enumerate(f_verts):
|
||||
v = mesh_verts[vidx]
|
||||
|
||||
if smooth:
|
||||
normal = tuple(v.normal)
|
||||
normal_key = rvec3d(normal)
|
||||
|
||||
if faceUV:
|
||||
uvcoord = uv[j][0], 1.0 - uv[j][1]
|
||||
uvcoord_key = rvec2d(uvcoord)
|
||||
elif vertexUV:
|
||||
uvcoord = v.uvco[0], 1.0 - v.uvco[1]
|
||||
uvcoord_key = rvec2d(uvcoord)
|
||||
|
||||
if vertexColors:
|
||||
color = col[j]
|
||||
color = int(color[0] * 255.0), int(color[1] * 255.0), int(color[2] * 255.0)
|
||||
|
||||
|
||||
key = normal_key, uvcoord_key, color
|
||||
|
||||
vdict_local = vdict[vidx]
|
||||
pf_vidx = vdict_local.get(key) # Will be None initially
|
||||
|
||||
if pf_vidx is None: # same as vdict_local.has_key(key)
|
||||
pf_vidx = vdict_local[key] = vert_count
|
||||
ply_verts.append((vidx, normal, uvcoord, color))
|
||||
vert_count += 1
|
||||
|
||||
pf.append(pf_vidx)
|
||||
|
||||
file.write('ply\n')
|
||||
file.write('format ascii 1.0\n')
|
||||
file.write('comment Created by Blender %s - www.blender.org, source file: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
|
||||
|
||||
file.write('element vertex %d\n' % len(ply_verts))
|
||||
|
||||
file.write('property float x\n')
|
||||
file.write('property float y\n')
|
||||
file.write('property float z\n')
|
||||
|
||||
if use_normals:
|
||||
file.write('property float nx\n')
|
||||
file.write('property float ny\n')
|
||||
file.write('property float nz\n')
|
||||
if use_uv_coords:
|
||||
file.write('property float s\n')
|
||||
file.write('property float t\n')
|
||||
if use_colors:
|
||||
file.write('property uchar red\n')
|
||||
file.write('property uchar green\n')
|
||||
file.write('property uchar blue\n')
|
||||
|
||||
file.write('element face %d\n' % len(mesh.faces))
|
||||
file.write('property list uchar uint vertex_indices\n')
|
||||
file.write('end_header\n')
|
||||
|
||||
for i, v in enumerate(ply_verts):
|
||||
file.write('%.6f %.6f %.6f ' % tuple(mesh_verts[v[0]].co)) # co
|
||||
if use_normals:
|
||||
file.write('%.6f %.6f %.6f ' % v[1]) # no
|
||||
if use_uv_coords:
|
||||
file.write('%.6f %.6f ' % v[2]) # uv
|
||||
if use_colors:
|
||||
file.write('%u %u %u' % v[3]) # col
|
||||
file.write('\n')
|
||||
|
||||
for pf in ply_faces:
|
||||
if len(pf) == 3:
|
||||
file.write('3 %d %d %d\n' % tuple(pf))
|
||||
else:
|
||||
file.write('4 %d %d %d %d\n' % tuple(pf))
|
||||
|
||||
file.close()
|
||||
print("writing %r done" % filepath)
|
||||
|
||||
if use_modifiers:
|
||||
bpy.data.meshes.remove(mesh)
|
||||
|
||||
# XXX
|
||||
"""
|
||||
if is_editmode:
|
||||
Blender.Window.EditMode(1, '', 0)
|
||||
"""
|
||||
|
||||
return {'FINISHED'}
|
@ -1,84 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# To support reload properly, try to access a package var, if it's there, reload everything
|
||||
if "bpy" in locals():
|
||||
import sys
|
||||
reload(sys.modules.get("io_scene_3ds.import_3ds", sys))
|
||||
reload(sys.modules.get("io_scene_3ds.export_3ds", sys))
|
||||
|
||||
|
||||
import bpy
|
||||
from bpy.props import *
|
||||
from io_utils import ImportHelper, ExportHelper
|
||||
|
||||
|
||||
class Import3DS(bpy.types.Operator, ImportHelper):
|
||||
'''Import from 3DS file format (.3ds)'''
|
||||
bl_idname = "import_scene.autodesk_3ds"
|
||||
bl_label = 'Import 3DS'
|
||||
|
||||
filename_ext = ".3ds"
|
||||
filter_glob = StringProperty(default="*.3ds", options={'HIDDEN'})
|
||||
|
||||
constrain_size = FloatProperty(name="Size Constraint", description="Scale the model by 10 until it reacehs the size constraint. Zero Disables.", min=0.0, max=1000.0, soft_min=0.0, soft_max=1000.0, default=10.0)
|
||||
use_image_search = BoolProperty(name="Image Search", description="Search subdirectories for any assosiated images (Warning, may be slow)", default=True)
|
||||
use_apply_transform = BoolProperty(name="Apply Transform", description="Workaround for object transformations importing incorrectly", default=False)
|
||||
|
||||
def execute(self, context):
|
||||
import io_scene_3ds.import_3ds
|
||||
return io_scene_3ds.import_3ds.load(self, context, **self.properties)
|
||||
|
||||
|
||||
class Export3DS(bpy.types.Operator, ExportHelper):
|
||||
'''Export to 3DS file format (.3ds)'''
|
||||
bl_idname = "export_scene.autodesk_3ds"
|
||||
bl_label = 'Export 3DS'
|
||||
|
||||
filename_ext = ".3ds"
|
||||
|
||||
def execute(self, context):
|
||||
import io_scene_3ds.export_3ds
|
||||
return io_scene_3ds.export_3ds.save(self, context, **self.properties)
|
||||
|
||||
|
||||
# Add to a menu
|
||||
def menu_func_export(self, context):
|
||||
self.layout.operator(Export3DS.bl_idname, text="3D Studio (.3ds)")
|
||||
|
||||
def menu_func_import(self, context):
|
||||
self.layout.operator(Import3DS.bl_idname, text="3D Studio (.3ds)")
|
||||
|
||||
def register():
|
||||
bpy.types.INFO_MT_file_import.append(menu_func_import)
|
||||
bpy.types.INFO_MT_file_export.append(menu_func_export)
|
||||
|
||||
|
||||
def unregister():
|
||||
bpy.types.INFO_MT_file_import.remove(menu_func_import)
|
||||
bpy.types.INFO_MT_file_export.remove(menu_func_export)
|
||||
|
||||
# NOTES:
|
||||
# why add 1 extra vertex? and remove it when done? - "Answer - eekadoodle - would need to re-order UV's without this since face order isnt always what we give blender, BMesh will solve :D"
|
||||
# disabled scaling to size, this requires exposing bb (easy) and understanding how it works (needs some time)
|
||||
|
||||
if __name__ == "__main__":
|
||||
register()
|
||||
|
@ -1,794 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Script copyright (C) Bob Holcomb
|
||||
# Contributors: Bob Holcomb, Richard L?rk?ng, Damien McGinnes, Campbell Barton, Mario Lapin
|
||||
|
||||
import os
|
||||
import time
|
||||
import struct
|
||||
|
||||
from io_utils import load_image
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
BOUNDS_3DS = []
|
||||
|
||||
|
||||
######################################################
|
||||
# Data Structures
|
||||
######################################################
|
||||
|
||||
#Some of the chunks that we will see
|
||||
#----- Primary Chunk, at the beginning of each file
|
||||
PRIMARY = int('0x4D4D',16)
|
||||
|
||||
#------ Main Chunks
|
||||
OBJECTINFO = 0x3D3D #This gives the version of the mesh and is found right before the material and object information
|
||||
VERSION = 0x0002 #This gives the version of the .3ds file
|
||||
EDITKEYFRAME= 0xB000 #This is the header for all of the key frame info
|
||||
|
||||
#------ sub defines of OBJECTINFO
|
||||
MATERIAL = 45055 #0xAFFF // This stored the texture info
|
||||
OBJECT = 16384 #0x4000 // This stores the faces, vertices, etc...
|
||||
|
||||
#>------ sub defines of MATERIAL
|
||||
#------ sub defines of MATERIAL_BLOCK
|
||||
MAT_NAME = 0xA000 # This holds the material name
|
||||
MAT_AMBIENT = 0xA010 # Ambient color of the object/material
|
||||
MAT_DIFFUSE = 0xA020 # This holds the color of the object/material
|
||||
MAT_SPECULAR = 0xA030 # SPecular color of the object/material
|
||||
MAT_SHINESS = 0xA040 # ??
|
||||
MAT_TRANSPARENCY= 0xA050 # Transparency value of material
|
||||
MAT_SELF_ILLUM = 0xA080 # Self Illumination value of material
|
||||
MAT_WIRE = 0xA085 # Only render's wireframe
|
||||
|
||||
MAT_TEXTURE_MAP = 0xA200 # This is a header for a new texture map
|
||||
MAT_SPECULAR_MAP= 0xA204 # This is a header for a new specular map
|
||||
MAT_OPACITY_MAP = 0xA210 # This is a header for a new opacity map
|
||||
MAT_REFLECTION_MAP= 0xA220 # This is a header for a new reflection map
|
||||
MAT_BUMP_MAP = 0xA230 # This is a header for a new bump map
|
||||
MAT_MAP_FILEPATH = 0xA300 # This holds the file name of the texture
|
||||
|
||||
MAT_FLOAT_COLOR = 0x0010 #color defined as 3 floats
|
||||
MAT_24BIT_COLOR = 0x0011 #color defined as 3 bytes
|
||||
|
||||
#>------ sub defines of OBJECT
|
||||
OBJECT_MESH = 0x4100 # This lets us know that we are reading a new object
|
||||
OBJECT_LAMP = 0x4600 # This lets un know we are reading a light object
|
||||
OBJECT_LAMP_SPOT = 0x4610 # The light is a spotloght.
|
||||
OBJECT_LAMP_OFF = 0x4620 # The light off.
|
||||
OBJECT_LAMP_ATTENUATE = 0x4625
|
||||
OBJECT_LAMP_RAYSHADE = 0x4627
|
||||
OBJECT_LAMP_SHADOWED = 0x4630
|
||||
OBJECT_LAMP_LOCAL_SHADOW = 0x4640
|
||||
OBJECT_LAMP_LOCAL_SHADOW2 = 0x4641
|
||||
OBJECT_LAMP_SEE_CONE = 0x4650
|
||||
OBJECT_LAMP_SPOT_RECTANGULAR = 0x4651
|
||||
OBJECT_LAMP_SPOT_OVERSHOOT = 0x4652
|
||||
OBJECT_LAMP_SPOT_PROJECTOR = 0x4653
|
||||
OBJECT_LAMP_EXCLUDE = 0x4654
|
||||
OBJECT_LAMP_RANGE = 0x4655
|
||||
OBJECT_LAMP_ROLL = 0x4656
|
||||
OBJECT_LAMP_SPOT_ASPECT = 0x4657
|
||||
OBJECT_LAMP_RAY_BIAS = 0x4658
|
||||
OBJECT_LAMP_INNER_RANGE = 0x4659
|
||||
OBJECT_LAMP_OUTER_RANGE = 0x465A
|
||||
OBJECT_LAMP_MULTIPLIER = 0x465B
|
||||
OBJECT_LAMP_AMBIENT_LIGHT = 0x4680
|
||||
|
||||
|
||||
|
||||
OBJECT_CAMERA= 0x4700 # This lets un know we are reading a camera object
|
||||
|
||||
#>------ sub defines of CAMERA
|
||||
OBJECT_CAM_RANGES= 0x4720 # The camera range values
|
||||
|
||||
#>------ sub defines of OBJECT_MESH
|
||||
OBJECT_VERTICES = 0x4110 # The objects vertices
|
||||
OBJECT_FACES = 0x4120 # The objects faces
|
||||
OBJECT_MATERIAL = 0x4130 # This is found if the object has a material, either texture map or color
|
||||
OBJECT_UV = 0x4140 # The UV texture coordinates
|
||||
OBJECT_TRANS_MATRIX = 0x4160 # The Object Matrix
|
||||
|
||||
global scn
|
||||
scn = None
|
||||
|
||||
#the chunk class
|
||||
class chunk:
|
||||
ID = 0
|
||||
length = 0
|
||||
bytes_read = 0
|
||||
|
||||
#we don't read in the bytes_read, we compute that
|
||||
binary_format='<HI'
|
||||
|
||||
def __init__(self):
|
||||
self.ID = 0
|
||||
self.length = 0
|
||||
self.bytes_read = 0
|
||||
|
||||
def dump(self):
|
||||
print('ID: ', self.ID)
|
||||
print('ID in hex: ', hex(self.ID))
|
||||
print('length: ', self.length)
|
||||
print('bytes_read: ', self.bytes_read)
|
||||
|
||||
def read_chunk(file, chunk):
|
||||
temp_data = file.read(struct.calcsize(chunk.binary_format))
|
||||
data = struct.unpack(chunk.binary_format, temp_data)
|
||||
chunk.ID = data[0]
|
||||
chunk.length = data[1]
|
||||
#update the bytes read function
|
||||
chunk.bytes_read = 6
|
||||
|
||||
#if debugging
|
||||
#chunk.dump()
|
||||
|
||||
def read_string(file):
|
||||
#read in the characters till we get a null character
|
||||
s = b''
|
||||
while not s.endswith(b'\x00'):
|
||||
s += struct.unpack('<c', file.read(1))[0]
|
||||
#print 'string: ',s
|
||||
|
||||
#remove the null character from the string
|
||||
s = str(s[:-1], 'ASCII')
|
||||
# print("read string", s)
|
||||
return s
|
||||
|
||||
######################################################
|
||||
# IMPORT
|
||||
######################################################
|
||||
def process_next_object_chunk(file, previous_chunk):
|
||||
new_chunk = chunk()
|
||||
temp_chunk = chunk()
|
||||
|
||||
while (previous_chunk.bytes_read < previous_chunk.length):
|
||||
#read the next chunk
|
||||
read_chunk(file, new_chunk)
|
||||
|
||||
def skip_to_end(file, skip_chunk):
|
||||
buffer_size = skip_chunk.length - skip_chunk.bytes_read
|
||||
binary_format='%ic' % buffer_size
|
||||
temp_data = file.read(struct.calcsize(binary_format))
|
||||
skip_chunk.bytes_read += buffer_size
|
||||
|
||||
|
||||
def add_texture_to_material(image, texture, material, mapto):
|
||||
#print('assigning %s to %s' % (texture, material))
|
||||
|
||||
if mapto not in ("COLOR", "SPECULARITY", "ALPHA", "NORMAL"):
|
||||
print('/tError: Cannot map to "%s"\n\tassuming diffuse color. modify material "%s" later.' % (mapto, material.name))
|
||||
mapto = "COLOR"
|
||||
|
||||
if image:
|
||||
texture.image = image
|
||||
|
||||
mtex = material.texture_slots.add()
|
||||
mtex.texture = texture
|
||||
mtex.texture_coords = 'UV'
|
||||
mtex.use_map_color_diffuse = False
|
||||
|
||||
if mapto == 'COLOR':
|
||||
mtex.use_map_color_diffuse = True
|
||||
elif mapto == 'SPECULARITY':
|
||||
mtex.use_map_specular = True
|
||||
elif mapto == 'ALPHA':
|
||||
mtex.use_map_alpha = True
|
||||
elif mapto == 'NORMAL':
|
||||
mtex.use_map_normal = True
|
||||
|
||||
|
||||
def process_next_chunk(file, previous_chunk, importedObjects, IMAGE_SEARCH):
|
||||
#print previous_chunk.bytes_read, 'BYTES READ'
|
||||
contextObName = None
|
||||
contextLamp = [None, None] # object, Data
|
||||
contextMaterial = None
|
||||
contextMatrix_rot = None # Blender.mathutils.Matrix(); contextMatrix.identity()
|
||||
#contextMatrix_tx = None # Blender.mathutils.Matrix(); contextMatrix.identity()
|
||||
contextMesh_vertls = None # flat array: (verts * 3)
|
||||
contextMesh_facels = None
|
||||
contextMeshMaterials = {} # matname:[face_idxs]
|
||||
contextMeshUV = None # flat array (verts * 2)
|
||||
|
||||
TEXTURE_DICT = {}
|
||||
MATDICT = {}
|
||||
# TEXMODE = Mesh.FaceModes['TEX']
|
||||
|
||||
# Localspace variable names, faster.
|
||||
STRUCT_SIZE_1CHAR = struct.calcsize('c')
|
||||
STRUCT_SIZE_2FLOAT = struct.calcsize('2f')
|
||||
STRUCT_SIZE_3FLOAT = struct.calcsize('3f')
|
||||
STRUCT_SIZE_UNSIGNED_SHORT = struct.calcsize('H')
|
||||
STRUCT_SIZE_4UNSIGNED_SHORT = struct.calcsize('4H')
|
||||
STRUCT_SIZE_4x3MAT = struct.calcsize('ffffffffffff')
|
||||
_STRUCT_SIZE_4x3MAT = struct.calcsize('fffffffffffff')
|
||||
# STRUCT_SIZE_4x3MAT = calcsize('ffffffffffff')
|
||||
# print STRUCT_SIZE_4x3MAT, ' STRUCT_SIZE_4x3MAT'
|
||||
|
||||
def putContextMesh(myContextMesh_vertls, myContextMesh_facels, myContextMeshMaterials):
|
||||
|
||||
bmesh = bpy.data.meshes.new(contextObName)
|
||||
if myContextMesh_vertls:
|
||||
|
||||
bmesh.vertices.add(len(myContextMesh_vertls)//3)
|
||||
bmesh.faces.add(len(myContextMesh_facels))
|
||||
bmesh.vertices.foreach_set("co", myContextMesh_vertls)
|
||||
|
||||
eekadoodle_faces = []
|
||||
for v1, v2, v3 in myContextMesh_facels:
|
||||
eekadoodle_faces.extend([v3, v1, v2, 0] if v3 == 0 else [v1, v2, v3, 0])
|
||||
bmesh.faces.foreach_set("vertices_raw", eekadoodle_faces)
|
||||
|
||||
if bmesh.faces and contextMeshUV:
|
||||
bmesh.uv_textures.new()
|
||||
uv_faces = bmesh.uv_textures.active.data[:]
|
||||
else:
|
||||
uv_faces = None
|
||||
|
||||
for mat_idx, (matName, faces) in enumerate(myContextMeshMaterials.items()):
|
||||
if matName is None:
|
||||
bmat = None
|
||||
else:
|
||||
bmat = MATDICT[matName][1]
|
||||
img = TEXTURE_DICT.get(bmat.name)
|
||||
|
||||
bmesh.materials.append(bmat) # can be None
|
||||
|
||||
if uv_faces and img:
|
||||
for fidx in faces:
|
||||
bmesh.faces[fidx].material_index = mat_idx
|
||||
uf = uv_faces[fidx]
|
||||
uf.image = img
|
||||
uf.use_image = True
|
||||
else:
|
||||
for fidx in faces:
|
||||
bmesh.faces[fidx].material_index = mat_idx
|
||||
|
||||
if uv_faces:
|
||||
for fidx, uf in enumerate(uv_faces):
|
||||
face = myContextMesh_facels[fidx]
|
||||
v1, v2, v3 = face
|
||||
|
||||
# eekadoodle
|
||||
if v3 == 0:
|
||||
v1, v2, v3 = v3, v1, v2
|
||||
|
||||
uf.uv1 = contextMeshUV[v1 * 2:(v1 * 2) + 2]
|
||||
uf.uv2 = contextMeshUV[v2 * 2:(v2 * 2) + 2]
|
||||
uf.uv3 = contextMeshUV[v3 * 2:(v3 * 2) + 2]
|
||||
# always a tri
|
||||
|
||||
ob = bpy.data.objects.new(tempName, bmesh)
|
||||
SCN.objects.link(ob)
|
||||
|
||||
'''
|
||||
if contextMatrix_tx:
|
||||
ob.setMatrix(contextMatrix_tx)
|
||||
'''
|
||||
|
||||
if contextMatrix_rot:
|
||||
ob.matrix_world = contextMatrix_rot
|
||||
|
||||
importedObjects.append(ob)
|
||||
bmesh.update()
|
||||
|
||||
#a spare chunk
|
||||
new_chunk = chunk()
|
||||
temp_chunk = chunk()
|
||||
|
||||
CreateBlenderObject = False
|
||||
|
||||
def read_float_color(temp_chunk):
|
||||
temp_data = file.read(struct.calcsize('3f'))
|
||||
temp_chunk.bytes_read += 12
|
||||
return [float(col) for col in struct.unpack('<3f', temp_data)]
|
||||
|
||||
def read_byte_color(temp_chunk):
|
||||
temp_data = file.read(struct.calcsize('3B'))
|
||||
temp_chunk.bytes_read += 3
|
||||
return [float(col)/255 for col in struct.unpack('<3B', temp_data)] # data [0,1,2] == rgb
|
||||
|
||||
def read_texture(new_chunk, temp_chunk, name, mapto):
|
||||
new_texture = bpy.data.textures.new(name, type='IMAGE')
|
||||
|
||||
img = None
|
||||
while (new_chunk.bytes_read < new_chunk.length):
|
||||
#print 'MAT_TEXTURE_MAP..while', new_chunk.bytes_read, new_chunk.length
|
||||
read_chunk(file, temp_chunk)
|
||||
|
||||
if (temp_chunk.ID == MAT_MAP_FILEPATH):
|
||||
texture_name = read_string(file)
|
||||
img = TEXTURE_DICT[contextMaterial.name] = load_image(texture_name, dirname)
|
||||
new_chunk.bytes_read += (len(texture_name)+1) #plus one for the null character that gets removed
|
||||
|
||||
else:
|
||||
skip_to_end(file, temp_chunk)
|
||||
|
||||
new_chunk.bytes_read += temp_chunk.bytes_read
|
||||
|
||||
# add the map to the material in the right channel
|
||||
if img:
|
||||
add_texture_to_material(img, new_texture, contextMaterial, mapto)
|
||||
|
||||
dirname = os.path.dirname(file.name)
|
||||
|
||||
#loop through all the data for this chunk (previous chunk) and see what it is
|
||||
while (previous_chunk.bytes_read < previous_chunk.length):
|
||||
#print '\t', previous_chunk.bytes_read, 'keep going'
|
||||
#read the next chunk
|
||||
#print 'reading a chunk'
|
||||
read_chunk(file, new_chunk)
|
||||
|
||||
#is it a Version chunk?
|
||||
if (new_chunk.ID == VERSION):
|
||||
#print 'if (new_chunk.ID == VERSION):'
|
||||
#print 'found a VERSION chunk'
|
||||
#read in the version of the file
|
||||
#it's an unsigned short (H)
|
||||
temp_data = file.read(struct.calcsize('I'))
|
||||
version = struct.unpack('<I', temp_data)[0]
|
||||
new_chunk.bytes_read += 4 #read the 4 bytes for the version number
|
||||
#this loader works with version 3 and below, but may not with 4 and above
|
||||
if (version > 3):
|
||||
print('\tNon-Fatal Error: Version greater than 3, may not load correctly: ', version)
|
||||
|
||||
#is it an object info chunk?
|
||||
elif (new_chunk.ID == OBJECTINFO):
|
||||
#print 'elif (new_chunk.ID == OBJECTINFO):'
|
||||
# print 'found an OBJECTINFO chunk'
|
||||
process_next_chunk(file, new_chunk, importedObjects, IMAGE_SEARCH)
|
||||
|
||||
#keep track of how much we read in the main chunk
|
||||
new_chunk.bytes_read += temp_chunk.bytes_read
|
||||
|
||||
#is it an object chunk?
|
||||
elif (new_chunk.ID == OBJECT):
|
||||
|
||||
if CreateBlenderObject:
|
||||
putContextMesh(contextMesh_vertls, contextMesh_facels, contextMeshMaterials)
|
||||
contextMesh_vertls = []; contextMesh_facels = []
|
||||
|
||||
## preparando para receber o proximo objeto
|
||||
contextMeshMaterials = {} # matname:[face_idxs]
|
||||
contextMeshUV = None
|
||||
#contextMesh.vertexUV = 1 # Make sticky coords.
|
||||
# Reset matrix
|
||||
contextMatrix_rot = None
|
||||
#contextMatrix_tx = None
|
||||
|
||||
CreateBlenderObject = True
|
||||
tempName = read_string(file)
|
||||
contextObName = tempName
|
||||
new_chunk.bytes_read += len(tempName)+1
|
||||
|
||||
#is it a material chunk?
|
||||
elif (new_chunk.ID == MATERIAL):
|
||||
|
||||
# print("read material")
|
||||
|
||||
#print 'elif (new_chunk.ID == MATERIAL):'
|
||||
contextMaterial = bpy.data.materials.new('Material')
|
||||
|
||||
elif (new_chunk.ID == MAT_NAME):
|
||||
#print 'elif (new_chunk.ID == MAT_NAME):'
|
||||
material_name = read_string(file)
|
||||
|
||||
# print("material name", material_name)
|
||||
|
||||
#plus one for the null character that ended the string
|
||||
new_chunk.bytes_read += len(material_name)+1
|
||||
|
||||
contextMaterial.name = material_name.rstrip() # remove trailing whitespace
|
||||
MATDICT[material_name]= (contextMaterial.name, contextMaterial)
|
||||
|
||||
elif (new_chunk.ID == MAT_AMBIENT):
|
||||
#print 'elif (new_chunk.ID == MAT_AMBIENT):'
|
||||
read_chunk(file, temp_chunk)
|
||||
if (temp_chunk.ID == MAT_FLOAT_COLOR):
|
||||
contextMaterial.mirror_color = read_float_color(temp_chunk)
|
||||
# temp_data = file.read(struct.calcsize('3f'))
|
||||
# temp_chunk.bytes_read += 12
|
||||
# contextMaterial.mirCol = [float(col) for col in struct.unpack('<3f', temp_data)]
|
||||
elif (temp_chunk.ID == MAT_24BIT_COLOR):
|
||||
contextMaterial.mirror_color = read_byte_color(temp_chunk)
|
||||
# temp_data = file.read(struct.calcsize('3B'))
|
||||
# temp_chunk.bytes_read += 3
|
||||
# contextMaterial.mirCol = [float(col)/255 for col in struct.unpack('<3B', temp_data)] # data [0,1,2] == rgb
|
||||
else:
|
||||
skip_to_end(file, temp_chunk)
|
||||
new_chunk.bytes_read += temp_chunk.bytes_read
|
||||
|
||||
elif (new_chunk.ID == MAT_DIFFUSE):
|
||||
#print 'elif (new_chunk.ID == MAT_DIFFUSE):'
|
||||
read_chunk(file, temp_chunk)
|
||||
if (temp_chunk.ID == MAT_FLOAT_COLOR):
|
||||
contextMaterial.diffuse_color = read_float_color(temp_chunk)
|
||||
# temp_data = file.read(struct.calcsize('3f'))
|
||||
# temp_chunk.bytes_read += 12
|
||||
# contextMaterial.rgbCol = [float(col) for col in struct.unpack('<3f', temp_data)]
|
||||
elif (temp_chunk.ID == MAT_24BIT_COLOR):
|
||||
contextMaterial.diffuse_color = read_byte_color(temp_chunk)
|
||||
# temp_data = file.read(struct.calcsize('3B'))
|
||||
# temp_chunk.bytes_read += 3
|
||||
# contextMaterial.rgbCol = [float(col)/255 for col in struct.unpack('<3B', temp_data)] # data [0,1,2] == rgb
|
||||
else:
|
||||
skip_to_end(file, temp_chunk)
|
||||
|
||||
# print("read material diffuse color", contextMaterial.diffuse_color)
|
||||
|
||||
new_chunk.bytes_read += temp_chunk.bytes_read
|
||||
|
||||
elif (new_chunk.ID == MAT_SPECULAR):
|
||||
#print 'elif (new_chunk.ID == MAT_SPECULAR):'
|
||||
read_chunk(file, temp_chunk)
|
||||
if (temp_chunk.ID == MAT_FLOAT_COLOR):
|
||||
contextMaterial.specular_color = read_float_color(temp_chunk)
|
||||
# temp_data = file.read(struct.calcsize('3f'))
|
||||
# temp_chunk.bytes_read += 12
|
||||
# contextMaterial.mirCol = [float(col) for col in struct.unpack('<3f', temp_data)]
|
||||
elif (temp_chunk.ID == MAT_24BIT_COLOR):
|
||||
contextMaterial.specular_color = read_byte_color(temp_chunk)
|
||||
# temp_data = file.read(struct.calcsize('3B'))
|
||||
# temp_chunk.bytes_read += 3
|
||||
# contextMaterial.mirCol = [float(col)/255 for col in struct.unpack('<3B', temp_data)] # data [0,1,2] == rgb
|
||||
else:
|
||||
skip_to_end(file, temp_chunk)
|
||||
new_chunk.bytes_read += temp_chunk.bytes_read
|
||||
|
||||
elif (new_chunk.ID == MAT_TEXTURE_MAP):
|
||||
read_texture(new_chunk, temp_chunk, "Diffuse", "COLOR")
|
||||
|
||||
elif (new_chunk.ID == MAT_SPECULAR_MAP):
|
||||
read_texture(new_chunk, temp_chunk, "Specular", "SPECULARITY")
|
||||
|
||||
elif (new_chunk.ID == MAT_OPACITY_MAP):
|
||||
read_texture(new_chunk, temp_chunk, "Opacity", "ALPHA")
|
||||
|
||||
elif (new_chunk.ID == MAT_BUMP_MAP):
|
||||
read_texture(new_chunk, temp_chunk, "Bump", "NORMAL")
|
||||
|
||||
elif (new_chunk.ID == MAT_TRANSPARENCY):
|
||||
#print 'elif (new_chunk.ID == MAT_TRANSPARENCY):'
|
||||
read_chunk(file, temp_chunk)
|
||||
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
|
||||
|
||||
temp_chunk.bytes_read += 2
|
||||
contextMaterial.alpha = 1-(float(struct.unpack('<H', temp_data)[0])/100)
|
||||
new_chunk.bytes_read += temp_chunk.bytes_read
|
||||
|
||||
|
||||
elif (new_chunk.ID == OBJECT_LAMP): # Basic lamp support.
|
||||
|
||||
temp_data = file.read(STRUCT_SIZE_3FLOAT)
|
||||
|
||||
x,y,z = struct.unpack('<3f', temp_data)
|
||||
new_chunk.bytes_read += STRUCT_SIZE_3FLOAT
|
||||
|
||||
ob = bpy.data.objects.new("Lamp", bpy.data.lamps.new("Lamp"))
|
||||
SCN.objects.link(ob)
|
||||
|
||||
contextLamp[1]= ob.data
|
||||
# contextLamp[1]= bpy.data.lamps.new()
|
||||
contextLamp[0]= ob
|
||||
# contextLamp[0]= SCN_OBJECTS.new(contextLamp[1])
|
||||
importedObjects.append(contextLamp[0])
|
||||
|
||||
#print 'number of faces: ', num_faces
|
||||
#print x,y,z
|
||||
contextLamp[0].location = (x, y, z)
|
||||
# contextLamp[0].setLocation(x,y,z)
|
||||
|
||||
# Reset matrix
|
||||
contextMatrix_rot = None
|
||||
#contextMatrix_tx = None
|
||||
#print contextLamp.name,
|
||||
|
||||
elif (new_chunk.ID == OBJECT_MESH):
|
||||
# print 'Found an OBJECT_MESH chunk'
|
||||
pass
|
||||
elif (new_chunk.ID == OBJECT_VERTICES):
|
||||
'''
|
||||
Worldspace vertex locations
|
||||
'''
|
||||
# print 'elif (new_chunk.ID == OBJECT_VERTICES):'
|
||||
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
|
||||
num_verts = struct.unpack('<H', temp_data)[0]
|
||||
new_chunk.bytes_read += 2
|
||||
|
||||
# print 'number of verts: ', num_verts
|
||||
contextMesh_vertls = struct.unpack('<%df' % (num_verts * 3), file.read(STRUCT_SIZE_3FLOAT * num_verts))
|
||||
new_chunk.bytes_read += STRUCT_SIZE_3FLOAT * num_verts
|
||||
# dummyvert is not used atm!
|
||||
|
||||
#print 'object verts: bytes read: ', new_chunk.bytes_read
|
||||
|
||||
elif (new_chunk.ID == OBJECT_FACES):
|
||||
# print 'elif (new_chunk.ID == OBJECT_FACES):'
|
||||
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
|
||||
num_faces = struct.unpack('<H', temp_data)[0]
|
||||
new_chunk.bytes_read += 2
|
||||
#print 'number of faces: ', num_faces
|
||||
|
||||
# print '\ngetting a face'
|
||||
temp_data = file.read(STRUCT_SIZE_4UNSIGNED_SHORT * num_faces)
|
||||
new_chunk.bytes_read += STRUCT_SIZE_4UNSIGNED_SHORT * num_faces #4 short ints x 2 bytes each
|
||||
contextMesh_facels = struct.unpack('<%dH' % (num_faces * 4), temp_data)
|
||||
contextMesh_facels = [contextMesh_facels[i - 3:i] for i in range(3, (num_faces * 4) + 3, 4)]
|
||||
|
||||
elif (new_chunk.ID == OBJECT_MATERIAL):
|
||||
# print 'elif (new_chunk.ID == OBJECT_MATERIAL):'
|
||||
material_name = read_string(file)
|
||||
new_chunk.bytes_read += len(material_name)+1 # remove 1 null character.
|
||||
|
||||
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
|
||||
num_faces_using_mat = struct.unpack('<H', temp_data)[0]
|
||||
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT
|
||||
|
||||
|
||||
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT * num_faces_using_mat)
|
||||
new_chunk.bytes_read += STRUCT_SIZE_UNSIGNED_SHORT * num_faces_using_mat
|
||||
|
||||
contextMeshMaterials[material_name]= struct.unpack("<%dH" % (num_faces_using_mat), temp_data)
|
||||
|
||||
#look up the material in all the materials
|
||||
|
||||
elif (new_chunk.ID == OBJECT_UV):
|
||||
temp_data = file.read(STRUCT_SIZE_UNSIGNED_SHORT)
|
||||
num_uv = struct.unpack('<H', temp_data)[0]
|
||||
new_chunk.bytes_read += 2
|
||||
|
||||
temp_data = file.read(STRUCT_SIZE_2FLOAT * num_uv)
|
||||
new_chunk.bytes_read += STRUCT_SIZE_2FLOAT * num_uv
|
||||
contextMeshUV = struct.unpack('<%df' % (num_uv * 2), temp_data)
|
||||
|
||||
elif (new_chunk.ID == OBJECT_TRANS_MATRIX):
|
||||
# How do we know the matrix size? 54 == 4x4 48 == 4x3
|
||||
temp_data = file.read(STRUCT_SIZE_4x3MAT)
|
||||
data = list( struct.unpack('<ffffffffffff', temp_data) )
|
||||
new_chunk.bytes_read += STRUCT_SIZE_4x3MAT
|
||||
|
||||
contextMatrix_rot = mathutils.Matrix(\
|
||||
data[:3] + [0],\
|
||||
data[3:6] + [0],\
|
||||
data[6:9] + [0],\
|
||||
data[9:] + [1])
|
||||
|
||||
|
||||
'''
|
||||
contextMatrix_rot = Blender.mathutils.Matrix(\
|
||||
data[:3] + [0],\
|
||||
data[3:6] + [0],\
|
||||
data[6:9] + [0],\
|
||||
[0,0,0,1])
|
||||
'''
|
||||
|
||||
'''
|
||||
contextMatrix_rot = Blender.mathutils.Matrix(\
|
||||
data[:3] ,\
|
||||
data[3:6],\
|
||||
data[6:9])
|
||||
'''
|
||||
|
||||
'''
|
||||
contextMatrix_rot = Blender.mathutils.Matrix()
|
||||
m = 0
|
||||
for j in xrange(4):
|
||||
for i in xrange(3):
|
||||
contextMatrix_rot[j][i] = data[m]
|
||||
m += 1
|
||||
|
||||
contextMatrix_rot[0][3]=0;
|
||||
contextMatrix_rot[1][3]=0;
|
||||
contextMatrix_rot[2][3]=0;
|
||||
contextMatrix_rot[3][3]=1;
|
||||
'''
|
||||
|
||||
#contextMatrix_rot.resize4x4()
|
||||
#print "MTX"
|
||||
#print contextMatrix_rot
|
||||
contextMatrix_rot.invert()
|
||||
#print contextMatrix_rot
|
||||
#contextMatrix_tx = mathutils.Matrix.Translation(0.5 * Blender.mathutils.Vector(data[9:]))
|
||||
#contextMatrix_tx.invert()
|
||||
|
||||
#tx.invert()
|
||||
|
||||
#contextMatrix = contextMatrix * tx
|
||||
#contextMatrix = contextMatrix *tx
|
||||
|
||||
elif (new_chunk.ID == MAT_MAP_FILEPATH):
|
||||
texture_name = read_string(file)
|
||||
try:
|
||||
TEXTURE_DICT[contextMaterial.name]
|
||||
except:
|
||||
#img = TEXTURE_DICT[contextMaterial.name]= BPyImage.comprehensiveImageLoad(texture_name, FILEPATH)
|
||||
img = TEXTURE_DICT[contextMaterial.name] = load_image(texture_name, dirname)
|
||||
# img = TEXTURE_DICT[contextMaterial.name]= BPyImage.comprehensiveImageLoad(texture_name, FILEPATH, PLACE_HOLDER=False, RECURSIVE=IMAGE_SEARCH)
|
||||
|
||||
new_chunk.bytes_read += len(texture_name)+1 #plus one for the null character that gets removed
|
||||
|
||||
else: #(new_chunk.ID!=VERSION or new_chunk.ID!=OBJECTINFO or new_chunk.ID!=OBJECT or new_chunk.ID!=MATERIAL):
|
||||
# print 'skipping to end of this chunk'
|
||||
buffer_size = new_chunk.length - new_chunk.bytes_read
|
||||
binary_format='%ic' % buffer_size
|
||||
temp_data = file.read(struct.calcsize(binary_format))
|
||||
new_chunk.bytes_read += buffer_size
|
||||
|
||||
|
||||
#update the previous chunk bytes read
|
||||
# print 'previous_chunk.bytes_read += new_chunk.bytes_read'
|
||||
# print previous_chunk.bytes_read, new_chunk.bytes_read
|
||||
previous_chunk.bytes_read += new_chunk.bytes_read
|
||||
## print 'Bytes left in this chunk: ', previous_chunk.length - previous_chunk.bytes_read
|
||||
|
||||
# FINISHED LOOP
|
||||
# There will be a number of objects still not added
|
||||
if CreateBlenderObject:
|
||||
putContextMesh(contextMesh_vertls, contextMesh_facels, contextMeshMaterials)
|
||||
|
||||
def load_3ds(filepath, context, IMPORT_CONSTRAIN_BOUNDS=10.0, IMAGE_SEARCH=True, APPLY_MATRIX=False):
|
||||
global SCN
|
||||
|
||||
# XXX
|
||||
# if BPyMessages.Error_NoFile(filepath):
|
||||
# return
|
||||
|
||||
print('\n\nImporting 3DS: %r' % (filepath))
|
||||
|
||||
time1 = time.clock()
|
||||
# time1 = Blender.sys.time()
|
||||
|
||||
current_chunk = chunk()
|
||||
|
||||
file = open(filepath, 'rb')
|
||||
|
||||
#here we go!
|
||||
# print 'reading the first chunk'
|
||||
read_chunk(file, current_chunk)
|
||||
if (current_chunk.ID!=PRIMARY):
|
||||
print('\tFatal Error: Not a valid 3ds file: %r' % filepath)
|
||||
file.close()
|
||||
return
|
||||
|
||||
|
||||
# IMPORT_AS_INSTANCE = Blender.Draw.Create(0)
|
||||
# IMPORT_CONSTRAIN_BOUNDS = Blender.Draw.Create(10.0)
|
||||
# IMAGE_SEARCH = Blender.Draw.Create(1)
|
||||
# APPLY_MATRIX = Blender.Draw.Create(0)
|
||||
|
||||
# Get USER Options
|
||||
# pup_block = [\
|
||||
# ('Size Constraint:', IMPORT_CONSTRAIN_BOUNDS, 0.0, 1000.0, 'Scale the model by 10 until it reacehs the size constraint. Zero Disables.'),\
|
||||
# ('Image Search', IMAGE_SEARCH, 'Search subdirs for any assosiated images (Warning, may be slow)'),\
|
||||
# ('Transform Fix', APPLY_MATRIX, 'Workaround for object transformations importing incorrectly'),\
|
||||
# #('Group Instance', IMPORT_AS_INSTANCE, 'Import objects into a new scene and group, creating an instance in the current scene.'),\
|
||||
# ]
|
||||
|
||||
# if PREF_UI:
|
||||
# if not Blender.Draw.PupBlock('Import 3DS...', pup_block):
|
||||
# return
|
||||
|
||||
# Blender.Window.WaitCursor(1)
|
||||
|
||||
# IMPORT_CONSTRAIN_BOUNDS = IMPORT_CONSTRAIN_BOUNDS.val
|
||||
# # IMPORT_AS_INSTANCE = IMPORT_AS_INSTANCE.val
|
||||
# IMAGE_SEARCH = IMAGE_SEARCH.val
|
||||
# APPLY_MATRIX = APPLY_MATRIX.val
|
||||
|
||||
if IMPORT_CONSTRAIN_BOUNDS:
|
||||
BOUNDS_3DS[:]= [1<<30, 1<<30, 1<<30, -1<<30, -1<<30, -1<<30]
|
||||
else:
|
||||
BOUNDS_3DS[:]= []
|
||||
|
||||
##IMAGE_SEARCH
|
||||
|
||||
scn = context.scene
|
||||
# scn = bpy.data.scenes.active
|
||||
SCN = scn
|
||||
# SCN_OBJECTS = scn.objects
|
||||
# SCN_OBJECTS.selected = [] # de select all
|
||||
|
||||
importedObjects = [] # Fill this list with objects
|
||||
process_next_chunk(file, current_chunk, importedObjects, IMAGE_SEARCH)
|
||||
|
||||
|
||||
# Link the objects into this scene.
|
||||
# Layers = scn.Layers
|
||||
|
||||
# REMOVE DUMMYVERT, - remove this in the next release when blenders internal are fixed.
|
||||
|
||||
for ob in importedObjects:
|
||||
if ob.type == 'MESH':
|
||||
me = ob.data
|
||||
# me.vertices.delete([me.vertices[0],]) # XXX, todo
|
||||
if not APPLY_MATRIX:
|
||||
me.transform(ob.matrix_world.copy().invert())
|
||||
|
||||
# Done DUMMYVERT
|
||||
"""
|
||||
if IMPORT_AS_INSTANCE:
|
||||
name = filepath.split('\\')[-1].split('/')[-1]
|
||||
# Create a group for this import.
|
||||
group_scn = Scene.New(name)
|
||||
for ob in importedObjects:
|
||||
group_scn.link(ob) # dont worry about the layers
|
||||
|
||||
grp = Blender.Group.New(name)
|
||||
grp.objects = importedObjects
|
||||
|
||||
grp_ob = Object.New('Empty', name)
|
||||
grp_ob.enableDupGroup = True
|
||||
grp_ob.DupGroup = grp
|
||||
scn.link(grp_ob)
|
||||
grp_ob.Layers = Layers
|
||||
grp_ob.sel = 1
|
||||
else:
|
||||
# Select all imported objects.
|
||||
for ob in importedObjects:
|
||||
scn.link(ob)
|
||||
ob.Layers = Layers
|
||||
ob.sel = 1
|
||||
"""
|
||||
|
||||
if 0:
|
||||
# if IMPORT_CONSTRAIN_BOUNDS!=0.0:
|
||||
# Set bounds from objecyt bounding box
|
||||
for ob in importedObjects:
|
||||
if ob.type == 'MESH':
|
||||
# if ob.type=='Mesh':
|
||||
ob.makeDisplayList() # Why dosnt this update the bounds?
|
||||
for v in ob.getBoundBox():
|
||||
for i in (0,1,2):
|
||||
if v[i] < BOUNDS_3DS[i]:
|
||||
BOUNDS_3DS[i]= v[i] # min
|
||||
|
||||
if v[i] > BOUNDS_3DS[i + 3]:
|
||||
BOUNDS_3DS[i + 3]= v[i] # min
|
||||
|
||||
# Get the max axis x/y/z
|
||||
max_axis = max(BOUNDS_3DS[3]-BOUNDS_3DS[0], BOUNDS_3DS[4]-BOUNDS_3DS[1], BOUNDS_3DS[5]-BOUNDS_3DS[2])
|
||||
# print max_axis
|
||||
if max_axis < 1 << 30: # Should never be false but just make sure.
|
||||
|
||||
# Get a new scale factor if set as an option
|
||||
SCALE = 1.0
|
||||
while (max_axis * SCALE) > IMPORT_CONSTRAIN_BOUNDS:
|
||||
SCALE/=10
|
||||
|
||||
# SCALE Matrix
|
||||
SCALE_MAT = mathutils.Matrix([SCALE,0,0,0],[0,SCALE,0,0],[0,0,SCALE,0],[0,0,0,1])
|
||||
# SCALE_MAT = Blender.mathutils.Matrix([SCALE,0,0,0],[0,SCALE,0,0],[0,0,SCALE,0],[0,0,0,1])
|
||||
|
||||
for ob in importedObjects:
|
||||
ob.matrix_world = ob.matrix_world * SCALE_MAT
|
||||
|
||||
# Done constraining to bounds.
|
||||
|
||||
# Select all new objects.
|
||||
print('finished importing: %r in %.4f sec.' % (filepath, (time.clock()-time1)))
|
||||
file.close()
|
||||
|
||||
|
||||
def load(operator, context, filepath="", constrain_size=0.0, use_image_search=True, use_apply_transform=True):
|
||||
load_3ds(filepath, context, IMPORT_CONSTRAIN_BOUNDS=constrain_size, IMAGE_SEARCH=use_image_search, APPLY_MATRIX=use_apply_transform)
|
||||
return {'FINISHED'}
|
@ -1,122 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# To support reload properly, try to access a package var, if it's there, reload everything
|
||||
if "bpy" in locals():
    # The add-on is being re-registered (e.g. after editing the script):
    # refresh the submodule so changes take effect without restarting Blender.
    import sys
    import imp
    # FIX: reload() is not a builtin in Python 3 -- imp.reload() is the
    # correct spelling; the bare reload(...) raised NameError when this
    # branch actually ran.
    imp.reload(sys.modules.get("io_scene_fbx.export_fbx", sys))
|
||||
|
||||
|
||||
import bpy
|
||||
from bpy.props import *
|
||||
from io_utils import ExportHelper
|
||||
|
||||
|
||||
class ExportFBX(bpy.types.Operator, ExportHelper):
    '''Selection to an ASCII Autodesk FBX'''
    bl_idname = "export_scene.fbx"
    bl_label = "Export FBX"

    filename_ext = ".fbx"

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.

    EXP_OBS_SELECTED = BoolProperty(name="Selected Objects", description="Export selected objects on visible layers", default=True)
    # EXP_OBS_SCENE = BoolProperty(name="Scene Objects", description="Export all objects in this scene", default=True)
    TX_SCALE = FloatProperty(name="Scale", description="Scale all data, (Note! some imports dont support scaled armatures)", min=0.01, max=1000.0, soft_min=0.01, soft_max=1000.0, default=1.0)
    TX_XROT90 = BoolProperty(name="Rot X90", description="Rotate all objects 90 degrees about the X axis", default=True)
    TX_YROT90 = BoolProperty(name="Rot Y90", description="Rotate all objects 90 degrees about the Y axis", default=False)
    TX_ZROT90 = BoolProperty(name="Rot Z90", description="Rotate all objects 90 degrees about the Z axis", default=False)
    EXP_EMPTY = BoolProperty(name="Empties", description="Export empty objects", default=True)
    EXP_CAMERA = BoolProperty(name="Cameras", description="Export camera objects", default=True)
    EXP_LAMP = BoolProperty(name="Lamps", description="Export lamp objects", default=True)
    EXP_ARMATURE = BoolProperty(name="Armatures", description="Export armature objects", default=True)
    EXP_MESH = BoolProperty(name="Meshes", description="Export mesh objects", default=True)
    EXP_MESH_APPLY_MOD = BoolProperty(name="Modifiers", description="Apply modifiers to mesh objects", default=True)
    EXP_MESH_HQ_NORMALS = BoolProperty(name="HQ Normals", description="Generate high quality normals", default=True)
    EXP_IMAGE_COPY = BoolProperty(name="Copy Image Files", description="Copy image files to the destination path", default=False)
    # armature animation
    ANIM_ENABLE = BoolProperty(name="Enable Animation", description="Export keyframe animation", default=True)
    ANIM_OPTIMIZE = BoolProperty(name="Optimize Keyframes", description="Remove double keyframes", default=True)
    ANIM_OPTIMIZE_PRECISSION = FloatProperty(name="Precision", description="Tolerence for comparing double keyframes (higher for greater accuracy)", min=1, max=16, soft_min=1, soft_max=16, default=6.0)
    # ANIM_ACTION_ALL = BoolProperty(name="Current Action", description="Use actions currently applied to the armatures (use scene start/end frame)", default=True)
    ANIM_ACTION_ALL = BoolProperty(name="All Actions", description="Use all actions for armatures, if false, use current action", default=False)
    # batch
    BATCH_ENABLE = BoolProperty(name="Enable Batch", description="Automate exporting multiple scenes or groups to files", default=False)
    BATCH_GROUP = BoolProperty(name="Group > File", description="Export each group as an FBX file, if false, export each scene as an FBX file", default=False)
    BATCH_OWN_DIR = BoolProperty(name="Own Dir", description="Create a dir for each exported file", default=True)
    BATCH_FILE_PREFIX = StringProperty(name="Prefix", description="Prefix each file with this name", maxlen=1024, default="")

    def execute(self, context):
        """Build the global transform from the scale/rotation options, then
        delegate the actual export to io_scene_fbx.export_fbx.save().

        Raises Exception when no filepath was supplied by the file selector.
        """
        import math
        from mathutils import Matrix
        if not self.filepath:
            raise Exception("filepath not set")

        # Negative 90-degree rotations about each axis, applied on demand below.
        mtx4_x90n = Matrix.Rotation(-math.pi / 2.0, 4, 'X')
        mtx4_y90n = Matrix.Rotation(-math.pi / 2.0, 4, 'Y')
        mtx4_z90n = Matrix.Rotation(-math.pi / 2.0, 4, 'Z')

        # Uniform scale on the diagonal, then optional axis rotations.
        GLOBAL_MATRIX = Matrix()
        GLOBAL_MATRIX[0][0] = GLOBAL_MATRIX[1][1] = GLOBAL_MATRIX[2][2] = self.TX_SCALE
        if self.TX_XROT90:
            GLOBAL_MATRIX = mtx4_x90n * GLOBAL_MATRIX
        if self.TX_YROT90:
            GLOBAL_MATRIX = mtx4_y90n * GLOBAL_MATRIX
        if self.TX_ZROT90:
            GLOBAL_MATRIX = mtx4_z90n * GLOBAL_MATRIX

        import io_scene_fbx.export_fbx
        return io_scene_fbx.export_fbx.save(self, context, self.filepath,
                            GLOBAL_MATRIX=GLOBAL_MATRIX,
                            EXP_OBS_SELECTED=self.EXP_OBS_SELECTED,
                            EXP_MESH=self.EXP_MESH,
                            EXP_MESH_APPLY_MOD=self.EXP_MESH_APPLY_MOD,
                            # FIX: this option was declared above but never
                            # forwarded, so toggling "HQ Normals" had no effect.
                            EXP_MESH_HQ_NORMALS=self.EXP_MESH_HQ_NORMALS,
                            EXP_ARMATURE=self.EXP_ARMATURE,
                            EXP_LAMP=self.EXP_LAMP,
                            EXP_CAMERA=self.EXP_CAMERA,
                            EXP_EMPTY=self.EXP_EMPTY,
                            EXP_IMAGE_COPY=self.EXP_IMAGE_COPY,
                            ANIM_ENABLE=self.ANIM_ENABLE,
                            ANIM_OPTIMIZE=self.ANIM_OPTIMIZE,
                            ANIM_OPTIMIZE_PRECISSION=self.ANIM_OPTIMIZE_PRECISSION,
                            ANIM_ACTION_ALL=self.ANIM_ACTION_ALL,
                            BATCH_ENABLE=self.BATCH_ENABLE,
                            BATCH_GROUP=self.BATCH_GROUP,
                            BATCH_FILE_PREFIX=self.BATCH_FILE_PREFIX,
                            BATCH_OWN_DIR=self.BATCH_OWN_DIR,
                            )
|
||||
|
||||
|
||||
def menu_func(self, context):
    """File > Export menu entry for the FBX exporter."""
    layout = self.layout
    layout.operator(ExportFBX.bl_idname, text="Autodesk FBX (.fbx)")
|
||||
|
||||
|
||||
def register():
    """Hook the FBX exporter into Blender's File > Export menu."""
    export_menu = bpy.types.INFO_MT_file_export
    export_menu.append(menu_func)
|
||||
|
||||
|
||||
def unregister():
    """Remove the FBX exporter's entry from the File > Export menu."""
    export_menu = bpy.types.INFO_MT_file_export
    export_menu.remove(menu_func)
|
||||
|
||||
# Allow running this script directly (e.g. from Blender's text editor).
if __name__ == "__main__":
    register()
|
@ -1,144 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# To support reload properly, try to access a package var, if it's there, reload everything
|
||||
if "bpy" in locals():
    # The add-on is being re-registered (e.g. after editing a submodule):
    # refresh both submodules so changes take effect without restarting Blender.
    import sys
    import imp
    # FIX: reload() is not a builtin in Python 3 -- imp.reload() is the
    # correct spelling; the bare reload(...) raised NameError when this
    # branch actually ran.
    imp.reload(sys.modules.get("io_scene_obj.import_obj", sys))
    imp.reload(sys.modules.get("io_scene_obj.export_obj", sys))
|
||||
|
||||
|
||||
import bpy
|
||||
from bpy.props import *
|
||||
from io_utils import ExportHelper, ImportHelper
|
||||
|
||||
|
||||
class ImportOBJ(bpy.types.Operator, ImportHelper):
    '''Load a Wavefront OBJ File'''
    bl_idname = "import_scene.obj"
    bl_label = "Import OBJ"

    filename_ext = ".obj"
    filter_glob = StringProperty(default="*.obj;*.mtl", options={'HIDDEN'})

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.
    CREATE_SMOOTH_GROUPS = BoolProperty(name="Smooth Groups", description="Surround smooth groups by sharp edges", default=True)
    CREATE_FGONS = BoolProperty(name="NGons as FGons", description="Import faces with more then 4 verts as fgons", default=True)
    CREATE_EDGES = BoolProperty(name="Lines as Edges", description="Import lines and faces with 2 verts as edge", default=True)
    SPLIT_OBJECTS = BoolProperty(name="Object", description="Import OBJ Objects into Blender Objects", default=True)
    SPLIT_GROUPS = BoolProperty(name="Group", description="Import OBJ Groups into Blender Objects", default=True)
    # old comment: only used for user feedback
    # disabled this option because in old code a handler for it disabled SPLIT* params, it's not passed to load_obj
    # KEEP_VERT_ORDER = BoolProperty(name="Keep Vert Order", description="Keep vert and face order, disables split options, enable for morph targets", default= True)
    ROTATE_X90 = BoolProperty(name="-X90", description="Rotate X 90.", default=True)
    CLAMP_SIZE = FloatProperty(name="Clamp Scale", description="Clamp the size to this maximum (Zero to Disable)", min=0.0, max=1000.0, soft_min=0.0, soft_max=1000.0, default=0.0)
    POLYGROUPS = BoolProperty(name="Poly Groups", description="Import OBJ groups as vertex groups.", default=True)
    IMAGE_SEARCH = BoolProperty(name="Image Search", description="Search subdirs for any assosiated images (Warning, may be slow)", default=True)

    def execute(self, context):
        """Import the selected .obj file.

        All operator properties are forwarded to
        io_scene_obj.import_obj.load() as keyword arguments; its return
        value (the operator result set) is returned directly.
        """
        # print("Selected: " + context.active_object.name)
        import io_scene_obj.import_obj
        return io_scene_obj.import_obj.load(self, context, **self.properties)
        # FIX: the original body continued after this return with an
        # unreachable triple-quoted string (an old positional load_obj call
        # kept as reference) and a second "return {'FINISHED'}"; both were
        # dead code and have been removed.
|
||||
|
||||
|
||||
class ExportOBJ(bpy.types.Operator, ExportHelper):
    '''Save a Wavefront OBJ File'''

    bl_idname = "export_scene.obj"
    bl_label = 'Export OBJ'

    filename_ext = ".obj"

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.

    # -- context group: which objects/scenes/frames to export --
    use_selection = BoolProperty(name="Selection Only", description="Export selected objects only", default=False)
    use_all_scenes = BoolProperty(name="All Scenes", description="", default=False)
    use_animation = BoolProperty(name="Animation", description="", default=False)

    # -- object group: per-object transform handling --
    use_modifiers = BoolProperty(name="Apply Modifiers", description="Apply modifiers (preview resolution)", default=True)
    use_rotate_x90 = BoolProperty(name="Rotate X90", description="", default=True)

    # -- extra data group: which mesh data to write --
    use_edges = BoolProperty(name="Edges", description="", default=True)
    use_normals = BoolProperty(name="Normals", description="", default=False)
    use_hq_normals = BoolProperty(name="High Quality Normals", description="", default=True)
    use_uvs = BoolProperty(name="UVs", description="", default=True)
    use_materials = BoolProperty(name="Materials", description="", default=True)
    copy_images = BoolProperty(name="Copy Images", description="", default=False)
    use_triangles = BoolProperty(name="Triangulate", description="", default=False)
    use_vertex_groups = BoolProperty(name="Polygroups", description="", default=False)
    use_nurbs = BoolProperty(name="Nurbs", description="", default=False)

    # -- grouping group: how OBJ o/g records are emitted --
    use_blen_objects = BoolProperty(name="Objects as OBJ Objects", description="", default=True)
    group_by_object = BoolProperty(name="Objects as OBJ Groups ", description="", default=False)
    group_by_material = BoolProperty(name="Material Groups", description="", default=False)
    keep_vertex_order = BoolProperty(name="Keep Vertex Order", description="", default=False)

    def execute(self, context):
        """Forward every operator property to the OBJ writer."""
        import io_scene_obj.export_obj
        save = io_scene_obj.export_obj.save
        return save(self, context, **self.properties)
|
||||
|
||||
|
||||
def menu_func_import(self, context):
    """File > Import menu entry for the OBJ importer."""
    layout = self.layout
    layout.operator(ImportOBJ.bl_idname, text="Wavefront (.obj)")
|
||||
|
||||
|
||||
def menu_func_export(self, context):
    """File > Export menu entry for the OBJ exporter."""
    layout = self.layout
    layout.operator(ExportOBJ.bl_idname, text="Wavefront (.obj)")
|
||||
|
||||
|
||||
def register():
    """Hook the OBJ importer and exporter into Blender's file menus."""
    for menu, func in ((bpy.types.INFO_MT_file_import, menu_func_import),
                       (bpy.types.INFO_MT_file_export, menu_func_export)):
        menu.append(func)
|
||||
|
||||
def unregister():
    """Remove the OBJ importer/exporter entries from Blender's file menus."""
    for menu, func in ((bpy.types.INFO_MT_file_import, menu_func_import),
                       (bpy.types.INFO_MT_file_export, menu_func_export)):
        menu.remove(func)
|
||||
|
||||
|
||||
# CONVERSION ISSUES
|
||||
# - matrix problem
|
||||
# - duplis - only tested dupliverts
|
||||
# - all scenes export
|
||||
# + normals calculation
|
||||
|
||||
# Allow running this script directly (e.g. from Blender's text editor).
if __name__ == "__main__":
    register()
|
@ -1,842 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
import os
|
||||
import time
|
||||
import shutil
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
|
||||
def fixName(name):
    """Return an OBJ-safe identifier.

    None maps to the literal string 'None'; otherwise every space is
    replaced with an underscore (OBJ names may not contain spaces).
    """
    return 'None' if name is None else name.replace(' ', '_')
|
||||
|
||||
def write_mtl(scene, filepath, copy_images, mtl_dict):
    """Write the .mtl material library that accompanies an OBJ export.

    scene:       only its world ambient colour is read.
    filepath:    destination .mtl path.
    copy_images: when True, referenced images are copied beside the .mtl
                 and referenced by basename; otherwise their absolute paths
                 are written.
    mtl_dict:    maps a key to (mtl_mat_name, material_or_None, image_or_None).
    """
    world = scene.world
    worldAmb = world.ambient_color

    dest_dir = os.path.dirname(filepath)

    def copy_image(image):
        # Resolve the image path; optionally copy it next to the export.
        fn = bpy.path.abspath(image.filepath)
        fn_strip = os.path.basename(fn)
        if copy_images:
            rel = fn_strip
            fn_abs_dest = os.path.join(dest_dir, fn_strip)
            if not os.path.exists(fn_abs_dest):
                shutil.copy(fn, fn_abs_dest)
        else:
            rel = fn

        return rel

    # FIX: use a context manager so the file is closed even if writing a
    # material raises (the original leaked the handle on error).
    with open(filepath, "w") as file:
        # XXX
        # file.write('# Blender MTL File: %s\n' % Blender.Get('filepath').split('\\')[-1].split('/')[-1])
        file.write('# Material Count: %i\n' % len(mtl_dict))
        # Write material/image combinations we have used.
        for key, (mtl_mat_name, mat, img) in mtl_dict.items():

            # Having an image named None will make a bug, dont do it :)
            file.write('newmtl %s\n' % mtl_mat_name)  # Define a new material: matname_imgname

            if mat:
                file.write('Ns %.6f\n' % ((mat.specular_hardness - 1) * 1.9607843137254901))  # Hardness, convert blenders 1-511 to MTL's
                file.write('Ka %.6f %.6f %.6f\n' % tuple([c * mat.ambient for c in worldAmb]))  # Ambient, uses mirror colour,
                file.write('Kd %.6f %.6f %.6f\n' % tuple([c * mat.diffuse_intensity for c in mat.diffuse_color]))  # Diffuse
                file.write('Ks %.6f %.6f %.6f\n' % tuple([c * mat.specular_intensity for c in mat.specular_color]))  # Specular
                if hasattr(mat, "ior"):
                    file.write('Ni %.6f\n' % mat.ior)  # Refraction index
                else:
                    file.write('Ni %.6f\n' % 1.0)
                file.write('d %.6f\n' % mat.alpha)  # Alpha (obj uses 'd' for dissolve)

                # 0 to disable lighting, 1 for ambient & diffuse only (specular color set to black), 2 for full lighting.
                if mat.use_shadeless:
                    file.write('illum 0\n')  # ignore lighting
                elif mat.specular_intensity == 0:
                    file.write('illum 1\n')  # no specular.
                else:
                    file.write('illum 2\n')  # light normaly

            else:
                # No material: write a plain grey placeholder entry.
                file.write('Ns 0\n')
                file.write('Ka %.6f %.6f %.6f\n' % tuple([c for c in worldAmb]))  # Ambient, uses mirror colour,
                file.write('Kd 0.8 0.8 0.8\n')
                file.write('Ks 0.8 0.8 0.8\n')
                file.write('d 1\n')  # No alpha
                file.write('illum 2\n')  # light normaly

            # Write images!
            if img:  # We have an image on the face!
                # write relative image path
                rel = copy_image(img)
                file.write('map_Kd %s\n' % rel)  # Diffuse mapping image

            elif mat:  # No face image. if we have a material search for MTex image.
                for mtex in mat.texture_slots:
                    if mtex and mtex.texture.type == 'IMAGE':
                        try:
                            # Renamed from 'filepath' to avoid shadowing the parameter.
                            texpath = copy_image(mtex.texture.image)
                            file.write('map_Kd %s\n' % repr(texpath)[1:-1])  # Diffuse mapping image
                            break
                        except:
                            # Texture has no image though its an image type, best ignore.
                            pass

            file.write('\n\n')
|
||||
|
||||
# XXX not used
|
||||
def copy_file(source, dest):
    """Byte-for-byte copy of the file at 'source' to 'dest'.

    FIX: uses context managers so both handles are closed even when a
    read/write raises (the original leaked them on error).
    """
    with open(source, 'rb') as fin:
        data = fin.read()

    with open(dest, 'wb') as fout:
        fout.write(data)
|
||||
|
||||
|
||||
# XXX not used
|
||||
def copy_images(dest_dir, mtl_dict=None):
    """Collect the unique images referenced by the materials in mtl_dict.

    FIX: mtl_dict was read from an undefined global, so calling this raised
    NameError; it is now an explicit parameter (defaulting to empty), which
    keeps the old call signature working.

    Note the actual copy loop was already disabled upstream, so this only
    gathers the image set and reports a copy count of 0.
    """
    if mtl_dict is None:
        mtl_dict = {}

    # Normalise the destination so later joins are simple concatenations.
    if dest_dir[-1] != os.sep:
        dest_dir += os.sep

    # Get unique image names: face (TexFace) images plus image textures.
    uniqueImages = {}
    for matname, mat, image in mtl_dict.values():  # Only use image name
        if image:
            uniqueImages[image] = image

        if mat:
            for mtex in mat.texture_slots:
                if mtex and mtex.texture.type == 'IMAGE':
                    image_tex = mtex.texture.image
                    if image_tex:
                        uniqueImages[image_tex] = image_tex

    # Now copy images
    copyCount = 0

    # The per-image copy was disabled upstream; kept here for reference:
    # for bImage in uniqueImages.values():
    #     image_path = bpy.path.abspath(bImage.filepath)
    #     if bpy.sys.exists(image_path):
    #         # Make a name for the target path.
    #         dest_image_path = dest_dir + image_path.split('\\')[-1].split('/')[-1]
    #         if not bpy.utils.exists(dest_image_path):  # Image isnt already there
    #             print('\tCopying "%s" > "%s"' % (image_path, dest_image_path))
    #             copy_file(image_path, dest_image_path)
    #             copyCount += 1

    # paths = bpy.util.copy_images(uniqueImages.values(), dest_dir)

    print('\tCopied %d images' % copyCount)
|
||||
|
||||
|
||||
def test_nurbs_compat(ob):
|
||||
if ob.type != 'CURVE':
|
||||
return False
|
||||
|
||||
for nu in ob.data.splines:
|
||||
if nu.point_count_v == 1 and nu.type != 'BEZIER': # not a surface and not bezier
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def write_nurb(file, ob, ob_mat):
    """Write the splines of curve object 'ob' to 'file' as OBJ freeform
    curves (v/cstype/deg/curv/parm/end records).

    file:   open text file positioned where the curve records should go.
    ob:     curve object; bezier splines and surfaces are skipped with a
            console warning.
    ob_mat: matrix applied to each control point before writing.

    Returns the number of 'v' records written so the caller can keep its
    running global vertex index in sync.
    """
    tot_verts = 0
    cu = ob.data

    # use negative indices
    for nu in cu.splines:
        # Degree of the curve: POLY is linear; otherwise derive from order.
        if nu.type == 'POLY':
            DEG_ORDER_U = 1
        else:
            DEG_ORDER_U = nu.order_u - 1 # odd but tested to be correct

        if nu.type == 'BEZIER':
            print("\tWarning, bezier curve:", ob.name, "only poly and nurbs curves supported")
            continue

        # point_count_v > 1 means this spline is a surface, not a curve.
        if nu.point_count_v > 1:
            print("\tWarning, surface:", ob.name, "only poly and nurbs curves supported")
            continue

        # A valid b-spline needs more control points than its degree.
        if len(nu.points) <= DEG_ORDER_U:
            print("\tWarning, order_u is lower then vert count, skipping:", ob.name)
            continue

        pt_num = 0
        do_closed = nu.use_cyclic_u
        # Endpoint clamping only applies to open curves.
        do_endpoints = (do_closed == 0) and nu.use_endpoint_u

        # One 'v' record per control point, transformed into export space.
        for pt in nu.points:
            pt = ob_mat * pt.co.copy().resize3D()
            file.write('v %.6f %.6f %.6f\n' % (pt[0], pt[1], pt[2]))
            pt_num += 1
        tot_verts += pt_num

        file.write('g %s\n' % (fixName(ob.name))) # fixName(ob.getData(1)) could use the data name too
        file.write('cstype bspline\n') # not ideal, hard coded
        file.write('deg %d\n' % DEG_ORDER_U) # not used for curves but most files have it still

        # Negative indices count back from the vertices just written.
        curve_ls = [-(i+1) for i in range(pt_num)]

        # 'curv' keyword
        if do_closed:
            # Closed curves repeat leading control points to wrap around.
            if DEG_ORDER_U == 1:
                pt_num += 1
                curve_ls.append(-1)
            else:
                pt_num += DEG_ORDER_U
                curve_ls = curve_ls + curve_ls[0:DEG_ORDER_U]

        file.write('curv 0.0 1.0 %s\n' % (' '.join([str(i) for i in curve_ls]))) # Blender has no U and V values for the curve

        # 'parm' keyword -- a uniform knot vector over [0, 1].
        tot_parm = (DEG_ORDER_U + 1) + pt_num
        tot_parm_div = float(tot_parm-1)
        parm_ls = [(i/tot_parm_div) for i in range(tot_parm)]

        if do_endpoints: # end points, force param
            # Clamp the first/last (degree+1) knots to 0/1 so the curve
            # touches its end control points.
            for i in range(DEG_ORDER_U+1):
                parm_ls[i] = 0.0
                parm_ls[-(1+i)] = 1.0

        file.write('parm u %s\n' % ' '.join( [str(i) for i in parm_ls] ))

        file.write('end\n')

    return tot_verts
|
||||
|
||||
def write_file(filepath, objects, scene,
|
||||
EXPORT_TRI=False,
|
||||
EXPORT_EDGES=False,
|
||||
EXPORT_NORMALS=False,
|
||||
EXPORT_NORMALS_HQ=False,
|
||||
EXPORT_UV=True,
|
||||
EXPORT_MTL=True,
|
||||
EXPORT_COPY_IMAGES=False,
|
||||
EXPORT_APPLY_MODIFIERS=True,
|
||||
EXPORT_ROTX90=True,
|
||||
EXPORT_BLEN_OBS=True,
|
||||
EXPORT_GROUP_BY_OB=False,
|
||||
EXPORT_GROUP_BY_MAT=False,
|
||||
EXPORT_KEEP_VERT_ORDER=False,
|
||||
EXPORT_POLYGROUPS=False,
|
||||
EXPORT_CURVE_AS_NURBS=True):
|
||||
'''
|
||||
Basic write function. The context and options must be already set
|
||||
This can be accessed externaly
|
||||
eg.
|
||||
write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
|
||||
'''
|
||||
|
||||
# XXX
|
||||
import math
|
||||
|
||||
def veckey3d(v):
|
||||
return round(v.x, 6), round(v.y, 6), round(v.z, 6)
|
||||
|
||||
def veckey2d(v):
|
||||
return round(v[0], 6), round(v[1], 6)
|
||||
# return round(v.x, 6), round(v.y, 6)
|
||||
|
||||
def findVertexGroupName(face, vWeightMap):
|
||||
"""
|
||||
Searches the vertexDict to see what groups is assigned to a given face.
|
||||
We use a frequency system in order to sort out the name because a given vetex can
|
||||
belong to two or more groups at the same time. To find the right name for the face
|
||||
we list all the possible vertex group names with their frequency and then sort by
|
||||
frequency in descend order. The top element is the one shared by the highest number
|
||||
of vertices is the face's group
|
||||
"""
|
||||
weightDict = {}
|
||||
for vert_index in face.vertices:
|
||||
# for vert in face:
|
||||
vWeights = vWeightMap[vert_index]
|
||||
# vWeights = vWeightMap[vert]
|
||||
for vGroupName, weight in vWeights:
|
||||
weightDict[vGroupName] = weightDict.get(vGroupName, 0) + weight
|
||||
|
||||
if weightDict:
|
||||
alist = [(weight,vGroupName) for vGroupName, weight in weightDict.items()] # sort least to greatest amount of weight
|
||||
alist.sort()
|
||||
return(alist[-1][1]) # highest value last
|
||||
else:
|
||||
return '(null)'
|
||||
|
||||
print('OBJ Export path: %r' % filepath)
|
||||
temp_mesh_name = '~tmp-mesh'
|
||||
|
||||
time1 = time.clock()
|
||||
# time1 = sys.time()
|
||||
# scn = Scene.GetCurrent()
|
||||
|
||||
file = open(filepath, "w")
|
||||
|
||||
# Write Header
|
||||
file.write('# Blender v%s OBJ File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
|
||||
file.write('# www.blender.org\n')
|
||||
|
||||
# Tell the obj file what material file to use.
|
||||
if EXPORT_MTL:
|
||||
mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
|
||||
file.write('mtllib %s\n' % repr(os.path.basename(mtlfilepath))[1:-1]) # filepath can contain non utf8 chars, use repr
|
||||
|
||||
if EXPORT_ROTX90:
|
||||
mat_xrot90= mathutils.Matrix.Rotation(-math.pi/2, 4, 'X')
|
||||
|
||||
# Initialize totals, these are updated each object
|
||||
totverts = totuvco = totno = 1
|
||||
|
||||
face_vert_index = 1
|
||||
|
||||
globalNormals = {}
|
||||
|
||||
# A Dict of Materials
|
||||
# (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
|
||||
mtl_dict = {}
|
||||
|
||||
# Get all meshes
|
||||
for ob_main in objects:
|
||||
|
||||
# ignore dupli children
|
||||
if ob_main.parent and ob_main.parent.dupli_type != 'NONE':
|
||||
# XXX
|
||||
print(ob_main.name, 'is a dupli child - ignoring')
|
||||
continue
|
||||
|
||||
obs = []
|
||||
if ob_main.dupli_type != 'NONE':
|
||||
# XXX
|
||||
print('creating dupli_list on', ob_main.name)
|
||||
ob_main.create_dupli_list(scene)
|
||||
|
||||
obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
|
||||
|
||||
# XXX debug print
|
||||
print(ob_main.name, 'has', len(obs), 'dupli children')
|
||||
else:
|
||||
obs = [(ob_main, ob_main.matrix_world)]
|
||||
|
||||
for ob, ob_mat in obs:
|
||||
|
||||
# Nurbs curve support
|
||||
if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
|
||||
if EXPORT_ROTX90:
|
||||
ob_mat = ob_mat * mat_xrot90
|
||||
totverts += write_nurb(file, ob, ob_mat)
|
||||
continue
|
||||
# END NURBS
|
||||
|
||||
if ob.type != 'MESH':
|
||||
continue
|
||||
|
||||
me = ob.create_mesh(scene, EXPORT_APPLY_MODIFIERS, 'PREVIEW')
|
||||
|
||||
if EXPORT_ROTX90:
|
||||
me.transform(mat_xrot90 * ob_mat)
|
||||
else:
|
||||
me.transform(ob_mat)
|
||||
|
||||
# # Will work for non meshes now! :)
|
||||
# me= BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, EXPORT_POLYGROUPS, scn)
|
||||
# if not me:
|
||||
# continue
|
||||
|
||||
if EXPORT_UV:
|
||||
faceuv = len(me.uv_textures) > 0
|
||||
if faceuv:
|
||||
uv_layer = me.uv_textures.active.data[:]
|
||||
else:
|
||||
faceuv = False
|
||||
|
||||
me_verts = me.vertices[:]
|
||||
|
||||
# Make our own list so it can be sorted to reduce context switching
|
||||
face_index_pairs = [ (face, index) for index, face in enumerate(me.faces)]
|
||||
# faces = [ f for f in me.faces ]
|
||||
|
||||
if EXPORT_EDGES:
|
||||
edges = me.edges
|
||||
else:
|
||||
edges = []
|
||||
|
||||
if not (len(face_index_pairs)+len(edges)+len(me.vertices)): # Make sure there is somthing to write
|
||||
|
||||
# clean up
|
||||
bpy.data.meshes.remove(me)
|
||||
|
||||
continue # dont bother with this mesh.
|
||||
|
||||
# XXX
|
||||
# High Quality Normals
|
||||
if EXPORT_NORMALS and face_index_pairs:
|
||||
me.calc_normals()
|
||||
# if EXPORT_NORMALS_HQ:
|
||||
# BPyMesh.meshCalcNormals(me)
|
||||
# else:
|
||||
# # transforming normals is incorrect
|
||||
# # when the matrix is scaled,
|
||||
# # better to recalculate them
|
||||
# me.calcNormals()
|
||||
|
||||
materials = me.materials
|
||||
|
||||
materialNames = []
|
||||
materialItems = [m for m in materials]
|
||||
if materials:
|
||||
for mat in materials:
|
||||
if mat:
|
||||
materialNames.append(mat.name)
|
||||
else:
|
||||
materialNames.append(None)
|
||||
# Cant use LC because some materials are None.
|
||||
# materialNames = map(lambda mat: mat.name, materials) # Bug Blender, dosent account for null materials, still broken.
|
||||
|
||||
# Possible there null materials, will mess up indicies
|
||||
# but at least it will export, wait until Blender gets fixed.
|
||||
materialNames.extend((16-len(materialNames)) * [None])
|
||||
materialItems.extend((16-len(materialItems)) * [None])
|
||||
|
||||
# Sort by Material, then images
|
||||
# so we dont over context switch in the obj file.
|
||||
if EXPORT_KEEP_VERT_ORDER:
|
||||
pass
|
||||
elif faceuv:
|
||||
face_index_pairs.sort(key=lambda a: (a[0].material_index, hash(uv_layer[a[1]].image), a[0].use_smooth))
|
||||
elif len(materials) > 1:
|
||||
face_index_pairs.sort(key = lambda a: (a[0].material_index, a[0].use_smooth))
|
||||
else:
|
||||
# no materials
|
||||
face_index_pairs.sort(key = lambda a: a[0].use_smooth)
|
||||
# if EXPORT_KEEP_VERT_ORDER:
|
||||
# pass
|
||||
# elif faceuv:
|
||||
# try: faces.sort(key = lambda a: (a.mat, a.image, a.use_smooth))
|
||||
# except: faces.sort(lambda a,b: cmp((a.mat, a.image, a.use_smooth), (b.mat, b.image, b.use_smooth)))
|
||||
# elif len(materials) > 1:
|
||||
# try: faces.sort(key = lambda a: (a.mat, a.use_smooth))
|
||||
# except: faces.sort(lambda a,b: cmp((a.mat, a.use_smooth), (b.mat, b.use_smooth)))
|
||||
# else:
|
||||
# # no materials
|
||||
# try: faces.sort(key = lambda a: a.use_smooth)
|
||||
# except: faces.sort(lambda a,b: cmp(a.use_smooth, b.use_smooth))
|
||||
|
||||
# Set the default mat to no material and no image.
|
||||
contextMat = (0, 0) # Can never be this, so we will label a new material teh first chance we get.
|
||||
contextSmooth = None # Will either be true or false, set bad to force initialization switch.
|
||||
|
||||
if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
|
||||
name1 = ob.name
|
||||
name2 = ob.data.name
|
||||
if name1 == name2:
|
||||
obnamestring = fixName(name1)
|
||||
else:
|
||||
obnamestring = '%s_%s' % (fixName(name1), fixName(name2))
|
||||
|
||||
if EXPORT_BLEN_OBS:
|
||||
file.write('o %s\n' % obnamestring) # Write Object name
|
||||
else: # if EXPORT_GROUP_BY_OB:
|
||||
file.write('g %s\n' % obnamestring)
|
||||
|
||||
|
||||
# Vert
|
||||
for v in me_verts:
|
||||
file.write('v %.6f %.6f %.6f\n' % tuple(v.co))
|
||||
|
||||
# UV
|
||||
if faceuv:
|
||||
uv_face_mapping = [[0,0,0,0] for i in range(len(face_index_pairs))] # a bit of a waste for tri's :/
|
||||
|
||||
uv_dict = {} # could use a set() here
|
||||
uv_layer = me.uv_textures.active.data
|
||||
for f, f_index in face_index_pairs:
|
||||
for uv_index, uv in enumerate(uv_layer[f_index].uv):
|
||||
uvkey = veckey2d(uv)
|
||||
try:
|
||||
uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
|
||||
except:
|
||||
uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
|
||||
file.write('vt %.6f %.6f\n' % tuple(uv))
|
||||
|
||||
uv_unique_count = len(uv_dict)
|
||||
# del uv, uvkey, uv_dict, f_index, uv_index
|
||||
# Only need uv_unique_count and uv_face_mapping
|
||||
|
||||
# NORMAL, Smooth/Non smoothed.
|
||||
if EXPORT_NORMALS:
|
||||
for f, f_index in face_index_pairs:
|
||||
if f.use_smooth:
|
||||
for v_idx in f.vertices:
|
||||
v = me_verts[v_idx]
|
||||
noKey = veckey3d(v.normal)
|
||||
if noKey not in globalNormals:
|
||||
globalNormals[noKey] = totno
|
||||
totno +=1
|
||||
file.write('vn %.6f %.6f %.6f\n' % noKey)
|
||||
else:
|
||||
# Hard, 1 normal from the face.
|
||||
noKey = veckey3d(f.normal)
|
||||
if noKey not in globalNormals:
|
||||
globalNormals[noKey] = totno
|
||||
totno +=1
|
||||
file.write('vn %.6f %.6f %.6f\n' % noKey)
|
||||
|
||||
if not faceuv:
|
||||
f_image = None
|
||||
|
||||
# XXX
|
||||
if EXPORT_POLYGROUPS:
|
||||
# Retrieve the list of vertex groups
|
||||
vertGroupNames = [g.name for g in ob.vertex_groups]
|
||||
|
||||
currentVGroup = ''
|
||||
# Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
|
||||
vgroupsMap = [[] for _i in range(len(me_verts))]
|
||||
for v_idx, v in enumerate(me.vertices):
|
||||
for g in v.groups:
|
||||
vgroupsMap[v_idx].append((vertGroupNames[g.group], g.weight))
|
||||
|
||||
for f, f_index in face_index_pairs:
|
||||
f_smooth= f.use_smooth
|
||||
f_mat = min(f.material_index, len(materialNames)-1)
|
||||
# f_mat = min(f.mat, len(materialNames)-1)
|
||||
if faceuv:
|
||||
|
||||
tface = uv_layer[f_index]
|
||||
|
||||
f_image = tface.image
|
||||
f_uv = tface.uv
|
||||
# f_uv= [tface.uv1, tface.uv2, tface.uv3]
|
||||
# if len(f.vertices) == 4:
|
||||
# f_uv.append(tface.uv4)
|
||||
# f_image = f.image
|
||||
# f_uv= f.uv
|
||||
|
||||
# MAKE KEY
|
||||
if faceuv and f_image: # Object is always true.
|
||||
key = materialNames[f_mat], f_image.name
|
||||
else:
|
||||
key = materialNames[f_mat], None # No image, use None instead.
|
||||
|
||||
# Write the vertex group
|
||||
if EXPORT_POLYGROUPS:
|
||||
if ob.vertex_groups:
|
||||
# find what vertext group the face belongs to
|
||||
theVGroup = findVertexGroupName(f,vgroupsMap)
|
||||
if theVGroup != currentVGroup:
|
||||
currentVGroup = theVGroup
|
||||
file.write('g %s\n' % theVGroup)
|
||||
|
||||
# CHECK FOR CONTEXT SWITCH
|
||||
if key == contextMat:
|
||||
pass # Context already switched, dont do anything
|
||||
else:
|
||||
if key[0] is None and key[1] is None:
|
||||
# Write a null material, since we know the context has changed.
|
||||
if EXPORT_GROUP_BY_MAT:
|
||||
# can be mat_image or (null)
|
||||
file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.data.name)) ) # can be mat_image or (null)
|
||||
file.write('usemtl (null)\n') # mat, image
|
||||
|
||||
else:
|
||||
mat_data= mtl_dict.get(key)
|
||||
if not mat_data:
|
||||
# First add to global dict so we can export to mtl
|
||||
# Then write mtl
|
||||
|
||||
# Make a new names from the mat and image name,
|
||||
# converting any spaces to underscores with fixName.
|
||||
|
||||
# If none image dont bother adding it to the name
|
||||
if key[1] is None:
|
||||
mat_data = mtl_dict[key] = ('%s'%fixName(key[0])), materialItems[f_mat], f_image
|
||||
else:
|
||||
mat_data = mtl_dict[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image
|
||||
|
||||
if EXPORT_GROUP_BY_MAT:
|
||||
file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.data.name), mat_data[0]) ) # can be mat_image or (null)
|
||||
|
||||
file.write('usemtl %s\n' % mat_data[0]) # can be mat_image or (null)
|
||||
|
||||
contextMat = key
|
||||
if f_smooth != contextSmooth:
|
||||
if f_smooth: # on now off
|
||||
file.write('s 1\n')
|
||||
contextSmooth = f_smooth
|
||||
else: # was off now on
|
||||
file.write('s off\n')
|
||||
contextSmooth = f_smooth
|
||||
|
||||
f_v_orig = [me_verts[v_idx] for v_idx in f.vertices]
|
||||
|
||||
if not EXPORT_TRI or len(f_v_orig) == 3:
|
||||
f_v_iter = (f_v_orig, )
|
||||
else:
|
||||
f_v_iter = (f_v_orig[0], f_v_orig[1], f_v_orig[2]), (f_v_orig[0], f_v_orig[2], f_v_orig[3])
|
||||
|
||||
# support for triangulation
|
||||
for f_v in f_v_iter:
|
||||
file.write('f')
|
||||
|
||||
if faceuv:
|
||||
if EXPORT_NORMALS:
|
||||
if f_smooth: # Smoothed, use vertex normals
|
||||
for vi, v in enumerate(f_v):
|
||||
file.write( ' %d/%d/%d' % \
|
||||
(v.index + totverts,
|
||||
totuvco + uv_face_mapping[f_index][vi],
|
||||
globalNormals[ veckey3d(v.normal) ]) ) # vert, uv, normal
|
||||
|
||||
else: # No smoothing, face normals
|
||||
no = globalNormals[ veckey3d(f.normal) ]
|
||||
for vi, v in enumerate(f_v):
|
||||
file.write( ' %d/%d/%d' % \
|
||||
(v.index + totverts,
|
||||
totuvco + uv_face_mapping[f_index][vi],
|
||||
no) ) # vert, uv, normal
|
||||
else: # No Normals
|
||||
for vi, v in enumerate(f_v):
|
||||
file.write( ' %d/%d' % (\
|
||||
v.index + totverts,\
|
||||
totuvco + uv_face_mapping[f_index][vi])) # vert, uv
|
||||
|
||||
face_vert_index += len(f_v)
|
||||
|
||||
else: # No UV's
|
||||
if EXPORT_NORMALS:
|
||||
if f_smooth: # Smoothed, use vertex normals
|
||||
for v in f_v:
|
||||
file.write( ' %d//%d' %
|
||||
(v.index + totverts, globalNormals[ veckey3d(v.normal) ]) )
|
||||
else: # No smoothing, face normals
|
||||
no = globalNormals[ veckey3d(f.normal) ]
|
||||
for v in f_v:
|
||||
file.write( ' %d//%d' % (v.index + totverts, no) )
|
||||
else: # No Normals
|
||||
for v in f_v:
|
||||
file.write( ' %d' % (v.index + totverts) )
|
||||
|
||||
file.write('\n')
|
||||
|
||||
# Write edges.
|
||||
if EXPORT_EDGES:
|
||||
for ed in edges:
|
||||
if ed.is_loose:
|
||||
file.write('f %d %d\n' % (ed.vertices[0] + totverts, ed.vertices[1] + totverts))
|
||||
|
||||
# Make the indicies global rather then per mesh
|
||||
totverts += len(me_verts)
|
||||
if faceuv:
|
||||
totuvco += uv_unique_count
|
||||
|
||||
# clean up
|
||||
bpy.data.meshes.remove(me)
|
||||
|
||||
if ob_main.dupli_type != 'NONE':
|
||||
ob_main.free_dupli_list()
|
||||
|
||||
file.close()
|
||||
|
||||
|
||||
# Now we have all our materials, save them
|
||||
if EXPORT_MTL:
|
||||
write_mtl(scene, mtlfilepath, EXPORT_COPY_IMAGES, mtl_dict)
|
||||
# if EXPORT_COPY_IMAGES:
|
||||
# dest_dir = os.path.basename(filepath)
|
||||
# # dest_dir = filepath
|
||||
# # # Remove chars until we are just the path.
|
||||
# # while dest_dir and dest_dir[-1] not in '\\/':
|
||||
# # dest_dir = dest_dir[:-1]
|
||||
# if dest_dir:
|
||||
# copy_images(dest_dir, mtl_dict)
|
||||
# else:
|
||||
# print('\tError: "%s" could not be used as a base for an image path.' % filepath)
|
||||
|
||||
print("OBJ Export time: %.2f" % (time.clock() - time1))
|
||||
|
||||
#
|
||||
def _write(context, filepath,
           EXPORT_TRI,  # ok
           EXPORT_EDGES,
           EXPORT_NORMALS,  # not yet
           EXPORT_NORMALS_HQ,  # not yet
           EXPORT_UV,  # ok
           EXPORT_MTL,
           EXPORT_COPY_IMAGES,
           EXPORT_APPLY_MODIFIERS,  # ok
           EXPORT_ROTX90,  # wrong
           EXPORT_BLEN_OBS,
           EXPORT_GROUP_BY_OB,
           EXPORT_GROUP_BY_MAT,
           EXPORT_KEEP_VERT_ORDER,
           EXPORT_POLYGROUPS,
           EXPORT_CURVE_AS_NURBS,
           EXPORT_SEL_ONLY,  # ok
           EXPORT_ALL_SCENES,  # XXX not working atm
           EXPORT_ANIMATION):  # Not used
    """Top-level OBJ export driver.

    Builds the output filename from *filepath* (optionally inserting the
    scene name and frame number), iterates the scenes/frames to export,
    and calls write_file() once per output file. All EXPORT_* flags are
    forwarded to write_file() unchanged.

    :arg context: the Blender context the operator runs in.
    :arg filepath: target path; scene/frame suffixes are inserted before
        the extension when exporting all scenes or an animation.
    """
    base_name, ext = os.path.splitext(filepath)
    context_name = [base_name, '', '', ext]  # Base name, scene name, frame number, extension

    orig_scene = context.scene

    # Exit edit mode before exporting, so current object states are exported properly.
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')

    # XXX only exporting one scene atm since changing
    # current scene is not possible.
    # Brecht says that ideally in 2.5 we won't need such a function,
    # allowing multiple scenes open at once.
    export_scenes = [orig_scene]

    # Export all scenes.
    for scene in export_scenes:
        orig_frame = scene.frame_current

        if EXPORT_ALL_SCENES:  # Add scene name into the context_name
            # WARNING, its possible that this could cause a collision. we could fix if were feeling parranoied.
            context_name[1] = '_%s' % bpy.path.clean_name(scene.name)

        # Export an animation?
        if EXPORT_ANIMATION:
            # BUGFIX: was 'context.frame_end' -- the end frame must come from
            # the scene being exported, same as the start frame.
            scene_frames = range(scene.frame_start, scene.frame_end + 1)  # Up to and including the end frame.
        else:
            scene_frames = [orig_frame]  # Dont export an animation.

        # Loop through all frames in the scene and export.
        for frame in scene_frames:
            if EXPORT_ANIMATION:  # Add frame to the filepath.
                context_name[2] = '_%.6d' % frame

            scene.frame_current = frame
            if EXPORT_SEL_ONLY:
                objects = context.selected_objects
            else:
                objects = scene.objects

            full_path = ''.join(context_name)

            # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
            # EXPORT THE FILE.
            write_file(full_path, objects, scene,
                       EXPORT_TRI,
                       EXPORT_EDGES,
                       EXPORT_NORMALS,
                       EXPORT_NORMALS_HQ,
                       EXPORT_UV,
                       EXPORT_MTL,
                       EXPORT_COPY_IMAGES,
                       EXPORT_APPLY_MODIFIERS,
                       EXPORT_ROTX90,
                       EXPORT_BLEN_OBS,
                       EXPORT_GROUP_BY_OB,
                       EXPORT_GROUP_BY_MAT,
                       EXPORT_KEEP_VERT_ORDER,
                       EXPORT_POLYGROUPS,
                       EXPORT_CURVE_AS_NURBS)

        # Restore the frame we started on.
        scene.frame_current = orig_frame

    # Restore old active scene.
    # orig_scene.makeCurrent()
    # Window.WaitCursor(0)
|
||||
|
||||
|
||||
'''
|
||||
Currently the exporter lacks these features:
|
||||
* multiple scene export (only active scene is written)
|
||||
* particles
|
||||
'''
|
||||
|
||||
|
||||
def save(operator, context, filepath="",
         use_triangles=False,
         use_edges=False,
         use_normals=False,
         use_hq_normals=False,
         use_uvs=True,
         use_materials=True,
         copy_images=False,
         use_modifiers=True,
         use_rotate_x90=True,
         use_blen_objects=True,
         group_by_object=False,
         group_by_material=False,
         keep_vertex_order=False,
         use_vertex_groups=False,
         use_nurbs=True,
         use_selection=True,
         use_all_scenes=False,
         use_animation=False,
         ):
    """Operator entry point for the OBJ exporter.

    Maps the UI-level option names onto the internal EXPORT_* keyword
    arguments of _write() and runs the export.
    """
    # Collect the option translation in one table so the mapping between
    # operator properties and exporter flags is easy to audit.
    keywords = {
        "EXPORT_TRI": use_triangles,
        "EXPORT_EDGES": use_edges,
        "EXPORT_NORMALS": use_normals,
        "EXPORT_NORMALS_HQ": use_hq_normals,
        "EXPORT_UV": use_uvs,
        "EXPORT_MTL": use_materials,
        "EXPORT_COPY_IMAGES": copy_images,
        "EXPORT_APPLY_MODIFIERS": use_modifiers,
        "EXPORT_ROTX90": use_rotate_x90,
        "EXPORT_BLEN_OBS": use_blen_objects,
        "EXPORT_GROUP_BY_OB": group_by_object,
        "EXPORT_GROUP_BY_MAT": group_by_material,
        "EXPORT_KEEP_VERT_ORDER": keep_vertex_order,
        "EXPORT_POLYGROUPS": use_vertex_groups,
        "EXPORT_CURVE_AS_NURBS": use_nurbs,
        "EXPORT_SEL_ONLY": use_selection,
        "EXPORT_ALL_SCENES": use_all_scenes,
        "EXPORT_ANIMATION": use_animation,
    }

    _write(context, filepath, **keywords)

    return {'FINISHED'}
|
@ -1,61 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# To support reload properly, try to access a package var, if it's there, reload everything
|
||||
if "bpy" in locals():
    # Package was already imported: force-reload the exporter submodule so
    # script edits take effect without restarting Blender.
    import sys
    reload(sys.modules.get("io_scene_x3d.export_x3d", sys))
|
||||
|
||||
|
||||
import bpy
|
||||
from bpy.props import *
|
||||
from io_utils import ExportHelper
|
||||
|
||||
|
||||
class ExportX3D(bpy.types.Operator, ExportHelper):
    '''Export selection to Extensible 3D file (.x3d)'''
    bl_idname = "export_scene.x3d"
    bl_label = 'Export X3D'

    # ExportHelper appends this to the chosen filename.
    filename_ext = ".x3d"

    # User-tunable export options, shown in the file-selector sidebar.
    use_apply_modifiers = BoolProperty(
            name="Apply Modifiers",
            description="Use transformed mesh data from each object",
            default=True)
    use_triangulate = BoolProperty(
            name="Triangulate",
            description="Triangulate quads.",
            default=False)
    use_compress = BoolProperty(
            name="Compress",
            description="GZip the resulting file, requires a full python install",
            default=False)

    def execute(self, context):
        # Import lazily so the add-on registers quickly.
        import io_scene_x3d.export_x3d
        return io_scene_x3d.export_x3d.save(self, context, **self.properties)
|
||||
|
||||
|
||||
def menu_func(self, context):
    """File > Export menu entry for the X3D exporter."""
    self.layout.operator(ExportX3D.bl_idname, text="X3D Extensible 3D (.x3d)")


def register():
    """Hook the exporter into the File > Export menu."""
    bpy.types.INFO_MT_file_export.append(menu_func)


def unregister():
    """Remove the File > Export menu entry again."""
    bpy.types.INFO_MT_file_export.remove(menu_func)

# NOTES
# - blender version is hardcoded

if __name__ == "__main__":
    register()
|
@ -1,117 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# To support reload properly, try to access a package var, if it's there, reload everything
|
||||
if "bpy" in locals():
    # Package was already imported: force-reload both submodules so
    # script edits take effect without restarting Blender.
    import sys
    reload(sys.modules.get("io_shape_mdd.import_mdd", sys))
    reload(sys.modules.get("io_shape_mdd.export_mdd", sys))
|
||||
|
||||
|
||||
import bpy
|
||||
from bpy.props import *
|
||||
from io_utils import ExportHelper, ImportHelper
|
||||
|
||||
|
||||
class ImportMDD(bpy.types.Operator, ImportHelper):
    '''Import MDD vertex keyframe file to shape keys'''
    bl_idname = "import_shape.mdd"
    bl_label = "Import MDD"

    filename_ext = ".mdd"
    filter_glob = StringProperty(default="*.mdd", options={'HIDDEN'})

    # Import options shown in the file-selector sidebar.
    frame_start = IntProperty(
            name="Start Frame",
            description="Start frame for inserting animation",
            min=-300000, max=300000,
            default=0)
    frame_step = IntProperty(name="Step", min=1, max=1000, default=1)

    @classmethod
    def poll(cls, context):
        # Only meaningful on an active mesh object.
        active = context.active_object
        return (active and active.type == 'MESH')

    def execute(self, context):
        # initialize from scene if unset
        scene = context.scene
        if not self.frame_start:
            self.frame_start = scene.frame_current

        # Import lazily so the add-on registers quickly.
        import io_shape_mdd.import_mdd
        return io_shape_mdd.import_mdd.load(self, context, **self.properties)
|
||||
|
||||
class ExportMDD(bpy.types.Operator, ExportHelper):
    '''Animated mesh to MDD vertex keyframe file'''
    bl_idname = "export_shape.mdd"
    bl_label = "Export MDD"

    filename_ext = ".mdd"

    # get first scene to get min and max properties for frames, fps
    minframe = 1
    maxframe = 300000
    minfps = 1
    maxfps = 120

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.
    fps = IntProperty(
            name="Frames Per Second",
            description="Number of frames/second",
            min=minfps, max=maxfps,
            default=25)
    frame_start = IntProperty(
            name="Start Frame",
            description="Start frame for baking",
            min=minframe, max=maxframe,
            default=1)
    frame_end = IntProperty(
            name="End Frame",
            description="End frame for baking",
            min=minframe, max=maxframe,
            default=250)

    @classmethod
    def poll(cls, context):
        # Only meaningful on an active mesh object.
        active = context.active_object
        return (active and active.type == 'MESH')

    def execute(self, context):
        # initialize from scene if unset
        scene = context.scene
        if not self.frame_start:
            self.frame_start = scene.frame_start
        if not self.frame_end:
            self.frame_end = scene.frame_end
        if not self.fps:
            self.fps = scene.render.fps

        # Import lazily so the add-on registers quickly.
        import io_shape_mdd.export_mdd
        return io_shape_mdd.export_mdd.save(self, context, **self.properties)
|
||||
|
||||
|
||||
def menu_func_import(self, context):
    """File > Import menu entry for the MDD importer."""
    self.layout.operator(ImportMDD.bl_idname, text="Lightwave Point Cache (.mdd)")


def menu_func_export(self, context):
    """File > Export menu entry for the MDD exporter."""
    self.layout.operator(ExportMDD.bl_idname, text="Lightwave Point Cache (.mdd)")


def register():
    """Hook the MDD operators into the File > Import/Export menus."""
    bpy.types.INFO_MT_file_import.append(menu_func_import)
    bpy.types.INFO_MT_file_export.append(menu_func_export)


def unregister():
    """Remove the MDD menu entries again."""
    bpy.types.INFO_MT_file_import.remove(menu_func_import)
    bpy.types.INFO_MT_file_export.remove(menu_func_export)

if __name__ == "__main__":
    register()
|
@ -1,131 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# Contributors: Bill L.Nieuwendorp
|
||||
|
||||
"""
|
||||
This script Exports Lightwaves MotionDesigner format.
|
||||
|
||||
The .mdd format has become quite a popular pipeline format
|
||||
for moving animations from package to package.
|
||||
|
||||
Be sure not to use modifiers that change the number or order of verts in the mesh
|
||||
"""
|
||||
|
||||
import bpy
|
||||
import mathutils
|
||||
from struct import pack
|
||||
|
||||
|
||||
def zero_file(filepath):
    '''
    If a file fails, this replaces it with 1 char, better not remove it?

    Overwrites *filepath* with a single newline so a failed export never
    leaves a truncated/corrupt cache behind.

    :arg filepath: path of the file to overwrite.
    '''
    # 'with' guarantees the handle is closed even if the write raises,
    # unlike the previous open()/write()/close() sequence.
    with open(filepath, 'w') as file:
        file.write('\n')  # apparently macosx needs some data in a blank file?
|
||||
|
||||
|
||||
def check_vertcount(mesh, vertcount):
    '''
    check and make sure the vertcount is consistent throughout the frame range

    :arg mesh: object exposing a ``vertices`` sequence.
    :arg vertcount: the expected number of vertices.
    :raises Exception: when the counts differ; cleanup (closing the output
        file / zero_file) is the caller's responsibility.
    '''
    if len(mesh.vertices) != vertcount:
        # NOTE: the statements that previously followed this raise
        # (f.close(); zero_file(filepath); return) were unreachable and
        # referenced names undefined in this scope -- removed.
        raise Exception('Error, number of verts has changed during animation, cannot export')
|
||||
|
||||
|
||||
def save(operator, context, filepath="", frame_start=1, frame_end=300, fps=25):
    """Bake the active object's evaluated mesh into an .mdd point cache.

    Samples the mesh once per frame in [frame_start, frame_end], writing a
    big-endian header (frame count, vert count), the per-frame times in
    seconds, a rest frame, then the flattened vertex coordinates per frame.

    (Legacy 2.4x notes: Blender.Window.WaitCursor(1);
     mesh_orig = Mesh.New(); mesh_orig.getFromObject(obj.name))
    """
    scene = context.scene
    obj = context.object

    # Sample object-mode state, not edit-mode buffers.
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')

    orig_frame = scene.frame_current
    scene.frame_set(frame_start)
    me = obj.create_mesh(scene, True, 'PREVIEW')

    # Flip y and z (MDD axis convention).
    mat_flip = mathutils.Matrix(
        [1.0, 0.0, 0.0, 0.0],
        [0.0, 0.0, 1.0, 0.0],
        [0.0, 1.0, 0.0, 0.0],
        [0.0, 0.0, 0.0, 1.0],
    )

    numverts = len(me.vertices)
    numframes = frame_end - frame_start + 1
    fps = float(fps)

    f = open(filepath, 'wb')  # no Errors yet: Safe to create file

    # Write the header
    f.write(pack(">2i", numframes, numverts))

    # Write the frame times (should we use the time IPO??)
    f.write(pack(">%df" % (numframes), *[frame / fps for frame in range(numframes)]))  # seconds

    # rest frame needed to keep frames in sync
    # (legacy: Blender.Set('curframe', frame_start); me_tmp.getFromObject(obj.name))
    check_vertcount(me, numverts)
    me.transform(mat_flip * obj.matrix_world)
    f.write(pack(">%df" % (numverts * 3), *[axis for v in me.vertices for axis in v.co]))

    for frame in range(frame_start, frame_end + 1):  # in order to start at desired frame
        # (legacy: Blender.Set('curframe', frame); me_tmp.getFromObject(obj.name))
        scene.frame_set(frame)
        me = obj.create_mesh(scene, True, 'PREVIEW')
        check_vertcount(me, numverts)
        me.transform(mat_flip * obj.matrix_world)

        # Write the vertex data
        f.write(pack(">%df" % (numverts * 3), *[axis for v in me.vertices for axis in v.co]))

    # (legacy: me_tmp.vertices = None)
    f.close()

    print('MDD Exported: %r frames:%d\n' % (filepath, numframes - 1))
    # (legacy: Blender.Window.WaitCursor(0); Blender.Set('curframe', orig_frame))
    scene.frame_set(orig_frame)

    return {'FINISHED'}
|
@ -1,105 +0,0 @@
|
||||
# ##### BEGIN GPL LICENSE BLOCK #####
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
# ##### END GPL LICENSE BLOCK #####
|
||||
|
||||
# <pep8 compliant>
|
||||
|
||||
# mdd importer by Bill L.Nieuwendorp
|
||||
# conversion to blender 2.5: Ivo Grigull (loolarge)
|
||||
#
|
||||
# Warning: if the vertex order or vertex count differs from the
# original model the mdd was baked out from, there will be strange
# behavior.
|
||||
#
|
||||
# vertex animation to ShapeKeys with ipo and gives the frame a value of 1.0
|
||||
# A modifier to read mdd files would be Ideal but thats for another day :)
|
||||
#
|
||||
# Please send any fixes,updates,bugs to Slow67_at_Gmail.com
|
||||
# Bill Niewuendorp
|
||||
|
||||
import bpy
|
||||
from struct import unpack
|
||||
|
||||
|
||||
def load(operator, context, filepath, frame_start=0, frame_step=1):
    """Read an .mdd vertex cache and apply it to the active mesh object
    as one shape key per frame, keyed 0 -> 1 -> 0 around each frame.

    NOTE(review): frame_step is accepted but not used below -- confirm
    before relying on it.
    """
    scene = context.scene
    obj = context.object

    print('\n\nimporting mdd %r' % filepath)

    # Work on object-mode data.
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')

    file = open(filepath, 'rb')
    # Header: frame count and point count, then per-frame times.
    frames, points = unpack(">2i", file.read(8))
    time = unpack((">%df" % frames), file.read(frames * 4))

    print('\tpoints:%d frames:%d' % (points, frames))

    # If target object doesn't have Basis shape key, create it.
    try:
        num_keys = len(obj.data.shape_keys.keys)
    except:
        basis = obj.add_shape_key()
        basis.name = "Basis"
        obj.data.update()

    scene.frame_current = frame_start

    def UpdateMesh(ob, fr):
        """Read one frame's coordinates from `file` into a new shape key
        and keyframe its influence around the current frame."""
        # Insert new shape key
        new_shapekey = obj.add_shape_key()
        new_shapekey.name = ("frame_%.4d" % fr)
        new_shapekey_name = new_shapekey.name

        key_count = len(obj.data.shape_keys.keys)
        obj.active_shape_key_index = key_count - 1
        index = key_count - 1
        obj.show_shape_key = True

        verts = obj.data.shape_keys.keys[len(obj.data.shape_keys.keys) - 1].data

        for v in verts:  # 12 is the size of 3 floats
            v.co[:] = unpack('>3f', file.read(12))
        # me.update()
        obj.show_shape_key = False

        # Keyframe the influence: 0 one frame before, 1 on the frame,
        # 0 one frame after.
        shape_keys = obj.data.shape_keys

        scene.frame_current -= 1
        obj.data.shape_keys.keys[index].value = 0.0
        shape_keys.keys[len(obj.data.shape_keys.keys) - 1].keyframe_insert("value")

        scene.frame_current += 1
        obj.data.shape_keys.keys[index].value = 1.0
        shape_keys.keys[len(obj.data.shape_keys.keys) - 1].keyframe_insert("value")

        scene.frame_current += 1
        obj.data.shape_keys.keys[index].value = 0.0
        shape_keys.keys[len(obj.data.shape_keys.keys) - 1].keyframe_insert("value")

        obj.data.update()

    # One shape key per cached frame, in file order.
    for i in range(frames):
        UpdateMesh(obj, i)

    return {'FINISHED'}
|
@ -35,14 +35,14 @@ class MeshSelectInteriorFaces(bpy.types.Operator):
|
||||
|
||||
def execute(self, context):
|
||||
ob = context.active_object
|
||||
bpy.ops.mesh.selection_type(type='FACE')
|
||||
context.tool_settings.mesh_select_mode = False, False, True
|
||||
is_editmode = (ob.mode == 'EDIT')
|
||||
if is_editmode:
|
||||
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
|
||||
|
||||
mesh = ob.data
|
||||
|
||||
face_list = [face for face in mesh.faces]
|
||||
face_list = mesh.faces[:]
|
||||
face_edge_keys = [face.edge_keys for face in face_list]
|
||||
|
||||
edge_face_count = mesh.edge_face_count_dict
|
||||
@ -173,11 +173,11 @@ class MeshMirrorUV(bpy.types.Operator):
|
||||
|
||||
|
||||
def register():
|
||||
pass
|
||||
bpy.utils.register_module(__name__)
|
||||
|
||||
|
||||
def unregister():
|
||||
pass
|
||||
bpy.utils.unregister_module(__name__)
|
||||
|
||||
if __name__ == "__main__":
|
||||
register()
|
||||
|
@ -40,14 +40,14 @@ def pose_info():
|
||||
binfo["pbone"] = pbone
|
||||
binfo["matrix_local"] = bone.matrix_local.copy()
|
||||
try:
|
||||
binfo["matrix_local_inv"] = binfo["matrix_local"].copy().invert()
|
||||
binfo["matrix_local_inv"] = binfo["matrix_local"].inverted()
|
||||
except:
|
||||
binfo["matrix_local_inv"] = Matrix()
|
||||
|
||||
binfo["matrix"] = bone.matrix.copy()
|
||||
binfo["matrix_pose"] = pbone.matrix.copy()
|
||||
try:
|
||||
binfo["matrix_pose_inv"] = binfo["matrix_pose"].copy().invert()
|
||||
binfo["matrix_pose_inv"] = binfo["matrix_pose"].inverted()
|
||||
except:
|
||||
binfo["matrix_pose_inv"] = Matrix()
|
||||
|
||||
@ -67,7 +67,7 @@ def pose_info():
|
||||
matrix = binfo_parent["matrix_pose_inv"] * matrix
|
||||
rest_matrix = binfo_parent["matrix_local_inv"] * rest_matrix
|
||||
|
||||
matrix = rest_matrix.copy().invert() * matrix
|
||||
matrix = rest_matrix.inverted() * matrix
|
||||
|
||||
binfo["matrix_key"] = matrix.copy()
|
||||
|
||||
@ -75,8 +75,6 @@ def pose_info():
|
||||
|
||||
|
||||
def bake(frame_start, frame_end, step=1, only_selected=False):
|
||||
# import nla; reload(nla); nla.bake()
|
||||
|
||||
scene = bpy.context.scene
|
||||
obj = bpy.context.object
|
||||
pose = obj.pose
|
||||
@ -106,9 +104,9 @@ def bake(frame_start, frame_end, step=1, only_selected=False):
|
||||
for f in frame_range:
|
||||
matrix = info_ls[int((f - frame_start) / step)][name]["matrix_key"]
|
||||
|
||||
#pbone.location = matrix.translation_part()
|
||||
#pbone.rotation_quaternion = matrix.to_quat()
|
||||
pbone.matrix_local = [f for v in matrix for f in v]
|
||||
#pbone.location = matrix.to_translation()
|
||||
#pbone.rotation_quaternion = matrix.to_quaternion()
|
||||
pbone.matrix_basis = matrix
|
||||
|
||||
pbone.keyframe_insert("location", -1, f, name)
|
||||
|
||||
@ -149,7 +147,7 @@ class BakeAction(bpy.types.Operator):
|
||||
|
||||
def execute(self, context):
|
||||
|
||||
action = bake(self.frame_start, self.frame_end, self.step, self.show_only_selected)
|
||||
action = bake(self.frame_start, self.frame_end, self.step, self.only_selected)
|
||||
|
||||
# basic cleanup, could move elsewhere
|
||||
for fcu in action.fcurves:
|
||||
@ -177,13 +175,11 @@ class BakeAction(bpy.types.Operator):
|
||||
|
||||
|
||||
def register():
|
||||
pass
|
||||
# bpy.types.INFO_MT_mesh_add.append(menu_func)
|
||||
bpy.utils.register_module(__name__)
|
||||
|
||||
|
||||
def unregister():
|
||||
pass
|
||||
# bpy.types.INFO_MT_mesh_add.remove(menu_func)
|
||||
bpy.utils.unregister_module(__name__)
|
||||
|
||||
if __name__ == "__main__":
|
||||
register()
|
||||
|