VideoTexture module.

The only build system known to work for sure is the MSVC project files. I've done my
best to update the other build systems, but I count on the community to check and fix them.

This is Zdeno Miklas' video texture plugin ported to trunk.
The original plugin API is maintained (it can be found at http://home.scarlet.be/~tsi46445/blender/blendVideoTex.html)
EXCEPT for the following:

The module name is changed to VideoTexture (instead of blendVideoTex).

A new (and only) video source is now available: VideoFFmpeg()
You must pass 1 to 5 arguments when you create it (you can use named arguments):
VideoFFmpeg(file) : play a video file
VideoFFmpeg(file, capture, rate, width, height) : start a live video capture

file:
In the first form, file is a video file name, relative to the startup directory.
It can also be a URL; FFmpeg will happily stream a video from a network source.
In the second form, file is empty or is a hint for the format of the video capture.
In Windows, file is ignored and should be empty or not specified.
In Linux, FFmpeg supports two types of device: VideoForLinux and DV1394.
The user specifies the type of device with the file parameter:
   [<device_type>][:<standard>]
   <device_type> : 'v4l' for VideoForLinux, 'dv1394' for DV1394; defaults to 'v4l'
   <standard>    : 'pal', 'secam' or 'ntsc'; defaults to 'ntsc'
The driver name is constructed automatically from the device type:
   v4l   : /dev/video<capture>
   dv1394: /dev/dv1394/<capture>
If you have a different driver name, you can specify the driver name explicitly
instead of the device type. Examples of valid file parameters:
   /dev/v4l/video0:pal
   /dev/ieee1394/1:ntsc
   dv1394:ntsc
   v4l:pal
   :secam

capture:
Defines the index number of the capture source, starting from 0. The first capture
device is always 0. The VideoTexture module knows that you want to start a live
video capture when you set this parameter to a number >= 0; a number < 0 indicates
video file playback. The default value is -1.

rate:
The capture frame rate, 25 frames/sec by default.

width:
height:
Width and height of the video capture in pixels; the default value is 0.
In Windows you must specify these values, and they must match one of the modes
supported by the capture device. For example, if you have a webcam that can capture
at 160x120, 320x240 or 640x480, you must specify one of these pairs of values or
opening the video source will fail.
In Linux, default values are provided by the VideoForLinux driver if you don't
specify width and height.
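For instance, a live capture source could be set up like this (a minimal sketch;
the device index, rate and 640x480 size are assumed values that must match what
your camera actually supports):

import VideoTexture
# capture=0 selects the first capture device; width/height must be a mode the
# device supports (assumed here), otherwise opening the source fails
vidSrc = VideoTexture.VideoFFmpeg(file='', capture=0, rate=25.0, width=640, height=480)
vidSrc.play()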
Simple example
**************

1. Texture definition script:

import VideoTexture

contr = GameLogic.getCurrentController()
obj = contr.getOwner()
if not hasattr(GameLogic, 'video'):
    matID = VideoTexture.materialID(obj, 'MAVideoMat')
    GameLogic.video = VideoTexture.Texture(obj, matID)
    GameLogic.vidSrc = VideoTexture.VideoFFmpeg('trailer_400p.ogg')
    # Streaming is also possible:
    #GameLogic.vidSrc = VideoTexture.VideoFFmpeg('http://10.32.1.10/trailer_400p.ogg')
    GameLogic.vidSrc.repeat = -1
    # If the video dimensions are not a power of 2, scaling must be done before
    # sending the texture to the GPU. This is done by default with gluScaleImage(),
    # but you can also use a faster, less precise scaling by setting scale to True.
    # The best approach is to convert the video offline so its dimensions are right.
    GameLogic.vidSrc.scale = True
    # FFmpeg always delivers the video image upside down, so flipping is enabled automatically
    #GameLogic.vidSrc.flip = True

if contr.getSensors()[0].isPositive():
    GameLogic.video.source = GameLogic.vidSrc
    GameLogic.vidSrc.play()

2. Texture refresh script:

obj = GameLogic.getCurrentController().getOwner()
if hasattr(GameLogic, 'video'):
    GameLogic.video.refresh(True)

You can download this demo here:
http://home.scarlet.be/~tsi46445/blender/VideoTextureDemo.blend
http://home.scarlet.be/~tsi46445/blender/trailer_400p.ogg
/* $Id$
-----------------------------------------------------------------------------
This source file is part of VideoTexture library

Copyright (c) 2007 The Zdeno Ash Miklas

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA, or go to
http://www.gnu.org/copyleft/lesser.txt.
-----------------------------------------------------------------------------
*/

#include "ImageBase.h"

#include <vector>
#include <string.h>

#include <PyObjectPlus.h>
#include <structmember.h>

#include "FilterBase.h"
#include "Exception.h"

// ImageBase class implementation

// constructor
ImageBase::ImageBase (bool staticSrc) : m_image(NULL), m_imgSize(0),
	m_avail(false), m_scale(false), m_scaleChange(false), m_flip(false),
	m_staticSources(staticSrc), m_pyfilter(NULL)
{
	m_size[0] = m_size[1] = 0;
}

// destructor
ImageBase::~ImageBase (void)
{
	// release image
	delete [] m_image;
}

// release python objects
bool ImageBase::release (void)
{
	// iterate sources
	for (ImageSourceList::iterator it = m_sources.begin(); it != m_sources.end(); ++it)
	{
		// release source object
		delete *it;
		*it = NULL;
	}
	// release filter object
	Py_XDECREF(m_pyfilter);
	m_pyfilter = NULL;
	return true;
}

// get image
unsigned int * ImageBase::getImage (unsigned int texId)
{
	// if image is not available
	if (!m_avail)
	{
		// if there are any sources
		if (!m_sources.empty())
		{
			// get images from sources
			for (ImageSourceList::iterator it = m_sources.begin(); it != m_sources.end(); ++it)
				// get source image
				(*it)->getImage();
			// init image
			init(m_sources[0]->getSize()[0], m_sources[0]->getSize()[1]);
		}
		// calculate new image
		calcImage(texId);
	}
	// if image is available, return it, otherwise NULL
	return m_avail ? m_image : NULL;
}

// refresh image source
void ImageBase::refresh (void)
{
	// invalidate this image
	m_avail = false;
	// refresh all sources
	for (ImageSourceList::iterator it = m_sources.begin(); it != m_sources.end(); ++it)
		(*it)->refresh();
}
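
// Note on the update cycle: refresh() only invalidates the image and
// propagates the invalidation to all sources; the new image is computed
// lazily on the next getImage() call. A typical frame therefore looks
// like this (sketch):
//
//   image->refresh();                            // mark everything stale
//   unsigned int * px = image->getImage(texId);  // pulls sources, runs calcImage()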

// get source object
PyImage * ImageBase::getSource (const char * id)
{
	// find source
	ImageSourceList::iterator src = findSource(id);
	// return it, if found
	return src != m_sources.end() ? (*src)->getSource() : NULL;
}

// set source object
bool ImageBase::setSource (const char * id, PyImage * source)
{
	// find source
	ImageSourceList::iterator src = findSource(id);
	// check source loop
	if (source != NULL && source->m_image->loopDetect(this))
		return false;
	// if found, set new object
	if (src != m_sources.end())
	{
		// if the new object is not empty or sources are static
		if (source != NULL || m_staticSources)
			// replace previous source
			(*src)->setSource(source);
		// otherwise delete source
		else
			m_sources.erase(src);
	}
	// if source is not found and adding is allowed
	else if (!m_staticSources)
	{
		// create new source
		ImageSource * newSrc = newSource(id);
		// if source was created, set its object and add it to the source list
		// (the null check must come before newSrc is used)
		if (newSrc != NULL)
		{
			newSrc->setSource(source);
			m_sources.push_back(newSrc);
		}
		else
			return false;
	}
	// otherwise source wasn't set
	else
		return false;
	// source was set
	return true;
}
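
// Semantics of setSource (sketch): with dynamic sources,
//   img->setSource("source", other);  // adds or replaces the named source
//   img->setSource("source", NULL);   // removes it again
// With static sources (m_staticSources == true) the source set is fixed:
// NULL only clears the slot, and unknown ids are rejected.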

// set pixel filter
void ImageBase::setFilter (PyFilter * filt)
{
	// reference new filter
	if (filt != NULL) Py_INCREF(filt);
	// release previous filter
	Py_XDECREF(m_pyfilter);
	// set new filter
	m_pyfilter = filt;
}

// initialize image data
void ImageBase::init (short width, short height)
{
	// if image has to be scaled
	if (m_scale)
	{
		// recalc sizes of image
		width = calcSize(width);
		height = calcSize(height);
	}
	// if sizes differ
	if (width != m_size[0] || height != m_size[1])
	{
		// new buffer size
		unsigned int newSize = width * height;
		// if new buffer is larger than previous
		if (newSize > m_imgSize)
		{
			// set new buffer size
			m_imgSize = newSize;
			// release previous and create new buffer
			delete [] m_image;
			m_image = new unsigned int[m_imgSize];
		}
		// new image size
		m_size[0] = width;
		m_size[1] = height;
		// scale was processed
		m_scaleChange = false;
	}
}
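
// Note: the pixel buffer only ever grows (m_imgSize tracks the allocated
// capacity, m_size the current dimensions), so switching to a smaller
// source reuses the existing allocation instead of reallocating.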

// find source
ImageSourceList::iterator ImageBase::findSource (const char * id)
{
	// iterate sources
	ImageSourceList::iterator it;
	for (it = m_sources.begin(); it != m_sources.end(); ++it)
		// if id matches, return iterator
		if ((*it)->is(id)) return it;
	// source not found, return end iterator
	return it;
}

// check sources sizes
bool ImageBase::checkSourceSizes (void)
{
	// reference size
	short * refSize = NULL;
	// iterate sources
	for (ImageSourceList::iterator it = m_sources.begin(); it != m_sources.end(); ++it)
	{
		// get size of current source
		short * curSize = (*it)->getSize();
		// if size is available and is not empty
		if (curSize[0] != 0 && curSize[1] != 0) {
			// if reference size is not set
			if (refSize == NULL) {
				// set current size as reference
				refSize = curSize;
			// otherwise check with current size
			} else if (curSize[0] != refSize[0] || curSize[1] != refSize[1]) {
				// if they don't match, report it
				return false;
			}
		}
	}
	// all sizes match
	return true;
}

// compute the largest power of 2 not exceeding the given value
short ImageBase::calcSize (short size)
{
	// while there is more than 1 bit set in size value
	while ((size & (size - 1)) != 0)
		// clear the lowest set bit
		size = size & (size - 1);
	// return result
	return size;
}
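
// Worked examples: calcSize(512) == 512, calcSize(640) == 512,
// calcSize(120) == 64. Each size & (size - 1) step clears the lowest set
// bit, so only the highest set bit survives; the result always rounds
// down, not to the nearest power of 2.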

// perform loop detection
bool ImageBase::loopDetect (ImageBase * img)
{
	// if this object is the same as parameter, loop is detected
	if (this == img) return true;
	// check all sources
	for (ImageSourceList::iterator it = m_sources.begin(); it != m_sources.end(); ++it)
		// if source detected loop, return this result
		if ((*it)->getSource() != NULL && (*it)->getSource()->m_image->loopDetect(img))
			return true;
	// no loop detected
	return false;
}
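
// Example of a rejected cycle (sketch): if image A already uses B as one
// of its sources, then B->setSource("source", A) calls A->loopDetect(B),
// which walks A's source graph, reaches B, and returns true, so the
// assignment fails instead of creating an infinite A -> B -> A chain.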

// ImageSource class implementation

// constructor
ImageSource::ImageSource (const char * id) : m_source(NULL), m_image(NULL)
{
	// copy id, truncating it to the buffer size
	int idx;
	for (idx = 0; id[idx] != '\0' && idx < SourceIdSize - 1; ++idx)
		m_id[idx] = id[idx];
	m_id[idx] = '\0';
}

// destructor
ImageSource::~ImageSource (void)
{
	// release source
	setSource(NULL);
}

// compare id
bool ImageSource::is (const char * id)
{
	for (char * myId = m_id; *myId != '\0'; ++myId, ++id)
		if (*myId != *id) return false;
	return *id == '\0';
}

// set source object
void ImageSource::setSource (PyImage * source)
{
	// reference new source
	if (source != NULL) Py_INCREF(source);
	// release previous source
	Py_XDECREF(m_source);
	// set new source
	m_source = source;
}

// get image from source
unsigned int * ImageSource::getImage (void)
{
	// if source is available
	if (m_source != NULL)
		// get image from source
		m_image = m_source->m_image->getImage();
	// otherwise reset buffer
	else
		m_image = NULL;
	// return image
	return m_image;
}

// refresh source
void ImageSource::refresh (void)
{
	// if source is available, refresh it
	if (m_source != NULL) m_source->m_image->refresh();
}

// list of image types
PyTypeList pyImageTypes;

// functions for python interface

// object allocation
PyObject * Image_allocNew (PyTypeObject * type, PyObject * args, PyObject * kwds)
{
	// allocate object
	PyImage * self = reinterpret_cast<PyImage*>(type->tp_alloc(type, 0));
	// initialize object structure
	self->m_image = NULL;
	// return allocated object
	return reinterpret_cast<PyObject*>(self);
}

// object deallocation
void Image_dealloc (PyImage * self)
{
	// release object attributes
	if (self->m_image != NULL)
	{
		// if release requires deleting of object, do it
		if (self->m_image->release())
			delete self->m_image;
		self->m_image = NULL;
	}
}

// get image data
PyObject * Image_getImage (PyImage * self, void * closure)
{
	try
	{
		// get image
		unsigned int * image = self->m_image->getImage();
		return Py_BuildValue("s#", image, self->m_image->getBuffSize());
	}
	catch (Exception & exp)
	{
		exp.report();
	}
	Py_RETURN_NONE;
}
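
// Note: Py_BuildValue("s#", ...) copies the raw pixel buffer into a new
// Python string of getBuffSize() bytes, so the script receives a snapshot
// of the image, not a view into m_image.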

// get image size
PyObject * Image_getSize (PyImage * self, void * closure)
{
	return Py_BuildValue("(hh)", self->m_image->getSize()[0],
		self->m_image->getSize()[1]);
}

// refresh image
PyObject * Image_refresh (PyImage * self)
{
	self->m_image->refresh();
	Py_RETURN_NONE;
}

// get scale
PyObject * Image_getScale (PyImage * self, void * closure)
{
	if (self->m_image != NULL && self->m_image->getScale()) Py_RETURN_TRUE;
	else Py_RETURN_FALSE;
}

// set scale
int Image_setScale (PyImage * self, PyObject * value, void * closure)
{
	// check parameter, report failure
	if (value == NULL || !PyBool_Check(value))
	{
		PyErr_SetString(PyExc_TypeError, "The value must be a bool");
		return -1;
	}
	// set scale
	if (self->m_image != NULL) self->m_image->setScale(value == Py_True);
	// success
	return 0;
}

// get flip
PyObject * Image_getFlip (PyImage * self, void * closure)
{
	if (self->m_image != NULL && self->m_image->getFlip()) Py_RETURN_TRUE;
	else Py_RETURN_FALSE;
}

// set flip
int Image_setFlip (PyImage * self, PyObject * value, void * closure)
{
	// check parameter, report failure
	if (value == NULL || !PyBool_Check(value))
	{
		PyErr_SetString(PyExc_TypeError, "The value must be a bool");
		return -1;
	}
	// set flip
	if (self->m_image != NULL) self->m_image->setFlip(value == Py_True);
	// success
	return 0;
}

// get filter source object
PyObject * Image_getSource (PyImage * self, PyObject * args)
{
	// get arguments
	char * id;
	if (!PyArg_ParseTuple(args, "s:getSource", &id))
		return NULL;
	if (self->m_image != NULL)
	{
		// get source object
		PyObject * src = reinterpret_cast<PyObject*>(self->m_image->getSource(id));
		// if source is available
		if (src != NULL)
		{
			// return source
			Py_INCREF(src);
			return src;
		}
	}
	// source was not found
	Py_RETURN_NONE;
}

// set filter source object
PyObject * Image_setSource (PyImage * self, PyObject * args)
{
	// get arguments
	char * id;
	PyObject * obj;
	if (!PyArg_ParseTuple(args, "sO:setSource", &id, &obj))
		return NULL;
	if (self->m_image != NULL)
	{
		// check type of object
		if (pyImageTypes.in(obj->ob_type))
		{
			// convert to image struct
			PyImage * img = reinterpret_cast<PyImage*>(obj);
			// set source
			if (!self->m_image->setSource(id, img))
			{
				// if not set, report error
				PyErr_SetString(PyExc_RuntimeError, "Invalid source or id");
				return NULL;
			}
		}
		// else report error
		else
		{
			PyErr_SetString(PyExc_RuntimeError, "Invalid type of object");
			return NULL;
		}
	}
	// return none
	Py_RETURN_NONE;
}

// get pixel filter object
PyObject * Image_getFilter (PyImage * self, void * closure)
{
	// if image object is available
	if (self->m_image != NULL)
	{
		// pixel filter object
		PyObject * filt = reinterpret_cast<PyObject*>(self->m_image->getFilter());
		// if filter is present
		if (filt != NULL)
		{
			// return it
			Py_INCREF(filt);
			return filt;
		}
	}
	// otherwise return none
	Py_RETURN_NONE;
}

// set pixel filter object
int Image_setFilter (PyImage * self, PyObject * value, void * closure)
{
	// if image object is available
	if (self->m_image != NULL)
	{
		// check new value
		if (value == NULL || !pyFilterTypes.in(value->ob_type))
		{
			// report value error
			PyErr_SetString(PyExc_TypeError, "Invalid type of value");
			return -1;
		}
		// set new value
		self->m_image->setFilter(reinterpret_cast<PyFilter*>(value));
	}
	// return success
	return 0;
}