Code refactor: nodify Cycles camera and fix some mistakes in XML node read.

Differential Revision: https://developer.blender.org/D2016
Brecht Van Lommel 2016-05-08 00:28:21 +02:00
parent 9d5aead88f
commit 98ad473324
8 changed files with 137 additions and 130 deletions
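
For orientation: "nodifying" the camera means Camera now derives from the generic Node type and declares its parameters as sockets, so generic code such as the XML reader can set them by name. A condensed sketch of the pattern, pieced together from the render/camera.* hunks below (not a complete listing):

class Camera : public Node {
public:
	NODE_DECLARE;
	/* ... */
};

NODE_DEFINE(Camera)
{
	NodeType* type = NodeType::add("camera", create);
	SOCKET_FLOAT(shuttertime, "Shutter Time", 1.0f);
	/* ... more sockets ... */
	return type;
}

Camera::Camera()
: Node(node_type)
{
	/* remaining state that is not (or not only) covered by socket defaults */
}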

@@ -281,46 +281,14 @@ static ShaderSocketType xml_read_socket_type(pugi::xml_node node, const char *na
/* Camera */
static void xml_read_camera(const XMLReadState& state, pugi::xml_node node)
static void xml_read_camera(XMLReadState& state, pugi::xml_node node)
{
Camera *cam = state.scene->camera;
xml_read_int(&cam->width, node, "width");
xml_read_int(&cam->height, node, "height");
if(xml_read_float(&cam->fov, node, "fov"))
cam->fov = DEG2RADF(cam->fov);
xml_read_float(&cam->nearclip, node, "nearclip");
xml_read_float(&cam->farclip, node, "farclip");
xml_read_float(&cam->aperturesize, node, "aperturesize"); // 0.5*focallength/fstop
xml_read_float(&cam->focaldistance, node, "focaldistance");
xml_read_float(&cam->shuttertime, node, "shuttertime");
xml_read_float(&cam->aperture_ratio, node, "aperture_ratio");
if(xml_equal_string(node, "type", "orthographic"))
cam->type = CAMERA_ORTHOGRAPHIC;
else if(xml_equal_string(node, "type", "perspective"))
cam->type = CAMERA_PERSPECTIVE;
else if(xml_equal_string(node, "type", "panorama"))
cam->type = CAMERA_PANORAMA;
if(xml_equal_string(node, "panorama_type", "equirectangular"))
cam->panorama_type = PANORAMA_EQUIRECTANGULAR;
else if(xml_equal_string(node, "panorama_type", "fisheye_equidistant"))
cam->panorama_type = PANORAMA_FISHEYE_EQUIDISTANT;
else if(xml_equal_string(node, "panorama_type", "fisheye_equisolid"))
cam->panorama_type = PANORAMA_FISHEYE_EQUISOLID;
xml_read_float(&cam->fisheye_fov, node, "fisheye_fov");
xml_read_float(&cam->fisheye_lens, node, "fisheye_lens");
xml_read_bool(&cam->use_spherical_stereo, node, "use_spherical_stereo");
xml_read_float(&cam->interocular_distance, node, "interocular_distance");
xml_read_float(&cam->convergence_distance, node, "convergence_distance");
xml_read_float(&cam->sensorwidth, node, "sensorwidth");
xml_read_float(&cam->sensorheight, node, "sensorheight");
xml_read_node(state, cam, node);
cam->matrix = state.tfm;
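
With the per-parameter parsing delegated to xml_read_node(), only width, height and the transform are still read by hand; everything else is matched against the camera sockets declared in NODE_DEFINE(Camera) further down. A hedged sketch of the kind of element this now covers, assuming XML attribute names follow the registered socket names (the attribute set and values are illustrative, not taken from the commit):

/* Hypothetical scene input for the nodified reader:
 *
 *   <camera width="800" height="500"
 *           type="perspective" fov="0.7854"
 *           nearclip="0.1" farclip="100.0" />
 *
 * width/height are read explicitly as before; the remaining attributes go
 * through xml_read_node(), so fov is presumably expected in radians now that
 * the explicit DEG2RADF() conversion has been dropped from this function. */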

@@ -37,7 +37,7 @@ struct BlenderCamera {
float lens;
float shuttertime;
Camera::MotionPosition motion_position;
float shutter_curve[RAMP_TABLE_SIZE];
array<float> shutter_curve;
Camera::RollingShutterType rolling_shutter_type;
float rolling_shutter_duration;
@@ -460,7 +460,7 @@ static void blender_camera_sync(Camera *cam, BlenderCamera *bcam, int width, int
cam->rolling_shutter_type = bcam->rolling_shutter_type;
cam->rolling_shutter_duration = bcam->rolling_shutter_duration;
memcpy(cam->shutter_curve, bcam->shutter_curve, sizeof(cam->shutter_curve));
cam->shutter_curve = bcam->shutter_curve;
/* border */
cam->border = bcam->border;

@@ -98,11 +98,12 @@ static inline void curvemapping_minmax(/*const*/ BL::CurveMapping& cumap,
}
static inline void curvemapping_to_array(BL::CurveMapping& cumap,
float *data,
array<float>& data,
int size)
{
cumap.update();
BL::CurveMap curve = cumap.curves[0];
data.resize(size);
for(int i = 0; i < size; i++) {
float t = (float)i/(float)(size-1);
data[i] = curve.evaluate(t);
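
Since curvemapping_to_array() now takes an array<float> and resizes it itself, callers no longer need to hand in a pre-sized RAMP_TABLE_SIZE buffer. A minimal caller sketch under that assumption (cumap stands for a BL::CurveMapping from the surrounding sync code):

/* Sketch of the new call pattern: the helper resizes `curve` to `size`
 * and stores the mapping evaluated at i/(size-1) in curve[i]. */
array<float> curve;
curvemapping_to_array(cumap, curve, RAMP_TABLE_SIZE);
/* curve.size() == RAMP_TABLE_SIZE afterwards; an empty array is what the
 * reworked shutter_curve_eval() below treats as a constant 1.0 curve. */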

@@ -104,6 +104,11 @@ void Node::set(const SocketType& input, float3 value)
get_socket_value<float3>(this, input) = value;
}
void Node::set(const SocketType& input, const char *value)
{
set(input, ustring(value));
}
void Node::set(const SocketType& input, ustring value)
{
if(input.type == SocketType::STRING) {
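
The new const char* overload simply forwards to the ustring version. The likely motivation (inferred, not stated in the commit) is that a plain string literal passed to set() could otherwise be claimed by a different overload through a standard conversion rather than constructing a ustring. Illustration:

/* A string literal now reliably reaches the string/enum path: */
node->set(socket, "equirectangular");        /* forwards to set(input, ustring) */
/* which is equivalent to the explicit spelling: */
node->set(socket, ustring("equirectangular"));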

@@ -41,6 +41,7 @@ struct Node
void set(const SocketType& input, float value);
void set(const SocketType& input, float2 value);
void set(const SocketType& input, float3 value);
void set(const SocketType& input, const char *value);
void set(const SocketType& input, ustring value);
void set(const SocketType& input, const Transform& value);
void set(const SocketType& input, Node *value);

@@ -117,8 +117,9 @@ void xml_read_node(XMLReader& reader, Node *node, pugi::xml_node xml_node)
array<int> value;
value.resize(tokens.size());
for(size_t i = 0; i < value.size(); i++)
for(size_t i = 0; i < value.size(); i++) {
value[i] = (int)atoi(attr.value());
}
node->set(socket, value);
break;
}
@@ -127,7 +128,7 @@ void xml_read_node(XMLReader& reader, Node *node, pugi::xml_node xml_node)
case SocketType::POINT:
case SocketType::NORMAL:
{
array<float> value;
array<float3> value;
xml_read_float_array<3>(value, attr);
if(value.size() == 1) {
node->set(socket, value[0]);
@@ -161,11 +162,21 @@ void xml_read_node(XMLReader& reader, Node *node, pugi::xml_node xml_node)
break;
}
case SocketType::STRING:
case SocketType::ENUM:
{
node->set(socket, attr.value());
break;
}
case SocketType::ENUM:
{
ustring value(attr.value());
if(socket.enum_values->exists(value)) {
node->set(socket, value);
}
else {
fprintf(stderr, "Unknown value \"%s\" for attribute \"%s\".\n", value.c_str(), socket.name.c_str());
}
break;
}
case SocketType::STRING_ARRAY:
{
vector<string> tokens;
@@ -173,8 +184,9 @@ void xml_read_node(XMLReader& reader, Node *node, pugi::xml_node xml_node)
array<ustring> value;
value.resize(tokens.size());
for(size_t i = 0; i < value.size(); i++)
for(size_t i = 0; i < value.size(); i++) {
value[i] = ustring(tokens[i]);
}
node->set(socket, value);
break;
}
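
The ENUM case no longer shares the STRING path: the attribute is first checked against the socket's registered values, and an unrecognised value is reported rather than passed on to Node::set(). A hedged sketch of the observable behaviour, using the camera type enum declared below as the example (the input is hypothetical):

/* Hypothetical scene input:  <camera type="spherical" ... />
 * "spherical" is not registered in type_enum ("perspective", "orthograph",
 * "panorama"), so the socket is left unchanged (here, the CAMERA_PERSPECTIVE
 * default) and a warning along the lines of
 *   Unknown value "spherical" for attribute "type".
 * is written to stderr. */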

@@ -30,70 +30,126 @@
CCL_NAMESPACE_BEGIN
static float shutter_curve_eval(float x,
float shutter_curve[RAMP_TABLE_SIZE])
array<float>& shutter_curve)
{
x *= RAMP_TABLE_SIZE;
if (shutter_curve.size() == 0)
return 1.0f;
x *= shutter_curve.size();
int index = (int)x;
float frac = x - index;
if(index < RAMP_TABLE_SIZE - 1) {
if(index < shutter_curve.size() - 1) {
return lerp(shutter_curve[index], shutter_curve[index + 1], frac);
}
else {
return shutter_curve[RAMP_TABLE_SIZE - 1];
return shutter_curve[shutter_curve.size() - 1];
}
}
Camera::Camera()
NODE_DEFINE(Camera)
{
NodeType* type = NodeType::add("camera", create);
SOCKET_FLOAT(shuttertime, "Shutter Time", 1.0f);
static NodeEnum motion_position_enum;
motion_position_enum.insert("start", MOTION_POSITION_START);
motion_position_enum.insert("center", MOTION_POSITION_CENTER);
motion_position_enum.insert("end", MOTION_POSITION_END);
SOCKET_ENUM(motion_position, "Motion Position", motion_position_enum, MOTION_POSITION_CENTER);
static NodeEnum rolling_shutter_type_enum;
rolling_shutter_type_enum.insert("none", ROLLING_SHUTTER_NONE);
rolling_shutter_type_enum.insert("top", ROLLING_SHUTTER_TOP);
SOCKET_ENUM(rolling_shutter_type, "Rolling Shutter Type", rolling_shutter_type_enum, ROLLING_SHUTTER_NONE);
SOCKET_FLOAT(rolling_shutter_duration, "Rolling Shutter Duration", 0.1f);
SOCKET_FLOAT_ARRAY(shutter_curve, "Shutter Curve", array<float>());
SOCKET_FLOAT(aperturesize, "Aperture Size", 0.0f);
SOCKET_FLOAT(focaldistance, "Focal Distance", 10.0f);
SOCKET_INT(blades, "Blades", 0);
SOCKET_FLOAT(bladesrotation, "Blades Rotation", 0.0f);
SOCKET_TRANSFORM(matrix, "Matrix", transform_identity());
SOCKET_FLOAT(aperture_ratio, "Aperture Ratio", 1.0f);
static NodeEnum type_enum;
type_enum.insert("perspective", CAMERA_PERSPECTIVE);
type_enum.insert("orthograph", CAMERA_ORTHOGRAPHIC);
type_enum.insert("panorama", CAMERA_PANORAMA);
SOCKET_ENUM(type, "Type", type_enum, CAMERA_PERSPECTIVE);
static NodeEnum panorama_type_enum;
panorama_type_enum.insert("equirectangular", PANORAMA_EQUIRECTANGULAR);
panorama_type_enum.insert("mirrorball", PANORAMA_MIRRORBALL);
panorama_type_enum.insert("fisheye_equidistant", PANORAMA_FISHEYE_EQUIDISTANT);
panorama_type_enum.insert("fisheye_equisolid", PANORAMA_FISHEYE_EQUISOLID);
SOCKET_ENUM(panorama_type, "Panorama Type", panorama_type_enum, PANORAMA_EQUIRECTANGULAR);
SOCKET_FLOAT(fisheye_fov, "Fisheye FOV", M_PI_F);
SOCKET_FLOAT(fisheye_lens, "Fisheye Lens", 10.5f);
SOCKET_FLOAT(latitude_min, "Latitude Min", -M_PI_2_F);
SOCKET_FLOAT(latitude_max, "Latitude Max", M_PI_2_F);
SOCKET_FLOAT(longitude_min, "Longitude Min", -M_PI_F);
SOCKET_FLOAT(longitude_max, "Longitude Max", M_PI_F);
SOCKET_FLOAT(fov, "FOV", M_PI_4_F);
SOCKET_FLOAT(fov_pre, "FOV Pre", M_PI_4_F);
SOCKET_FLOAT(fov_post, "FOV Post", M_PI_4_F);
static NodeEnum stereo_eye_enum;
stereo_eye_enum.insert("none", STEREO_NONE);
stereo_eye_enum.insert("left", STEREO_LEFT);
stereo_eye_enum.insert("right", STEREO_RIGHT);
SOCKET_ENUM(stereo_eye, "Stereo Eye", stereo_eye_enum, STEREO_NONE);
SOCKET_FLOAT(interocular_distance, "Interocular Distance", 0.065f);
SOCKET_FLOAT(convergence_distance, "Convergence Distance", 30.0f * 0.065f);
SOCKET_BOOLEAN(use_pole_merge, "Use Pole Merge", false);
SOCKET_FLOAT(pole_merge_angle_from, "Pole Merge Angle From", 60.0f * M_PI_F / 180.0f);
SOCKET_FLOAT(pole_merge_angle_to, "Pole Merge Angle To", 75.0f * M_PI_F / 180.0f);
SOCKET_FLOAT(sensorwidth, "Sensor Width", 0.036f);
SOCKET_FLOAT(sensorheight, "Sensor Height", 0.024f);
SOCKET_FLOAT(nearclip, "Near Clip", 1e-5f);
SOCKET_FLOAT(farclip, "Far Clip", 1e5f);
SOCKET_FLOAT(viewplane.left, "Viewplane Left", 0);
SOCKET_FLOAT(viewplane.right, "Viewplane Right", 0);
SOCKET_FLOAT(viewplane.bottom, "Viewplane Bottom", 0);
SOCKET_FLOAT(viewplane.top, "Viewplane Top", 0);
SOCKET_FLOAT(border.left, "Border Left", 0);
SOCKET_FLOAT(border.right, "Border Right", 0);
SOCKET_FLOAT(border.bottom, "Border Bottom", 0);
SOCKET_FLOAT(border.top, "Border Top", 0);
return type;
}
Camera::Camera()
: Node(node_type)
{
shuttertime = 1.0f;
motion_position = MOTION_POSITION_CENTER;
shutter_table_offset = TABLE_OFFSET_INVALID;
aperturesize = 0.0f;
focaldistance = 10.0f;
blades = 0;
bladesrotation = 0.0f;
matrix = transform_identity();
width = 1024;
height = 512;
resolution = 1;
motion.pre = transform_identity();
motion.post = transform_identity();
use_motion = false;
use_perspective_motion = false;
aperture_ratio = 1.0f;
shutter_curve.resize(RAMP_TABLE_SIZE);
for(int i = 0; i < shutter_curve.size(); ++i) {
shutter_curve[i] = 1.0f;
}
type = CAMERA_PERSPECTIVE;
panorama_type = PANORAMA_EQUIRECTANGULAR;
fisheye_fov = M_PI_F;
fisheye_lens = 10.5f;
latitude_min = -M_PI_2_F;
latitude_max = M_PI_2_F;
longitude_min = -M_PI_F;
longitude_max = M_PI_F;
fov = M_PI_4_F;
fov_pre = fov_post = fov;
stereo_eye = STEREO_NONE;
interocular_distance = 0.065f;
convergence_distance = 30.0f * 0.065f;
use_pole_merge = false;
pole_merge_angle_from = 60.0f * M_PI_F / 180.0f;
pole_merge_angle_to = 75.0f * M_PI_F / 180.0f;
sensorwidth = 0.036f;
sensorheight = 0.024f;
nearclip = 1e-5f;
farclip = 1e5f;
width = 1024;
height = 512;
resolution = 1;
viewplane.left = -((float)width/(float)height);
viewplane.right = (float)width/(float)height;
viewplane.bottom = -1.0f;
viewplane.top = 1.0f;
compute_auto_viewplane();
screentoworld = transform_identity();
rastertoworld = transform_identity();
@@ -109,16 +165,6 @@ Camera::Camera()
need_device_update = true;
need_flags_update = true;
previous_need_motion = -1;
/* Initialize shutter curve. */
const int num_shutter_points = sizeof(shutter_curve) / sizeof(*shutter_curve);
for(int i = 0; i < num_shutter_points; ++i) {
shutter_curve[i] = 1.0f;
}
/* Initialize rolling shutter effect. */
rolling_shutter_type = ROLLING_SHUTTER_NONE;
rolling_shutter_duration = 0.1f;
}
Camera::~Camera()
@@ -436,40 +482,11 @@ void Camera::device_free(Device * /*device*/,
scene->lookup_tables->remove_table(&shutter_table_offset);
}
bool Camera::modified(const Camera& cam)
{
return !((shuttertime == cam.shuttertime) &&
(aperturesize == cam.aperturesize) &&
(blades == cam.blades) &&
(bladesrotation == cam.bladesrotation) &&
(focaldistance == cam.focaldistance) &&
(type == cam.type) &&
(fov == cam.fov) &&
(nearclip == cam.nearclip) &&
(farclip == cam.farclip) &&
(sensorwidth == cam.sensorwidth) &&
(sensorheight == cam.sensorheight) &&
// modified for progressive render
// (width == cam.width) &&
// (height == cam.height) &&
(viewplane == cam.viewplane) &&
(border == cam.border) &&
(matrix == cam.matrix) &&
(aperture_ratio == cam.aperture_ratio) &&
(panorama_type == cam.panorama_type) &&
(fisheye_fov == cam.fisheye_fov) &&
(fisheye_lens == cam.fisheye_lens) &&
(latitude_min == cam.latitude_min) &&
(latitude_max == cam.latitude_max) &&
(longitude_min == cam.longitude_min) &&
(longitude_max == cam.longitude_max) &&
(stereo_eye == cam.stereo_eye));
}
bool Camera::motion_modified(const Camera& cam)
{
return !((motion == cam.motion) &&
(use_motion == cam.use_motion));
(use_motion == cam.use_motion) &&
(use_perspective_motion == cam.use_perspective_motion));
}
void Camera::tag_update()
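
Two consequences of storing the shutter curve as an array<float> are worth spelling out: an empty array now means "no curve" and evaluates to a constant 1.0 through the new early-out, while the constructor still fills RAMP_TABLE_SIZE entries with 1.0, so default behaviour is unchanged. A small sketch under those assumptions (shutter_curve_eval() is file-local to the camera code, so this is illustrative only):

array<float> curve;                            /* empty, like the socket default array<float>() */
float a = shutter_curve_eval(0.5f, curve);     /* 1.0f via the new size() == 0 early-out */

curve.resize(RAMP_TABLE_SIZE);
for(int i = 0; i < RAMP_TABLE_SIZE; i++) curve[i] = 1.0f;   /* same fill as the constructor */
float b = shutter_curve_eval(0.5f, curve);     /* also 1.0f: lerp between equal samples */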

@@ -19,6 +19,8 @@
#include "kernel_types.h"
#include "node.h"
#include "util_boundbox.h"
#include "util_transform.h"
#include "util_types.h"
@@ -35,8 +37,10 @@ class Scene;
* Renderman, and Blender after remapping.
*/
class Camera {
class Camera : public Node {
public:
NODE_DECLARE;
/* Specifies an offset for the shutter's time interval. */
enum MotionPosition {
/* Shutter opens at the current frame. */
@@ -69,7 +73,7 @@ public:
/* motion blur */
float shuttertime;
MotionPosition motion_position;
float shutter_curve[RAMP_TABLE_SIZE];
array<float> shutter_curve;
size_t shutter_table_offset;
/* ** Rolling shutter effect. ** */
@@ -177,7 +181,6 @@ public:
void device_update_volume(Device *device, DeviceScene *dscene, Scene *scene);
void device_free(Device *device, DeviceScene *dscene, Scene *scene);
bool modified(const Camera& cam);
bool motion_modified(const Camera& cam);
void tag_update();