svn merge ^/trunk/blender -r42991:43009

This commit is contained in:
Campbell Barton 2011-12-31 03:07:14 +00:00
commit 81f5c9ed9b
65 changed files with 2506 additions and 639 deletions

@ -349,6 +349,14 @@ def preset_paths(subdir):
raise Exception("invalid subdir given %r" % subdir)
elif _os.path.isdir(directory):
dirs.append(directory)
# Find addons preset paths
import addon_utils
for path in addon_utils.paths():
directory = _os.path.join(path, "presets", subdir)
if _os.path.isdir(directory):
dirs.append(directory)
return dirs

@ -0,0 +1,5 @@
# Track-color preset script: applies a fixed custom color to the active
# motion-tracking track of the clip currently being edited.
import bpy
# Active track of the movie clip open in the Clip Editor.
track = bpy.context.edit_movieclip.tracking.tracks.active
# RGB (1.0, 0.0, 1.0) == magenta.
track.color = (1.0, 0.0, 1.0)
# Enable per-track color so the value above is actually used for display.
track.use_custom_color = True

@ -76,6 +76,7 @@ class AddPresetBase():
if hasattr(self, "add"):
self.add(context, filepath)
else:
print("Writing Preset: %r" % filepath)
file_preset = open(filepath, 'w')
file_preset.write("import bpy\n")

@ -755,7 +755,16 @@ class ConstraintButtonsPanel():
col = layout.column()
col.prop(con, "rotation_range", text="Pivot When")
@staticmethod
def _getConstraintClip(context, con):
if not con.use_active_clip:
return con.clip
else:
return context.scene.active_clip
def FOLLOW_TRACK(self, context, layout, con):
clip = self._getConstraintClip(context, con)
row = layout.row()
row.prop(con, "use_active_clip")
row.prop(con, "use_3d_position")
@ -763,7 +772,11 @@ class ConstraintButtonsPanel():
if not con.use_active_clip:
layout.prop(con, "clip")
layout.prop(con, "track")
if clip:
layout.prop_search(con, "object", clip.tracking, "objects", icon='OBJECT_DATA')
layout.prop_search(con, "track", clip.tracking, "tracks", icon='ANIMATION_DATA')
layout.prop(con, "camera")
layout.operator("clip.constraint_to_fcurve")
@ -775,6 +788,26 @@ class ConstraintButtonsPanel():
layout.operator("clip.constraint_to_fcurve")
def OBJECT_SOLVER(self, context, layout, con):
scene = context.scene
clip = self._getConstraintClip(context, con)
layout.prop(con, "use_active_clip")
if not con.use_active_clip:
layout.prop(con, "clip")
if clip:
layout.prop_search(con, "object", clip.tracking, "objects", icon='OBJECT_DATA')
layout.prop(con, "camera")
row = layout.row()
row.operator("constraint.objectsolver_set_inverse")
row.operator("constraint.objectsolver_clear_inverse")
layout.operator("clip.constraint_to_fcurve")
def SCRIPT(self, context, layout, con):
layout.label("Blender 2.5 has no py-constraints")

@ -316,6 +316,9 @@ class RENDER_PT_shading(RenderButtonsPanel, Panel):
col = split.column()
col.prop(rd, "use_raytrace", text="Ray Tracing")
col.prop(rd, "use_color_management")
sub = col.row()
sub.active = rd.use_color_management == True
sub.prop(rd, "use_color_unpremultiply")
col.prop(rd, "alpha_mode", text="Alpha")

@ -70,7 +70,13 @@ class CLIP_HT_header(Header):
row.template_ID(sc, "clip", open='clip.open')
if clip:
r = clip.tracking.reconstruction
tracking = clip.tracking
active = tracking.objects.active
if active and not active.is_camera:
r = active.reconstruction
else:
r = tracking.reconstruction
if r.is_valid:
layout.label(text="Average solve error: %.4f" %
@ -197,10 +203,18 @@ class CLIP_PT_tools_solve(Panel):
def draw(self, context):
layout = self.layout
clip = context.space_data.clip
settings = clip.tracking.settings
tracking = clip.tracking
settings = tracking.settings
tracking_object = tracking.objects.active
col = layout.column(align=True)
col.operator("clip.solve_camera", text="Camera Motion")
if tracking_object.is_camera:
solve_text = "Camera Motion"
else:
solve_text = "Object Motion"
col.operator("clip.solve_camera", text=solve_text)
col.operator("clip.clear_solution")
col = layout.column(align=True)
@ -208,6 +222,7 @@ class CLIP_PT_tools_solve(Panel):
col.prop(settings, "keyframe_b")
col = layout.column(align=True)
col.active = tracking_object.is_camera
col.label(text="Refine:")
col.prop(settings, "refine_intrinsics", text="")
@ -287,6 +302,39 @@ class CLIP_PT_tools_orientation(Panel):
col.prop(settings, "distance")
class CLIP_PT_tools_object(Panel):
    """Tool-shelf panel with object-tracking tools.

    Shown only in reconstruction mode and only while the active tracking
    object is not the camera (object solving, as opposed to camera solving).
    """
    bl_space_type = 'CLIP_EDITOR'
    bl_region_type = 'TOOLS'
    bl_label = "Object"

    @classmethod
    def poll(cls, context):
        space = context.space_data
        if not space.clip:
            return False
        if space.mode != 'RECONSTRUCTION':
            return False
        # Hide the panel when the camera object is active; its tools live
        # in the dedicated solve panel instead.
        return not space.clip.tracking.objects.active.is_camera

    def draw(self, context):
        space = context.space_data
        tracking = space.clip.tracking
        active_object = tracking.objects.active

        col = self.layout.column()
        col.prop(active_object, "scale")

        col.separator()

        col.operator("clip.set_solution_scale", text="Set Scale")
        col.prop(tracking.settings, "object_distance")
class CLIP_PT_tools_grease_pencil(Panel):
bl_space_type = 'CLIP_EDITOR'
bl_region_type = 'TOOLS'
@ -316,6 +364,37 @@ class CLIP_PT_tools_grease_pencil(Panel):
row.prop(context.tool_settings, "use_grease_pencil_sessions")
class CLIP_PT_objects(Panel):
    """Sidebar panel listing the clip's tracking objects.

    Provides the object list with add/remove controls and a name field
    for the active tracking object.
    """
    bl_space_type = 'CLIP_EDITOR'
    bl_region_type = 'UI'
    bl_label = "Objects"
    bl_options = {'DEFAULT_CLOSED'}

    @classmethod
    def poll(cls, context):
        return context.space_data.clip

    def draw(self, context):
        layout = self.layout
        tracking = context.space_data.clip.tracking

        row = layout.row()
        row.template_list(tracking, "objects", tracking, "active_object_index", rows=3)

        buttons = row.column(align=True)
        buttons.operator("clip.tracking_object_new", icon='ZOOMIN', text="")
        buttons.operator("clip.tracking_object_remove", icon='ZOOMOUT', text="")

        active_object = tracking.objects.active
        if active_object:
            layout.prop(active_object, "name")
class CLIP_PT_track(Panel):
bl_space_type = 'CLIP_EDITOR'
bl_region_type = 'UI'
@ -352,9 +431,15 @@ class CLIP_PT_track(Panel):
layout.template_track(sc, "scopes")
row = layout.row(align=True)
row.prop(act_track, "use_red_channel", text="R", toggle=True)
row.prop(act_track, "use_green_channel", text="G", toggle=True)
row.prop(act_track, "use_blue_channel", text="B", toggle=True)
sub = row.row()
sub.prop(act_track, "use_red_channel", text="R", toggle=True)
sub.prop(act_track, "use_green_channel", text="G", toggle=True)
sub.prop(act_track, "use_blue_channel", text="B", toggle=True)
row.separator()
sub = row.row()
sub.prop(act_track, "use_grayscale_preview", text="B/W", toggle=True)
layout.separator()
@ -422,7 +507,7 @@ class CLIP_PT_tracking_camera(Panel):
col.operator("clip.set_center_principal", text="Center")
col = layout.column(align=True)
col.label(text="Undistortion:")
col.label(text="Lens Distortion:")
col.prop(clip.tracking.camera, "k1")
col.prop(clip.tracking.camera, "k2")
col.prop(clip.tracking.camera, "k3")

@ -34,11 +34,13 @@
struct bGPDlayer;
struct ImBuf;
struct ListBase;
struct MovieReconstructContext;
struct MovieTrackingTrack;
struct MovieTrackingMarker;
struct MovieTracking;
struct MovieTrackingContext;
struct MovieTrackingObject;
struct MovieClipUser;
struct MovieDistortion;
struct Camera;
@ -49,8 +51,8 @@ void BKE_tracking_init_settings(struct MovieTracking *tracking);
void BKE_tracking_clamp_track(struct MovieTrackingTrack *track, int event);
void BKE_tracking_track_flag(struct MovieTrackingTrack *track, int area, int flag, int clear);
struct MovieTrackingTrack *BKE_tracking_add_track(struct MovieTracking *tracking, float x, float y,
int framenr, int width, int height);
struct MovieTrackingTrack *BKE_tracking_add_track(struct MovieTracking *tracking, struct ListBase *tracksbase,
float x, float y, int framenr, int width, int height);
void BKE_tracking_insert_marker(struct MovieTrackingTrack *track, struct MovieTrackingMarker *marker);
void BKE_tracking_delete_marker(struct MovieTrackingTrack *track, int framenr);
@ -72,40 +74,54 @@ struct ImBuf *BKE_tracking_get_pattern_imbuf(struct ImBuf *ibuf, struct MovieTra
struct ImBuf *BKE_tracking_get_search_imbuf(struct ImBuf *ibuf, struct MovieTrackingTrack *track,
struct MovieTrackingMarker *marker, int margin, int anchored, float pos[2], int origin[2]);
void BKE_track_unique_name(struct MovieTracking *tracking, struct MovieTrackingTrack *track);
void BKE_track_unique_name(struct ListBase *tracksbase, struct MovieTrackingTrack *track);
struct MovieTrackingTrack *BKE_tracking_named_track(struct MovieTracking *tracking, const char *name);
struct MovieTrackingTrack *BKE_tracking_indexed_track(struct MovieTracking *tracking, int tracknr);
struct MovieTrackingTrack *BKE_tracking_named_track(struct MovieTracking *tracking, struct MovieTrackingObject *object, const char *name);
struct MovieTrackingTrack *BKE_tracking_indexed_track(struct MovieTracking *tracking, int tracknr, struct ListBase **tracksbase_r);
void BKE_tracking_camera_shift(struct MovieTracking *tracking, int winx, int winy, float *shiftx, float *shifty);
void BKE_tracking_camera_to_blender(struct MovieTracking *tracking, struct Scene *scene, struct Camera *camera, int width, int height);
void BKE_get_tracking_mat(struct Scene *scene, struct Object *ob, float mat[4][4]);
void BKE_tracking_projection_matrix(struct MovieTracking *tracking, int framenr, int winx, int winy, float mat[4][4]);
void BKE_tracking_projection_matrix(struct MovieTracking *tracking, struct MovieTrackingObject *object,
int framenr, int winx, int winy, float mat[4][4]);
struct ListBase *BKE_tracking_get_tracks(struct MovieTracking *tracking);
struct MovieTrackingReconstruction *BKE_tracking_get_reconstruction(struct MovieTracking *tracking);
struct MovieTrackingTrack *BKE_tracking_active_track(struct MovieTracking *tracking);
struct MovieTrackingObject *BKE_tracking_active_object(struct MovieTracking *tracking);
struct MovieTrackingObject *BKE_tracking_get_camera_object(struct MovieTracking *tracking);
struct ListBase *BKE_tracking_object_tracks(struct MovieTracking *tracking, struct MovieTrackingObject *object);
struct MovieTrackingReconstruction *BKE_tracking_object_reconstruction(struct MovieTracking *tracking,
struct MovieTrackingObject *object);
/* 2D tracking */
struct MovieTrackingContext *BKE_tracking_context_new(struct MovieClip *clip, struct MovieClipUser *user,
short backwards, short disable_failed, short sequence);
short backwards, short sequence);
void BKE_tracking_context_free(struct MovieTrackingContext *context);
void BKE_tracking_sync(struct MovieTrackingContext *context);
void BKE_tracking_sync_user(struct MovieClipUser *user, struct MovieTrackingContext *context);
int BKE_tracking_next(struct MovieTrackingContext *context);
/* Camera solving */
int BKE_tracking_can_reconstruct(struct MovieTracking *tracking, char *error_msg, int error_size);
int BKE_tracking_can_reconstruct(struct MovieTracking *tracking, struct MovieTrackingObject *object,
char *error_msg, int error_size);
struct MovieReconstructContext* BKE_tracking_reconstruction_context_new(struct MovieTracking *tracking,
int keyframe1, int keyframe2, int width, int height);
struct MovieTrackingObject *object, int keyframe1, int keyframe2, int width, int height);
void BKE_tracking_reconstruction_context_free(struct MovieReconstructContext *context);
void BKE_tracking_solve_reconstruction(struct MovieReconstructContext *context,
short *stop, short *do_update, float *progress, char *stats_message, int message_size);
int BKE_tracking_finish_reconstruction(struct MovieReconstructContext *context, struct MovieTracking *tracking);
struct MovieReconstructedCamera *BKE_tracking_get_reconstructed_camera(struct MovieTracking *tracking, int framenr);
void BKE_tracking_get_interpolated_camera(struct MovieTracking *tracking, int framenr, float mat[4][4]);
struct MovieReconstructedCamera *BKE_tracking_get_reconstructed_camera(struct MovieTracking *tracking,
struct MovieTrackingObject *object, int framenr);
void BKE_tracking_get_interpolated_camera(struct MovieTracking *tracking,
struct MovieTrackingObject *object, int framenr, float mat[4][4]);
/* Feature detection */
void BKE_tracking_detect_fast(struct MovieTracking *tracking, struct ImBuf *imbuf,
void BKE_tracking_detect_fast(struct MovieTracking *tracking, struct ListBase *tracksbase, struct ImBuf *imbuf,
int framenr, int margin, int min_trackness, int min_distance, struct bGPDlayer *layer,
int place_outside_layer);
@ -127,8 +143,14 @@ void BKE_tracking_distortion_destroy(struct MovieDistortion *distortion);
struct ImBuf *BKE_tracking_undistort(struct MovieTracking *tracking, struct ImBuf *ibuf, int width, int height, float overscan);
struct ImBuf *BKE_tracking_distort(struct MovieTracking *tracking, struct ImBuf *ibuf, int width, int height, float overscan);
/* Object tracking */
struct MovieTrackingObject *BKE_tracking_new_object(struct MovieTracking *tracking, const char *name);
void BKE_tracking_remove_object(struct MovieTracking *tracking, struct MovieTrackingObject *object);
void BKE_tracking_object_unique_name(struct MovieTracking *tracking, struct MovieTrackingObject *object);
struct MovieTrackingObject *BKE_tracking_named_object(struct MovieTracking *tracking, const char *name);
/* Select */
void BKE_tracking_select_track(struct MovieTracking *tracking, struct MovieTrackingTrack *track, int area, int extend);
void BKE_tracking_select_track(struct ListBase *tracksbase, struct MovieTrackingTrack *track, int area, int extend);
void BKE_tracking_deselect_track(struct MovieTrackingTrack *track, int area);
#define TRACK_SELECTED(track) ((((track)->flag&TRACK_HIDDEN)==0) && ((track)->flag&SELECT || (track)->pat_flag&SELECT || (track)->search_flag&SELECT))

@ -827,7 +827,7 @@ static void childof_evaluate (bConstraint *con, bConstraintOb *cob, ListBase *ta
{
bChildOfConstraint *data= con->data;
bConstraintTarget *ct= targets->first;
/* only evaluate if there is a target */
if (VALID_CONS_TARGET(ct)) {
float parmat[4][4];
@ -1257,6 +1257,7 @@ static void followpath_get_tarmat (bConstraint *con, bConstraintOb *cob, bConstr
float quat[4];
if ((data->followflag & FOLLOWPATH_STATIC) == 0) {
/* animated position along curve depending on time */
Nurb *nu = cu->nurb.first;
curvetime= cu->ctime - data->offset;
/* ctime is now a proper var setting of Curve which gets set by Animato like any other var that's animated,
@ -1266,7 +1267,18 @@ static void followpath_get_tarmat (bConstraint *con, bConstraintOb *cob, bConstr
* factor, which then gets clamped to lie within 0.0 - 1.0 range
*/
curvetime /= cu->pathlen;
CLAMP(curvetime, 0.0f, 1.0f);
if (nu && nu->flagu & CU_NURB_CYCLIC) {
/* If the curve is cyclic, enable looping around if the time is
* outside the bounds 0..1 */
if ((curvetime < 0.0f) || (curvetime > 1.0f)) {
curvetime -= floor(curvetime);
}
}
else {
/* The curve is not cyclic, so clamp to the begin/end points. */
CLAMP(curvetime, 0.0f, 1.0f);
}
}
else {
/* fixed position along curve */
@ -3939,6 +3951,7 @@ static void followtrack_id_looper (bConstraint *con, ConstraintIDFunc func, void
bFollowTrackConstraint *data= con->data;
func(con, (ID**)&data->clip, userdata);
func(con, (ID**)&data->camera, userdata);
}
static void followtrack_evaluate (bConstraint *con, bConstraintOb *cob, ListBase *UNUSED(targets))
@ -3946,105 +3959,126 @@ static void followtrack_evaluate (bConstraint *con, bConstraintOb *cob, ListBase
Scene *scene= cob->scene;
bFollowTrackConstraint *data= con->data;
MovieClip *clip= data->clip;
MovieTracking *tracking;
MovieTrackingTrack *track;
MovieTrackingObject *tracking_object;
Object *camob= data->camera ? data->camera : scene->camera;
if (data->flag & FOLLOWTRACK_ACTIVECLIP)
clip= scene->clip;
if (!clip || !data->track[0])
if (!clip || !data->track[0] || !camob)
return;
track= BKE_tracking_named_track(&clip->tracking, data->track);
tracking= &clip->tracking;
if(data->object[0])
tracking_object= BKE_tracking_named_object(tracking, data->object);
else
tracking_object= BKE_tracking_get_camera_object(tracking);
if(!tracking_object)
return;
track= BKE_tracking_named_track(tracking, tracking_object, data->track);
if (!track)
return;
if (data->flag & FOLLOWTRACK_USE_3D_POSITION) {
if (track->flag & TRACK_HAS_BUNDLE) {
float pos[3], mat[4][4], obmat[4][4];
MovieTracking *tracking= &clip->tracking;
float obmat[4][4], mat[4][4];
copy_m4_m4(obmat, cob->matrix);
BKE_get_tracking_mat(cob->scene, NULL, mat);
mul_v3_m4v3(pos, mat, track->bundle_pos);
cob->matrix[3][0] += pos[0];
cob->matrix[3][1] += pos[1];
cob->matrix[3][2] += pos[2];
if((tracking_object->flag&TRACKING_OBJECT_CAMERA)==0) {
float imat[4][4];
copy_m4_m4(mat, camob->obmat);
BKE_tracking_get_interpolated_camera(tracking, tracking_object, scene->r.cfra, imat);
invert_m4(imat);
mul_serie_m4(cob->matrix, obmat, mat, imat, NULL, NULL, NULL, NULL, NULL);
translate_m4(cob->matrix, track->bundle_pos[0], track->bundle_pos[1], track->bundle_pos[2]);
}
else {
BKE_get_tracking_mat(cob->scene, camob, mat);
mult_m4_m4m4(cob->matrix, obmat, mat);
translate_m4(cob->matrix, track->bundle_pos[0], track->bundle_pos[1], track->bundle_pos[2]);
}
}
}
else {
Object *camob= cob->scene->camera;
if (camob) {
MovieClipUser user;
MovieTrackingMarker *marker;
float vec[3], disp[3], axis[3], mat[4][4];
float aspect= (scene->r.xsch*scene->r.xasp) / (scene->r.ysch*scene->r.yasp);
float len, d;
where_is_object_mat(scene, camob, mat);
/* camera axis */
vec[0]= 0.0f;
vec[1]= 0.0f;
vec[2]= 1.0f;
mul_v3_m4v3(axis, mat, vec);
/* distance to projection plane */
copy_v3_v3(vec, cob->matrix[3]);
sub_v3_v3(vec, mat[3]);
project_v3_v3v3(disp, vec, axis);
len= len_v3(disp);
if (len > FLT_EPSILON) {
CameraParams params;
float pos[2], rmat[4][4];
user.framenr= scene->r.cfra;
marker= BKE_tracking_get_marker(track, user.framenr);
add_v2_v2v2(pos, marker->pos, track->offset);
camera_params_init(&params);
camera_params_from_object(&params, camob);
MovieClipUser user;
MovieTrackingMarker *marker;
float vec[3], disp[3], axis[3], mat[4][4];
float aspect= (scene->r.xsch*scene->r.xasp) / (scene->r.ysch*scene->r.yasp);
float len, d;
if (params.is_ortho) {
vec[0]= params.ortho_scale * (pos[0]-0.5f+params.shiftx);
vec[1]= params.ortho_scale * (pos[1]-0.5f+params.shifty);
vec[2]= -len;
if (aspect > 1.0f) vec[1] /= aspect;
else vec[0] *= aspect;
mul_v3_m4v3(disp, camob->obmat, vec);
copy_m4_m4(rmat, camob->obmat);
zero_v3(rmat[3]);
mult_m4_m4m4(cob->matrix, cob->matrix, rmat);
copy_v3_v3(cob->matrix[3], disp);
}
else {
d= (len*params.sensor_x) / (2.0f*params.lens);
vec[0]= d*(2.0f*(pos[0]+params.shiftx)-1.0f);
vec[1]= d*(2.0f*(pos[1]+params.shifty)-1.0f);
vec[2]= -len;
if (aspect > 1.0f) vec[1] /= aspect;
else vec[0] *= aspect;
mul_v3_m4v3(disp, camob->obmat, vec);
/* apply camera rotation so Z-axis would be co-linear */
copy_m4_m4(rmat, camob->obmat);
zero_v3(rmat[3]);
mult_m4_m4m4(cob->matrix, cob->matrix, rmat);
copy_v3_v3(cob->matrix[3], disp);
}
where_is_object_mat(scene, camob, mat);
/* camera axis */
vec[0]= 0.0f;
vec[1]= 0.0f;
vec[2]= 1.0f;
mul_v3_m4v3(axis, mat, vec);
/* distance to projection plane */
copy_v3_v3(vec, cob->matrix[3]);
sub_v3_v3(vec, mat[3]);
project_v3_v3v3(disp, vec, axis);
len= len_v3(disp);
if (len > FLT_EPSILON) {
CameraParams params;
float pos[2], rmat[4][4];
user.framenr= scene->r.cfra;
marker= BKE_tracking_get_marker(track, user.framenr);
add_v2_v2v2(pos, marker->pos, track->offset);
camera_params_init(&params);
camera_params_from_object(&params, camob);
if (params.is_ortho) {
vec[0]= params.ortho_scale * (pos[0]-0.5f+params.shiftx);
vec[1]= params.ortho_scale * (pos[1]-0.5f+params.shifty);
vec[2]= -len;
if (aspect > 1.0f) vec[1] /= aspect;
else vec[0] *= aspect;
mul_v3_m4v3(disp, camob->obmat, vec);
copy_m4_m4(rmat, camob->obmat);
zero_v3(rmat[3]);
mult_m4_m4m4(cob->matrix, cob->matrix, rmat);
copy_v3_v3(cob->matrix[3], disp);
}
else {
d= (len*params.sensor_x) / (2.0f*params.lens);
vec[0]= d*(2.0f*(pos[0]+params.shiftx)-1.0f);
vec[1]= d*(2.0f*(pos[1]+params.shifty)-1.0f);
vec[2]= -len;
if (aspect > 1.0f) vec[1] /= aspect;
else vec[0] *= aspect;
mul_v3_m4v3(disp, camob->obmat, vec);
/* apply camera rotation so Z-axis would be co-linear */
copy_m4_m4(rmat, camob->obmat);
zero_v3(rmat[3]);
mult_m4_m4m4(cob->matrix, cob->matrix, rmat);
copy_v3_v3(cob->matrix[3], disp);
}
}
}
@ -4088,16 +4122,19 @@ static void camerasolver_evaluate (bConstraint *con, bConstraintOb *cob, ListBas
Scene *scene= cob->scene;
bCameraSolverConstraint *data= con->data;
MovieClip *clip= data->clip;
if (data->flag & CAMERASOLVER_ACTIVECLIP)
clip= scene->clip;
if (clip) {
float mat[4][4], obmat[4][4];
BKE_tracking_get_interpolated_camera(&clip->tracking, scene->r.cfra, mat);
MovieTracking *tracking= &clip->tracking;
MovieTrackingObject *object= BKE_tracking_get_camera_object(tracking);
BKE_tracking_get_interpolated_camera(tracking, object, scene->r.cfra, mat);
copy_m4_m4(obmat, cob->matrix);
mult_m4_m4m4(cob->matrix, obmat, mat);
}
}
@ -4118,6 +4155,80 @@ static bConstraintTypeInfo CTI_CAMERASOLVER = {
camerasolver_evaluate /* evaluate */
};
/* ----------- Object Solver ------------- */
/* Fill in the defaults for a freshly created Object Solver constraint. */
static void objectsolver_new_data (void *cdata)
{
	bObjectSolverConstraint *dosc= (bObjectSolverConstraint *)cdata;

	/* no clip assigned yet; follow the scene's active clip by default */
	dosc->flag |= OBJECTSOLVER_ACTIVECLIP;
	dosc->clip = NULL;

	/* inverse correction matrix starts out as identity */
	unit_m4(dosc->invmat);
}
/* Invoke 'func' on every ID datablock referenced by an Object Solver
 * constraint (the movie clip and the camera object). */
static void objectsolver_id_looper (bConstraint *con, ConstraintIDFunc func, void *userdata)
{
	bObjectSolverConstraint *dosc= con->data;

	func(con, (ID**)&dosc->clip, userdata);
	func(con, (ID**)&dosc->camera, userdata);
}
/* Evaluate the Object Solver constraint: place the constrained object using
 * the reconstructed motion of the named tracking object, expressed relative
 * to the camera, and corrected by the stored inverse matrix.
 *
 * Fix: the original body re-tested 'clip' inside an 'if (clip)' block right
 * after an early return on '!clip' — that guard was always true, so the dead
 * check and its extra nesting level are removed. */
static void objectsolver_evaluate (bConstraint *con, bConstraintOb *cob, ListBase *UNUSED(targets))
{
	Scene *scene= cob->scene;
	bObjectSolverConstraint *data= con->data;
	MovieClip *clip= data->clip;
	Object *camob= data->camera ? data->camera : scene->camera;
	MovieTracking *tracking;
	MovieTrackingObject *object;

	if (data->flag & OBJECTSOLVER_ACTIVECLIP)
		clip= scene->clip;

	/* nothing to do without both a camera and a clip */
	if(!camob || !clip)
		return;

	tracking= &clip->tracking;
	object= BKE_tracking_named_object(tracking, data->object);

	if(object) {
		float mat[4][4], obmat[4][4], imat[4][4], cammat[4][4], camimat[4][4], parmat[4][4];

		/* camera matrix at the current evaluation time */
		where_is_object_mat(scene, camob, cammat);

		/* reconstructed motion of the tracking object at the current frame */
		BKE_tracking_get_interpolated_camera(tracking, object, scene->r.cfra, mat);

		invert_m4_m4(camimat, cammat);
		mult_m4_m4m4(parmat, cammat, data->invmat);

		copy_m4_m4(cammat, camob->obmat);
		copy_m4_m4(obmat, cob->matrix);

		invert_m4_m4(imat, mat);

		mul_serie_m4(cob->matrix, cammat, imat, camimat, parmat, obmat, NULL, NULL, NULL);
	}
}
/* Type-info descriptor for the Object Solver constraint; registered as
 * constraintsTypeInfo[28] in constraints_init_typeinfo(). Callbacks left
 * NULL fall back to the generic constraint handling. */
static bConstraintTypeInfo CTI_OBJECTSOLVER = {
	CONSTRAINT_TYPE_OBJECTSOLVER, /* type */
	sizeof(bObjectSolverConstraint), /* size */
	"Object Solver", /* name */
	"bObjectSolverConstraint", /* struct name */
	NULL, /* free data */
	NULL, /* relink data */
	objectsolver_id_looper, /* id looper */
	NULL, /* copy data */
	objectsolver_new_data, /* new data */
	NULL, /* get constraint targets */
	NULL, /* flush constraint targets */
	NULL, /* get target matrix */
	objectsolver_evaluate /* evaluate */
};
/* ************************* Constraints Type-Info *************************** */
/* All of the constraints api functions use bConstraintTypeInfo structs to carry out
* and operations that involve constraint specific code.
@ -4158,6 +4269,7 @@ static void constraints_init_typeinfo (void)
constraintsTypeInfo[25]= &CTI_PIVOT; /* Pivot Constraint */
constraintsTypeInfo[26]= &CTI_FOLLOWTRACK; /* Follow Track Constraint */
constraintsTypeInfo[27]= &CTI_CAMERASOLVER; /* Camera Solver Constraint */
constraintsTypeInfo[28]= &CTI_OBJECTSOLVER; /* Object Solver Constraint */
}
/* This function should be used for getting the appropriate type-info when only

@ -649,16 +649,21 @@ static void build_dag_object(DagForest *dag, DagNode *scenenode, Scene *scene, O
continue;
/* special case for camera tracking -- it doesn't use targets to define relations */
if(ELEM(cti->type, CONSTRAINT_TYPE_FOLLOWTRACK, CONSTRAINT_TYPE_CAMERASOLVER)) {
if(ELEM3(cti->type, CONSTRAINT_TYPE_FOLLOWTRACK, CONSTRAINT_TYPE_CAMERASOLVER, CONSTRAINT_TYPE_OBJECTSOLVER)) {
int depends_on_camera= 0;
if(cti->type==CONSTRAINT_TYPE_FOLLOWTRACK) {
bFollowTrackConstraint *data= (bFollowTrackConstraint *)con->data;
if((data->clip || data->flag&FOLLOWTRACK_ACTIVECLIP) && data->track[0]) {
if(scene->camera) {
node2 = dag_get_node(dag, scene->camera);
dag_add_relation(dag, node2, node, DAG_RL_DATA_OB|DAG_RL_OB_OB, cti->name);
}
}
if((data->clip || data->flag&FOLLOWTRACK_ACTIVECLIP) && data->track[0])
depends_on_camera= 1;
}
else if(cti->type==CONSTRAINT_TYPE_OBJECTSOLVER)
depends_on_camera= 1;
if(depends_on_camera && scene->camera) {
node2 = dag_get_node(dag, scene->camera);
dag_add_relation(dag, node2, node, DAG_RL_DATA_OB|DAG_RL_OB_OB, cti->name);
}
dag_add_relation(dag,scenenode,node,DAG_RL_SCENE, "Scene Relation");
@ -2165,7 +2170,7 @@ static void dag_object_time_update_flags(Object *ob)
if (cti) {
/* special case for camera tracking -- it doesn't use targets to define relations */
if(ELEM(cti->type, CONSTRAINT_TYPE_FOLLOWTRACK, CONSTRAINT_TYPE_CAMERASOLVER)) {
if(ELEM3(cti->type, CONSTRAINT_TYPE_FOLLOWTRACK, CONSTRAINT_TYPE_CAMERASOLVER, CONSTRAINT_TYPE_OBJECTSOLVER)) {
ob->recalc |= OB_RECALC_OB;
}
else if (cti->get_constraint_targets) {

@ -285,6 +285,10 @@ static void image_assign_ibuf(Image *ima, ImBuf *ibuf, int index, int frame)
break;
ibuf->index= index;
if(ima->flag & IMA_CM_PREDIVIDE)
ibuf->flags |= IB_cm_predivide;
else
ibuf->flags &= ~IB_cm_predivide;
/* this function accepts link==NULL */
BLI_insertlinkbefore(&ima->ibufs, link, ibuf);
@ -2304,9 +2308,17 @@ static ImBuf *image_get_render_result(Image *ima, ImageUser *iuser, void **lock_
/* since its possible to access the buffer from the image directly, set the profile [#25073] */
ibuf->profile= (iuser->scene->r.color_mgt_flag & R_COLOR_MANAGEMENT) ? IB_PROFILE_LINEAR_RGB : IB_PROFILE_NONE;
ibuf->dither= dither;
if(iuser->scene->r.color_mgt_flag & R_COLOR_MANAGEMENT_PREDIVIDE) {
ibuf->flags |= IB_cm_predivide;
ima->flag |= IMA_CM_PREDIVIDE;
}
else {
ibuf->flags &= ~IB_cm_predivide;
ima->flag &= ~IMA_CM_PREDIVIDE;
}
ima->ok= IMA_OK_LOADED;
return ibuf;

@ -816,7 +816,8 @@ void BKE_movieclip_reload(MovieClip *clip)
void BKE_movieclip_update_scopes(MovieClip *clip, MovieClipUser *user, MovieClipScopes *scopes)
{
if(scopes->ok) return;
if(scopes->ok)
return;
if(scopes->track_preview) {
IMB_freeImBuf(scopes->track_preview);
@ -827,8 +828,10 @@ void BKE_movieclip_update_scopes(MovieClip *clip, MovieClipUser *user, MovieClip
scopes->track= NULL;
if(clip) {
if(clip->tracking.act_track) {
MovieTrackingTrack *track= clip->tracking.act_track;
MovieTrackingTrack *act_track= BKE_tracking_active_track(&clip->tracking);
if(act_track) {
MovieTrackingTrack *track= act_track;
MovieTrackingMarker *marker= BKE_tracking_get_marker(track, user->framenr);
if(marker->flag&MARKER_DISABLED) {

@ -55,6 +55,7 @@
#include "BKE_movieclip.h"
#include "BKE_object.h"
#include "BKE_scene.h"
#include "BKE_main.h" // XXX: ...
#include "IMB_imbuf_types.h"
#include "IMB_imbuf.h"
@ -85,11 +86,14 @@ void BKE_tracking_init_settings(MovieTracking *tracking)
tracking->settings.keyframe1= 1;
tracking->settings.keyframe2= 30;
tracking->settings.dist= 1;
tracking->settings.object_distance= 1;
tracking->stabilization.scaleinf= 1.0f;
tracking->stabilization.locinf= 1.0f;
tracking->stabilization.rotinf= 1.0f;
tracking->stabilization.maxscale= 2.0f;
BKE_tracking_new_object(tracking, "Camera");
}
void BKE_tracking_clamp_track(MovieTrackingTrack *track, int event)
@ -208,7 +212,7 @@ void BKE_tracking_track_flag(MovieTrackingTrack *track, int area, int flag, int
}
}
MovieTrackingTrack *BKE_tracking_add_track(MovieTracking *tracking, float x, float y,
MovieTrackingTrack *BKE_tracking_add_track(MovieTracking *tracking, ListBase *tracksbase, float x, float y,
int framenr, int width, int height)
{
MovieTrackingTrack *track;
@ -251,8 +255,8 @@ MovieTrackingTrack *BKE_tracking_add_track(MovieTracking *tracking, float x, flo
if(track->tracker == TRACKER_KLT)
BKE_tracking_clamp_track(track, CLAMP_PYRAMID_LEVELS);
BLI_addtail(&tracking->tracks, track);
BKE_track_unique_name(tracking, track);
BLI_addtail(tracksbase, track);
BKE_track_unique_name(tracksbase, track);
return track;
}
@ -524,18 +528,44 @@ void BKE_tracking_join_tracks(MovieTrackingTrack *dst_track, MovieTrackingTrack
dst_track->markersnr= tot;
}
void BKE_tracking_free(MovieTracking *tracking)
static void tracking_tracks_free(ListBase *tracks)
{
MovieTrackingTrack *track;
for(track= tracking->tracks.first; track; track= track->next) {
for(track= tracks->first; track; track= track->next) {
BKE_tracking_free_track(track);
}
BLI_freelistN(&tracking->tracks);
BLI_freelistN(tracks);
}
if(tracking->reconstruction.cameras)
MEM_freeN(tracking->reconstruction.cameras);
/* Free the memory owned by a reconstruction (the solved camera array);
 * does not free the reconstruction struct itself. */
static void tracking_reconstruction_free(MovieTrackingReconstruction *reconstruction)
{
	if(reconstruction->cameras)
		MEM_freeN(reconstruction->cameras);
}
/* Release all data owned by a single tracking object: its track list and
 * its reconstruction data. The object struct itself is freed by the caller. */
static void tracking_object_free(MovieTrackingObject *object)
{
	tracking_tracks_free(&object->tracks);
	tracking_reconstruction_free(&object->reconstruction);
}
/* Free every tracking object's owned data, then the list nodes themselves. */
static void tracking_objects_free(ListBase *objects)
{
	MovieTrackingObject *ob;

	for(ob= objects->first; ob; ob= ob->next) {
		tracking_object_free(ob);
	}

	BLI_freelistN(objects);
}
void BKE_tracking_free(MovieTracking *tracking)
{
tracking_tracks_free(&tracking->tracks);
tracking_reconstruction_free(&tracking->reconstruction);
tracking_objects_free(&tracking->objects);
if(tracking->stabilization.scaleibuf)
IMB_freeImBuf(tracking->stabilization.scaleibuf);
@ -547,6 +577,9 @@ void BKE_tracking_free(MovieTracking *tracking)
/*********************** tracks map *************************/
typedef struct TracksMap {
char object_name[32];
int is_camera;
int num_tracks;
int customdata_size;
@ -558,10 +591,13 @@ typedef struct TracksMap {
int ptr;
} TracksMap;
static TracksMap *tracks_map_new(int num_tracks, int customdata_size)
static TracksMap *tracks_map_new(const char *object_name, int is_camera, int num_tracks, int customdata_size)
{
TracksMap *map= MEM_callocN(sizeof(TracksMap), "TrackingsMap");
strcpy(map->object_name, object_name);
map->is_camera= is_camera;
map->num_tracks= num_tracks;
map->customdata_size= customdata_size;
@ -607,10 +643,24 @@ static void tracks_map_insert(TracksMap *map, MovieTrackingTrack *track, void *c
static void tracks_map_merge(TracksMap *map, MovieTracking *tracking)
{
MovieTrackingTrack *track;
MovieTrackingTrack *act_track= BKE_tracking_active_track(tracking);
ListBase tracks= {NULL, NULL}, new_tracks= {NULL, NULL};
ListBase *old_tracks= &tracking->tracks;
ListBase *old_tracks;
int a;
if(map->is_camera) {
old_tracks= &tracking->tracks;
} else {
MovieTrackingObject *object= BKE_tracking_named_object(tracking, map->object_name);
if(!object) {
/* object was deleted by user, create new one */
object= BKE_tracking_new_object(tracking, map->object_name);
}
old_tracks= &object->tracks;
}
/* duplicate currently operating tracks to temporary list.
this is needed to keep names in unique state and it's faster to change names
of currently operating tracks (if needed) */
@ -634,7 +684,7 @@ static void tracks_map_merge(TracksMap *map, MovieTracking *tracking)
/* original track was found, re-use flags and remove this track */
if(cur) {
if(cur==tracking->act_track)
if(act_track)
replace_sel= 1;
track->flag= cur->flag;
@ -685,7 +735,7 @@ static void tracks_map_merge(TracksMap *map, MovieTracking *tracking)
track= next;
}
tracking->tracks= new_tracks;
*old_tracks= new_tracks;
}
static void tracks_map_free(TracksMap *map, void (*customdata_free) (void *customdata))
@ -737,27 +787,28 @@ typedef struct MovieTrackingContext {
MovieTrackingSettings settings;
TracksMap *tracks_map;
short backwards, disable_failed, sequence;
short backwards, sequence;
int sync_frame;
} MovieTrackingContext;
MovieTrackingContext *BKE_tracking_context_new(MovieClip *clip, MovieClipUser *user, short backwards, short disable_failed, short sequence)
MovieTrackingContext *BKE_tracking_context_new(MovieClip *clip, MovieClipUser *user, short backwards, short sequence)
{
MovieTrackingContext *context= MEM_callocN(sizeof(MovieTrackingContext), "trackingContext");
MovieTracking *tracking= &clip->tracking;
MovieTrackingSettings *settings= &tracking->settings;
ListBase *tracksbase= BKE_tracking_get_tracks(tracking);
MovieTrackingTrack *track;
MovieTrackingObject *object= BKE_tracking_active_object(tracking);
int num_tracks= 0;
context->settings= *settings;
context->backwards= backwards;
context->disable_failed= disable_failed;
context->sync_frame= user->framenr;
context->first_time= 1;
context->sequence= sequence;
/* count */
track= tracking->tracks.first;
track= tracksbase->first;
while(track) {
if(TRACK_SELECTED(track) && (track->flag&TRACK_LOCKED)==0) {
MovieTrackingMarker *marker= BKE_tracking_get_marker(track, user->framenr);
@ -772,12 +823,13 @@ MovieTrackingContext *BKE_tracking_context_new(MovieClip *clip, MovieClipUser *u
if(num_tracks) {
int width, height;
context->tracks_map= tracks_map_new(num_tracks, sizeof(TrackContext));
context->tracks_map= tracks_map_new(object->name, object->flag & TRACKING_OBJECT_CAMERA,
num_tracks, sizeof(TrackContext));
BKE_movieclip_get_size(clip, user, &width, &height);
/* create tracking data */
track= tracking->tracks.first;
track= tracksbase->first;
while(track) {
if(TRACK_SELECTED(track) && (track->flag&TRACK_LOCKED)==0) {
MovieTrackingMarker *marker= BKE_tracking_get_marker(track, user->framenr);
@ -883,29 +935,53 @@ void BKE_tracking_context_free(MovieTrackingContext *context)
MEM_freeN(context);
}
static void disable_imbuf_channels(ImBuf *ibuf, MovieTrackingTrack *track)
/* zap channels from the imbuf that are disabled by the user. this can lead to
* better tracks sometimes. however, instead of simply zeroing the channels
* out, do a partial grayscale conversion so the display is better. */
static void disable_imbuf_channels(ImBuf *ibuf, MovieTrackingTrack *track, int grayscale)
{
int x, y;
float scale;
if((track->flag&(TRACK_DISABLE_RED|TRACK_DISABLE_GREEN|TRACK_DISABLE_BLUE))==0)
if((track->flag&(TRACK_DISABLE_RED|TRACK_DISABLE_GREEN|TRACK_DISABLE_BLUE))==0 && !grayscale)
return;
/* If only some components are selected, it's important to rescale the result
* appropriately so that e.g. if only blue is selected, it's not zeroed out. */
scale = ((track->flag&TRACK_DISABLE_RED ) ? 0.0f : 0.2126f) +
((track->flag&TRACK_DISABLE_GREEN) ? 0.0f : 0.7152f) +
((track->flag&TRACK_DISABLE_BLUE) ? 0.0f : 0.0722f);
for(y= 0; y<ibuf->y; y++) {
for (x= 0; x<ibuf->x; x++) {
int pixel= ibuf->x*y + x;
if(ibuf->rect_float) {
float *rrgbf= ibuf->rect_float + pixel*4;
if(track->flag&TRACK_DISABLE_RED) rrgbf[0]= 0;
if(track->flag&TRACK_DISABLE_GREEN) rrgbf[1]= 0;
if(track->flag&TRACK_DISABLE_BLUE) rrgbf[2]= 0;
float r = (track->flag&TRACK_DISABLE_RED) ? 0.0f : rrgbf[0];
float g = (track->flag&TRACK_DISABLE_GREEN) ? 0.0f : rrgbf[1];
float b = (track->flag&TRACK_DISABLE_BLUE) ? 0.0f : rrgbf[2];
if (grayscale) {
float gray = (0.2126f*r + 0.7152f*g + 0.0722f*b) / scale;
rrgbf[0] = rrgbf[1] = rrgbf[2] = gray;
} else {
rrgbf[0] = r;
rrgbf[1] = g;
rrgbf[2] = b;
}
} else {
char *rrgb= (char*)ibuf->rect + pixel*4;
if(track->flag&TRACK_DISABLE_RED) rrgb[0]= 0;
if(track->flag&TRACK_DISABLE_GREEN) rrgb[1]= 0;
if(track->flag&TRACK_DISABLE_BLUE) rrgb[2]= 0;
char r = (track->flag&TRACK_DISABLE_RED) ? 0 : rrgb[0];
char g = (track->flag&TRACK_DISABLE_GREEN) ? 0 : rrgb[1];
char b = (track->flag&TRACK_DISABLE_BLUE) ? 0 : rrgb[2];
if (grayscale) {
float gray = (0.2126f*r + 0.7152f*g + 0.0722f*b) / scale;
rrgb[0] = rrgb[1] = rrgb[2] = gray;
} else {
rrgb[0] = r;
rrgb[1] = g;
rrgb[2] = b;
}
}
}
}
@ -947,7 +1023,12 @@ static ImBuf *get_area_imbuf(ImBuf *ibuf, MovieTrackingTrack *track, MovieTracki
origin[1]= y1-margin;
}
disable_imbuf_channels(tmpibuf, track);
if ((track->flag & TRACK_PREVIEW_GRAYSCALE) ||
(track->flag & TRACK_DISABLE_RED) ||
(track->flag & TRACK_DISABLE_GREEN) ||
(track->flag & TRACK_DISABLE_BLUE) ) {
disable_imbuf_channels(tmpibuf, track, 1 /* grayscale */);
}
return tmpibuf;
}
@ -976,7 +1057,7 @@ static float *get_search_floatbuf(ImBuf *ibuf, MovieTrackingTrack *track, MovieT
height= (track->search_max[1]-track->search_min[1])*ibuf->y;
tmpibuf= BKE_tracking_get_search_imbuf(ibuf, track, marker, 0, 0, pos, origin);
disable_imbuf_channels(tmpibuf, track);
disable_imbuf_channels(tmpibuf, track, 0 /* don't grayscale */);
*width_r= width;
*height_r= height;
@ -988,14 +1069,11 @@ static float *get_search_floatbuf(ImBuf *ibuf, MovieTrackingTrack *track, MovieT
if(tmpibuf->rect_float) {
float *rrgbf= tmpibuf->rect_float + pixel*4;
*fp= 0.2126*rrgbf[0] + 0.7152*rrgbf[1] + 0.0722*rrgbf[2];
} else {
unsigned char *rrgb= (unsigned char*)tmpibuf->rect + pixel*4;
*fp= (0.2126*rrgb[0] + 0.7152*rrgb[1] + 0.0722*rrgb[2])/255.0f;
}
fp++;
}
}
@ -1017,14 +1095,11 @@ static unsigned char *get_ucharbuf(ImBuf *ibuf)
if(ibuf->rect_float) {
float *rrgbf= ibuf->rect_float + pixel*4;
*cp= FTOCHAR(0.2126f*rrgbf[0] + 0.7152f*rrgbf[1] + 0.0722f*rrgbf[2]);
} else {
unsigned char *rrgb= (unsigned char*)ibuf->rect + pixel*4;
*cp= 0.2126f*rrgb[0] + 0.7152f*rrgb[1] + 0.0722f*rrgb[2];
}
cp++;
}
}
@ -1039,7 +1114,7 @@ static unsigned char *get_search_bytebuf(ImBuf *ibuf, MovieTrackingTrack *track,
unsigned char *pixels;
tmpibuf= BKE_tracking_get_search_imbuf(ibuf, track, marker, 0, 0, pos, origin);
disable_imbuf_channels(tmpibuf, track);
disable_imbuf_channels(tmpibuf, track, 0 /* don't grayscale */);
*width_r= tmpibuf->x;
*height_r= tmpibuf->y;
@ -1303,7 +1378,7 @@ int BKE_tracking_next(MovieTrackingContext *context)
}
coords_correct= !onbound && !isnan(x2) && !isnan(y2) && finite(x2) && finite(y2);
if(coords_correct && (tracked || !context->disable_failed)) {
if(coords_correct && !onbound && tracked) {
if(context->first_time) {
#pragma omp critical
{
@ -1370,6 +1445,8 @@ typedef struct MovieReconstructContext {
struct libmv_Reconstruction *reconstruction;
#endif
char object_name[32];
int is_camera;
float focal_length;
float principal_point[2];
@ -1391,13 +1468,13 @@ typedef struct ReconstructProgressData {
} ReconstructProgressData;
#if WITH_LIBMV
static struct libmv_Tracks *create_libmv_tracks(MovieTracking *tracking, int width, int height)
static struct libmv_Tracks *create_libmv_tracks(ListBase *tracksbase, int width, int height)
{
int tracknr= 0;
MovieTrackingTrack *track;
struct libmv_Tracks *tracks= libmv_tracksNew();
track= tracking->tracks.first;
track= tracksbase->first;
while(track) {
int a= 0;
@ -1441,16 +1518,28 @@ static void retrieve_libmv_reconstruct_intrinscis(MovieReconstructContext *conte
static int retrieve_libmv_reconstruct_tracks(MovieReconstructContext *context, MovieTracking *tracking)
{
struct libmv_Reconstruction *libmv_reconstruction= context->reconstruction;
MovieTrackingReconstruction *reconstruction= &tracking->reconstruction;
MovieTrackingReconstruction *reconstruction= NULL;
MovieReconstructedCamera *reconstructed;
MovieTrackingTrack *track;
ListBase *tracksbase= NULL;
int ok= 1, tracknr= 0, a, origin_set= 0;
int sfra= context->sfra, efra= context->efra;
float imat[4][4];
if(context->is_camera) {
tracksbase= &tracking->tracks;
reconstruction= &tracking->reconstruction;
}
else {
MovieTrackingObject *object= BKE_tracking_named_object(tracking, context->object_name);
tracksbase= &object->tracks;
reconstruction= &object->reconstruction;
}
unit_m4(imat);
track= tracking->tracks.first;
track= tracksbase->first;
while(track) {
double pos[3];
@ -1516,7 +1605,7 @@ static int retrieve_libmv_reconstruct_tracks(MovieReconstructContext *context, M
}
if(origin_set) {
track= tracking->tracks.first;
track= tracksbase->first;
while(track) {
if(track->flag&TRACK_HAS_BUNDLE)
mul_v3_m4v3(track->bundle_pos, imat, track->bundle_pos);
@ -1532,19 +1621,20 @@ static int retrieve_libmv_reconstruct_tracks(MovieReconstructContext *context, M
static int retrieve_libmv_reconstruct(MovieReconstructContext *context, MovieTracking *tracking)
{
tracks_map_merge(context->tracks_map, tracking);
/* take the intrinscis back from libmv */
retrieve_libmv_reconstruct_intrinscis(context, tracking);
return retrieve_libmv_reconstruct_tracks(context, tracking);
}
static int get_refine_intrinsics_flags(MovieTracking *tracking)
static int get_refine_intrinsics_flags(MovieTracking *tracking, MovieTrackingObject *object)
{
int refine= tracking->settings.refine_camera_intrinsics;
int flags= 0;
if((object->flag&TRACKING_OBJECT_CAMERA)==0)
return 0;
if(refine&REFINE_FOCAL_LENGTH)
flags|= LIBMV_REFINE_FOCAL_LENGTH;
@ -1560,13 +1650,13 @@ static int get_refine_intrinsics_flags(MovieTracking *tracking)
return flags;
}
static int count_tracks_on_both_keyframes(MovieTracking *tracking)
static int count_tracks_on_both_keyframes(MovieTracking *tracking, ListBase *tracksbase)
{
int tot= 0;
int frame1= tracking->settings.keyframe1, frame2= tracking->settings.keyframe2;
MovieTrackingTrack *track;
track= tracking->tracks.first;
track= tracksbase->first;
while(track) {
if(BKE_tracking_has_marker(track, frame1))
if(BKE_tracking_has_marker(track, frame2))
@ -1579,10 +1669,12 @@ static int count_tracks_on_both_keyframes(MovieTracking *tracking)
}
#endif
int BKE_tracking_can_reconstruct(MovieTracking *tracking, char *error_msg, int error_size)
int BKE_tracking_can_reconstruct(MovieTracking *tracking, MovieTrackingObject *object, char *error_msg, int error_size)
{
#if WITH_LIBMV
if(count_tracks_on_both_keyframes(tracking)<8) {
ListBase *tracksbase= BKE_tracking_object_tracks(tracking, object);
if(count_tracks_on_both_keyframes(tracking, tracksbase)<8) {
BLI_strncpy(error_msg, "At least 8 tracks on both of keyframes are needed for reconstruction", error_size);
return 0;
}
@ -1597,24 +1689,29 @@ int BKE_tracking_can_reconstruct(MovieTracking *tracking, char *error_msg, int e
}
MovieReconstructContext* BKE_tracking_reconstruction_context_new(MovieTracking *tracking,
int keyframe1, int keyframe2, int width, int height)
MovieTrackingObject *object, int keyframe1, int keyframe2, int width, int height)
{
MovieReconstructContext *context= MEM_callocN(sizeof(MovieReconstructContext), "MovieReconstructContext data");
MovieTrackingCamera *camera= &tracking->camera;
ListBase *tracksbase= BKE_tracking_object_tracks(tracking, object);
float aspy= 1.0f/tracking->camera.pixel_aspect;
int num_tracks= BLI_countlist(&tracking->tracks);
int num_tracks= BLI_countlist(tracksbase);
int sfra= INT_MAX, efra= INT_MIN;
MovieTrackingTrack *track;
context->tracks_map= tracks_map_new(num_tracks, 0);
track= tracking->tracks.first;
strcpy(context->object_name, object->name);
context->is_camera = object->flag&TRACKING_OBJECT_CAMERA;
context->tracks_map= tracks_map_new(context->object_name, context->is_camera, num_tracks, 0);
track= tracksbase->first;
while(track) {
int first= 0, last= track->markersnr;
int first= 0, last= track->markersnr-1;
MovieTrackingMarker *first_marker= &track->markers[0];
MovieTrackingMarker *last_marker= &track->markers[track->markersnr-1];
/* find first not-disabled marker */
while(first<track->markersnr-1 && first_marker->flag&MARKER_DISABLED) {
while(first<=track->markersnr-1 && first_marker->flag&MARKER_DISABLED) {
first++;
first_marker++;
}
@ -1640,10 +1737,10 @@ MovieReconstructContext* BKE_tracking_reconstruction_context_new(MovieTracking *
context->efra= efra;
#ifdef WITH_LIBMV
context->tracks= create_libmv_tracks(tracking, width, height*aspy);
context->tracks= create_libmv_tracks(tracksbase, width, height*aspy);
context->keyframe1= keyframe1;
context->keyframe2= keyframe2;
context->refine_flags= get_refine_intrinsics_flags(tracking);
context->refine_flags= get_refine_intrinsics_flags(tracking, object);
#else
(void) width;
(void) height;
@ -1740,8 +1837,22 @@ void BKE_tracking_solve_reconstruction(MovieReconstructContext *context, short *
int BKE_tracking_finish_reconstruction(MovieReconstructContext *context, MovieTracking *tracking)
{
tracking->reconstruction.error= context->reprojection_error;
tracking->reconstruction.flag|= TRACKING_RECONSTRUCTED;
MovieTrackingReconstruction *reconstruction;
tracks_map_merge(context->tracks_map, tracking);
if(context->is_camera) {
reconstruction= &tracking->reconstruction;
}
else {
MovieTrackingObject *object;
object= BKE_tracking_named_object(tracking, context->object_name);
reconstruction= &object->reconstruction;
}
reconstruction->error= context->reprojection_error;
reconstruction->flag|= TRACKING_RECONSTRUCTED;
#ifdef WITH_LIBMV
if(!retrieve_libmv_reconstruct(context, tracking))
@ -1751,14 +1862,15 @@ int BKE_tracking_finish_reconstruction(MovieReconstructContext *context, MovieTr
return 1;
}
void BKE_track_unique_name(MovieTracking *tracking, MovieTrackingTrack *track)
void BKE_track_unique_name(ListBase *tracksbase, MovieTrackingTrack *track)
{
BLI_uniquename(&tracking->tracks, track, "Track", '.', offsetof(MovieTrackingTrack, name), sizeof(track->name));
BLI_uniquename(tracksbase, track, "Track", '.', offsetof(MovieTrackingTrack, name), sizeof(track->name));
}
MovieTrackingTrack *BKE_tracking_named_track(MovieTracking *tracking, const char *name)
MovieTrackingTrack *BKE_tracking_named_track(MovieTracking *tracking, MovieTrackingObject *object, const char *name)
{
MovieTrackingTrack *track= tracking->tracks.first;
ListBase *tracksbase= BKE_tracking_object_tracks(tracking, object);
MovieTrackingTrack *track= tracksbase->first;
while(track) {
if(!strcmp(track->name, name))
@ -1770,9 +1882,8 @@ MovieTrackingTrack *BKE_tracking_named_track(MovieTracking *tracking, const char
return NULL;
}
static int reconstruction_camera_index(MovieTracking *tracking, int framenr, int nearest)
static int reconstruction_camera_index(MovieTrackingReconstruction *reconstruction, int framenr, int nearest)
{
MovieTrackingReconstruction *reconstruction= &tracking->reconstruction;
MovieReconstructedCamera *cameras= reconstruction->cameras;
int a= 0, d= 1;
@ -1824,21 +1935,41 @@ static int reconstruction_camera_index(MovieTracking *tracking, int framenr, int
return -1;
}
MovieReconstructedCamera *BKE_tracking_get_reconstructed_camera(MovieTracking *tracking, int framenr)
static void scale_reconstructed_camera(MovieTrackingObject *object, float mat[4][4])
{
int a= reconstruction_camera_index(tracking, framenr, 0);
if((object->flag&TRACKING_OBJECT_CAMERA)==0) {
float smat[4][4];
scale_m4_fl(smat, 1.0f/object->scale);
mult_m4_m4m4(mat, mat, smat);
}
}
MovieReconstructedCamera *BKE_tracking_get_reconstructed_camera(MovieTracking *tracking,
MovieTrackingObject *object, int framenr)
{
MovieTrackingReconstruction *reconstruction;
int a;
reconstruction= BKE_tracking_object_reconstruction(tracking, object);
a= reconstruction_camera_index(reconstruction, framenr, 0);
if(a==-1)
return NULL;
return &tracking->reconstruction.cameras[a];
return &reconstruction->cameras[a];
}
void BKE_tracking_get_interpolated_camera(MovieTracking *tracking, int framenr, float mat[4][4])
void BKE_tracking_get_interpolated_camera(MovieTracking *tracking, MovieTrackingObject *object,
int framenr, float mat[4][4])
{
MovieTrackingReconstruction *reconstruction= &tracking->reconstruction;
MovieReconstructedCamera *cameras= reconstruction->cameras;
int a= reconstruction_camera_index(tracking, framenr, 1);
MovieTrackingReconstruction *reconstruction;
MovieReconstructedCamera *cameras;
int a;
reconstruction= BKE_tracking_object_reconstruction(tracking, object);
cameras= reconstruction->cameras;
a= reconstruction_camera_index(reconstruction, framenr, 1);
if(a==-1) {
unit_m4(mat);
@ -1852,6 +1983,8 @@ void BKE_tracking_get_interpolated_camera(MovieTracking *tracking, int framenr,
} else {
copy_m4_m4(mat, cameras[a].mat);
}
scale_reconstructed_camera(object, mat);
}
void BKE_get_tracking_mat(Scene *scene, Object *ob, float mat[4][4])
@ -1890,7 +2023,8 @@ void BKE_tracking_camera_to_blender(MovieTracking *tracking, Scene *scene, Camer
BKE_tracking_camera_shift(tracking, width, height, &camera->shiftx, &camera->shifty);
}
void BKE_tracking_projection_matrix(MovieTracking *tracking, int framenr, int winx, int winy, float mat[4][4])
void BKE_tracking_projection_matrix(MovieTracking *tracking, MovieTrackingObject *object,
int framenr, int winx, int winy, float mat[4][4])
{
MovieReconstructedCamera *camera;
float lens= tracking->camera.focal*tracking->camera.sensor_width/(float)winx;
@ -1923,7 +2057,8 @@ void BKE_tracking_projection_matrix(MovieTracking *tracking, int framenr, int wi
perspective_m4(winmat, left, right, bottom, top, clipsta, clipend);
camera= BKE_tracking_get_reconstructed_camera(tracking, framenr);
camera= BKE_tracking_get_reconstructed_camera(tracking, object, framenr);
if(camera) {
float imat[4][4];
@ -1932,6 +2067,77 @@ void BKE_tracking_projection_matrix(MovieTracking *tracking, int framenr, int wi
} else copy_m4_m4(mat, winmat);
}
ListBase *BKE_tracking_get_tracks(MovieTracking *tracking)
{
MovieTrackingObject *object= BKE_tracking_active_object(tracking);
if(object && (object->flag & TRACKING_OBJECT_CAMERA) == 0) {
return &object->tracks;
}
return &tracking->tracks;
}
MovieTrackingTrack *BKE_tracking_active_track(MovieTracking *tracking)
{
ListBase *tracksbase;
if(!tracking->act_track)
return NULL;
tracksbase= BKE_tracking_get_tracks(tracking);
/* check that active track is in current tracks list */
if(BLI_findindex(tracksbase, tracking->act_track) >= 0)
return tracking->act_track;
return NULL;
}
MovieTrackingObject *BKE_tracking_active_object(MovieTracking *tracking)
{
return BLI_findlink(&tracking->objects, tracking->objectnr);
}
MovieTrackingObject *BKE_tracking_get_camera_object(MovieTracking *tracking)
{
MovieTrackingObject *object= tracking->objects.first;
while(object) {
if(object->flag & TRACKING_OBJECT_CAMERA)
return object;
object= object->next;
}
return NULL;
}
ListBase *BKE_tracking_object_tracks(MovieTracking *tracking, MovieTrackingObject *object)
{
if(object->flag & TRACKING_OBJECT_CAMERA) {
return &tracking->tracks;
}
return &object->tracks;
}
MovieTrackingReconstruction *BKE_tracking_object_reconstruction(MovieTracking *tracking, MovieTrackingObject *object)
{
if(object->flag & TRACKING_OBJECT_CAMERA) {
return &tracking->reconstruction;
}
return &object->reconstruction;
}
MovieTrackingReconstruction *BKE_tracking_get_reconstruction(MovieTracking *tracking)
{
MovieTrackingObject *object= BKE_tracking_active_object(tracking);
return BKE_tracking_object_reconstruction(tracking, object);
}
void BKE_tracking_apply_intrinsics(MovieTracking *tracking, float co[2], float nco[2])
{
MovieTrackingCamera *camera= &tracking->camera;
@ -2018,8 +2224,9 @@ static int point_in_layer(bGPDlayer *layer, float x, float y)
return 0;
}
static void retrieve_libmv_features(MovieTracking *tracking, struct libmv_Features *features,
int framenr, int width, int height, bGPDlayer *layer, int place_outside_layer)
static void retrieve_libmv_features(MovieTracking *tracking, ListBase *tracksbase,
struct libmv_Features *features, int framenr, int width, int height,
bGPDlayer *layer, int place_outside_layer)
{
int a;
@ -2039,7 +2246,7 @@ static void retrieve_libmv_features(MovieTracking *tracking, struct libmv_Featur
ok= point_in_layer(layer, xu, yu)!=place_outside_layer;
if(ok) {
track= BKE_tracking_add_track(tracking, xu, yu, framenr, width, height);
track= BKE_tracking_add_track(tracking, tracksbase, xu, yu, framenr, width, height);
track->flag|= SELECT;
track->pat_flag|= SELECT;
track->search_flag|= SELECT;
@ -2048,7 +2255,7 @@ static void retrieve_libmv_features(MovieTracking *tracking, struct libmv_Featur
}
#endif
void BKE_tracking_detect_fast(MovieTracking *tracking, ImBuf *ibuf,
void BKE_tracking_detect_fast(MovieTracking *tracking, ListBase *tracksbase, ImBuf *ibuf,
int framenr, int margin, int min_trackness, int min_distance, bGPDlayer *layer,
int place_outside_layer)
{
@ -2056,11 +2263,13 @@ void BKE_tracking_detect_fast(MovieTracking *tracking, ImBuf *ibuf,
struct libmv_Features *features;
unsigned char *pixels= get_ucharbuf(ibuf);
features= libmv_detectFeaturesFAST(pixels, ibuf->x, ibuf->y, ibuf->x, margin, min_trackness, min_distance);
features= libmv_detectFeaturesFAST(pixels, ibuf->x, ibuf->y, ibuf->x,
margin, min_trackness, min_distance);
MEM_freeN(pixels);
retrieve_libmv_features(tracking, features, framenr, ibuf->x, ibuf->y, layer, place_outside_layer);
retrieve_libmv_features(tracking, tracksbase, features, framenr,
ibuf->x, ibuf->y, layer, place_outside_layer);
libmv_destroyFeatures(features);
#else
@ -2075,22 +2284,34 @@ void BKE_tracking_detect_fast(MovieTracking *tracking, ImBuf *ibuf,
#endif
}
MovieTrackingTrack *BKE_tracking_indexed_track(MovieTracking *tracking, int tracknr)
MovieTrackingTrack *BKE_tracking_indexed_track(MovieTracking *tracking, int tracknr, ListBase **tracksbase_r)
{
MovieTrackingTrack *track= tracking->tracks.first;
MovieTrackingObject *object;
int cur= 1;
while(track) {
if(track->flag&TRACK_HAS_BUNDLE) {
if(cur==tracknr)
return track;
object= tracking->objects.first;
while(object) {
ListBase *tracksbase= BKE_tracking_object_tracks(tracking, object);
MovieTrackingTrack *track= tracksbase->first;
cur++;
while(track) {
if(track->flag&TRACK_HAS_BUNDLE) {
if(cur==tracknr) {
*tracksbase_r= tracksbase;
return track;
}
cur++;
}
track= track->next;
}
track= track->next;
object= object->next;
}
*tracksbase_r= NULL;
return NULL;
}
@ -2102,6 +2323,8 @@ static int stabilization_median_point(MovieTracking *tracking, int framenr, floa
INIT_MINMAX2(min, max);
(void) tracking;
track= tracking->tracks.first;
while(track) {
if(track->flag&TRACK_USE_2D_STAB) {
@ -2539,12 +2762,12 @@ ImBuf *BKE_tracking_distort(MovieTracking *tracking, ImBuf *ibuf, int width, int
}
/* area - which part of marker should be selected. see TRACK_AREA_* constants */
void BKE_tracking_select_track(MovieTracking *tracking, MovieTrackingTrack *track, int area, int extend)
void BKE_tracking_select_track(ListBase *tracksbase, MovieTrackingTrack *track, int area, int extend)
{
if(extend) {
BKE_tracking_track_flag(track, area, SELECT, 0);
} else {
MovieTrackingTrack *cur= tracking->tracks.first;
MovieTrackingTrack *cur= tracksbase->first;
while(cur) {
if(cur==track) {
@ -2564,3 +2787,78 @@ void BKE_tracking_deselect_track(MovieTrackingTrack *track, int area)
{
BKE_tracking_track_flag(track, area, SELECT, 1);
}
MovieTrackingObject *BKE_tracking_new_object(MovieTracking *tracking, const char *name)
{
MovieTrackingObject *object= MEM_callocN(sizeof(MovieTrackingObject), "tracking object");
if(tracking->tot_object==0) {
/* first object is always camera */
BLI_strncpy(object->name, "Camera", sizeof(object->name));
object->flag|= TRACKING_OBJECT_CAMERA;
}
else {
BLI_strncpy(object->name, name, sizeof(object->name));
}
BLI_addtail(&tracking->objects, object);
tracking->tot_object++;
tracking->objectnr= BLI_countlist(&tracking->objects) - 1;
BKE_tracking_object_unique_name(tracking, object);
return object;
}
void BKE_tracking_remove_object(MovieTracking *tracking, MovieTrackingObject *object)
{
MovieTrackingTrack *track;
int index= BLI_findindex(&tracking->objects, object);
if(index<0)
return;
if(object->flag & TRACKING_OBJECT_CAMERA) {
/* object used for camera solving can't be deleted */
return;
}
track= object->tracks.first;
while(track) {
if(track==tracking->act_track)
tracking->act_track= NULL;
track= track->next;
}
tracking_object_free(object);
BLI_freelinkN(&tracking->objects, object);
tracking->tot_object--;
if(index>0)
tracking->objectnr= index-1;
else
tracking->objectnr= 0;
}
void BKE_tracking_object_unique_name(MovieTracking *tracking, MovieTrackingObject *object)
{
BLI_uniquename(&tracking->objects, object, "Object", '.', offsetof(MovieTrackingObject, name), sizeof(object->name));
}
MovieTrackingObject *BKE_tracking_named_object(MovieTracking *tracking, const char *name)
{
MovieTrackingObject *object= tracking->objects.first;
while(object) {
if(!strcmp(object->name, name))
return object;
object= object->next;
}
return NULL;
}

@ -195,8 +195,14 @@ void mul_m3_m3m3(float m1[][3], float m3_[][3], float m2_[][3])
m1[2][2]= m2[2][0]*m3[0][2] + m2[2][1]*m3[1][2] + m2[2][2]*m3[2][2];
}
void mul_m4_m4m3(float (*m1)[4], float (*m3)[4], float (*m2)[3])
void mul_m4_m4m3(float (*m1)[4], float (*m3_)[4], float (*m2_)[3])
{
float m2[3][3], m3[4][4];
/* copy so it works when m1 is the same pointer as m2 or m3 */
copy_m3_m3(m2, m2_);
copy_m4_m4(m3, m3_);
m1[0][0]= m2[0][0]*m3[0][0] + m2[0][1]*m3[1][0] + m2[0][2]*m3[2][0];
m1[0][1]= m2[0][0]*m3[0][1] + m2[0][1]*m3[1][1] + m2[0][2]*m3[2][1];
m1[0][2]= m2[0][0]*m3[0][2] + m2[0][1]*m3[1][2] + m2[0][2]*m3[2][2];

@ -6048,10 +6048,29 @@ static void lib_link_group(FileData *fd, Main *main)
/* ***************** READ MOVIECLIP *************** */
static void direct_link_movieReconstruction(FileData *fd, MovieTrackingReconstruction *reconstruction)
{
reconstruction->cameras= newdataadr(fd, reconstruction->cameras);
}
static void direct_link_movieTracks(FileData *fd, ListBase *tracksbase)
{
MovieTrackingTrack *track;
link_list(fd, tracksbase);
track= tracksbase->first;
while(track) {
track->markers= newdataadr(fd, track->markers);
track= track->next;
}
}
static void direct_link_movieclip(FileData *fd, MovieClip *clip)
{
MovieTracking *tracking= &clip->tracking;
MovieTrackingTrack *track;
MovieTrackingObject *object;
if(fd->movieclipmap) clip->cache= newmclipadr(fd, clip->cache);
else clip->cache= NULL;
@ -6059,16 +6078,8 @@ static void direct_link_movieclip(FileData *fd, MovieClip *clip)
if(fd->movieclipmap) clip->tracking.camera.intrinsics= newmclipadr(fd, clip->tracking.camera.intrinsics);
else clip->tracking.camera.intrinsics= NULL;
tracking->reconstruction.cameras= newdataadr(fd, tracking->reconstruction.cameras);
link_list(fd, &tracking->tracks);
track= tracking->tracks.first;
while(track) {
track->markers= newdataadr(fd, track->markers);
track= track->next;
}
direct_link_movieTracks(fd, &tracking->tracks);
direct_link_movieReconstruction(fd, &tracking->reconstruction);
clip->tracking.act_track= newdataadr(fd, clip->tracking.act_track);
@ -6079,6 +6090,16 @@ static void direct_link_movieclip(FileData *fd, MovieClip *clip)
clip->tracking.stabilization.ok= 0;
clip->tracking.stabilization.scaleibuf= NULL;
clip->tracking.stabilization.rot_track= newdataadr(fd, clip->tracking.stabilization.rot_track);
link_list(fd, &tracking->objects);
object= tracking->objects.first;
while(object) {
direct_link_movieTracks(fd, &object->tracks);
direct_link_movieReconstruction(fd, &object->reconstruction);
object= object->next;
}
}
static void lib_link_movieclip(FileData *fd, Main *main)
@ -12852,8 +12873,43 @@ static void do_versions(FileData *fd, Library *lib, Main *main)
}
{
MovieClip *clip;
for(clip= main->movieclip.first; clip; clip= clip->id.next) {
Object *ob;
for (clip= main->movieclip.first; clip; clip= clip->id.next) {
MovieTracking *tracking= &clip->tracking;
MovieTrackingObject *tracking_object= tracking->objects.first;
clip->proxy.build_tc_flag|= IMB_TC_RECORD_RUN_NO_GAPS;
if(!tracking->settings.object_distance)
tracking->settings.object_distance= 1.0f;
if(tracking->objects.first == NULL)
BKE_tracking_new_object(tracking, "Camera");
while(tracking_object) {
if(!tracking_object->scale)
tracking_object->scale= 1.0f;
tracking_object= tracking_object->next;
}
}
for (ob= main->object.first; ob; ob= ob->id.next) {
bConstraint *con;
for (con= ob->constraints.first; con; con=con->next) {
bConstraintTypeInfo *cti= constraint_get_typeinfo(con);
if(!cti)
continue;
if(cti->type==CONSTRAINT_TYPE_OBJECTSOLVER) {
bObjectSolverConstraint *data= (bObjectSolverConstraint *)con->data;
if(data->invmat[3][3]==0.0f)
unit_m4(data->invmat);
}
}
}
}
}

@ -2618,6 +2618,27 @@ static void write_scripts(WriteData *wd, ListBase *idbase)
}
}
static void write_movieTracks(WriteData *wd, ListBase *tracks)
{
MovieTrackingTrack *track;
track= tracks->first;
while(track) {
writestruct(wd, DATA, "MovieTrackingTrack", 1, track);
if(track->markers)
writestruct(wd, DATA, "MovieTrackingMarker", track->markersnr, track->markers);
track= track->next;
}
}
static void write_movieReconstruction(WriteData *wd, MovieTrackingReconstruction *reconstruction)
{
if(reconstruction->camnr)
writestruct(wd, DATA, "MovieReconstructedCamera", reconstruction->camnr, reconstruction->cameras);
}
static void write_movieclips(WriteData *wd, ListBase *idbase)
{
MovieClip *clip;
@ -2626,20 +2647,20 @@ static void write_movieclips(WriteData *wd, ListBase *idbase)
while(clip) {
if(clip->id.us>0 || wd->current) {
MovieTracking *tracking= &clip->tracking;
MovieTrackingTrack *track;
MovieTrackingObject *object;
writestruct(wd, ID_MC, "MovieClip", 1, clip);
if(tracking->reconstruction.camnr)
writestruct(wd, DATA, "MovieReconstructedCamera", tracking->reconstruction.camnr, tracking->reconstruction.cameras);
write_movieTracks(wd, &tracking->tracks);
write_movieReconstruction(wd, &tracking->reconstruction);
track= tracking->tracks.first;
while(track) {
writestruct(wd, DATA, "MovieTrackingTrack", 1, track);
object= tracking->objects.first;
while(object) {
writestruct(wd, DATA, "MovieTrackingObject", 1, object);
if(track->markers)
writestruct(wd, DATA, "MovieTrackingMarker", track->markersnr, track->markers);
write_movieTracks(wd, &object->tracks);
write_movieReconstruction(wd, &object->reconstruction);
track= track->next;
object= object->next;
}
}

@ -136,13 +136,8 @@ void glaDrawPixelsSafe (float x, float y, int img_w, int img_h, int row_w, int
* is expected to be in RGBA byte or float format, and the
* modelview and projection matrices are assumed to define a
* 1-to-1 mapping to screen space.
* @param gamma_correct Optionally gamma correct float sources to sRGB for display
*/
/* only for float rects, converts to 32 bits and draws */
void glaDrawPixelsSafe_to32(float fx, float fy, int img_w, int img_h, int row_w, float *rectf, int gamma_correct);
void glaDrawPixelsTex (float x, float y, int img_w, int img_h, int format, void *rect);
void glaDrawPixelsTexScaled(float x, float y, int img_w, int img_h, int format, void *rect, float scaleX, float scaleY);

@ -1465,6 +1465,8 @@ static ImBuf *scale_trackpreview_ibuf(ImBuf *ibuf, float zoomx, float zoomy)
{
ImBuf *scaleibuf;
int x, y, w= ibuf->x*zoomx, h= ibuf->y*zoomy;
const float max_x= ibuf->x-1.0f;
const float max_y= ibuf->y-1.0f;
const float scalex= 1.0f/zoomx;
const float scaley= 1.0f/zoomy;
@ -1472,15 +1474,13 @@ static ImBuf *scale_trackpreview_ibuf(ImBuf *ibuf, float zoomx, float zoomy)
for(y= 0; y<scaleibuf->y; y++) {
for (x= 0; x<scaleibuf->x; x++) {
int pixel= scaleibuf->x*y + x;
int orig_pixel= ibuf->x*(int)(scaley*(float)y) + (int)(scalex*(float)x);
char *rrgb= (char*)scaleibuf->rect + pixel*4;
char *orig_rrgb= (char*)ibuf->rect + orig_pixel*4;
float src_x= scalex*x;
float src_y= scaley*y;
rrgb[0]= orig_rrgb[0];
rrgb[1]= orig_rrgb[1];
rrgb[2]= orig_rrgb[2];
rrgb[3]= orig_rrgb[3];
CLAMP(src_x, 0, max_x);
CLAMP(src_y, 0, max_y);
bicubic_interpolation(ibuf, scaleibuf, src_x, src_y, x, y);
}
}

@ -2201,6 +2201,17 @@ static void list_item_row(bContext *C, uiLayout *layout, PointerRNA *ptr, Pointe
}
uiDefButR(block, OPTION, 0, "", 0, 0, UI_UNIT_X, UI_UNIT_Y, itemptr, "is_active", i, 0, 0, 0, 0, NULL);
}
else if(itemptr->type == &RNA_MovieTrackingObject) {
MovieTrackingObject *tracking_object= (MovieTrackingObject*)itemptr->data;
split= uiLayoutSplit(sub, 0.75f, 0);
if(tracking_object->flag&TRACKING_OBJECT_CAMERA) {
uiItemL(split, name, ICON_CAMERA_DATA);
}
else {
uiItemL(split, name, ICON_OBJECT_DATA);
}
}
/* There is a last chance to display custom controls (in addition to the name/label):
* If the given item property group features a string property named as prop_list,

@ -408,8 +408,21 @@ static void test_constraints (Object *owner, bPoseChannel *pchan)
if((data->flag&CAMERASOLVER_ACTIVECLIP)==0) {
if(data->clip != NULL && data->track[0]) {
if (!BKE_tracking_named_track(&data->clip->tracking, data->track))
MovieTracking *tracking= &data->clip->tracking;
MovieTrackingObject *tracking_object;
if(data->object[0])
tracking_object= BKE_tracking_named_object(tracking, data->object);
else
tracking_object= BKE_tracking_get_camera_object(tracking);
if(!tracking_object) {
curcon->flag |= CONSTRAINT_DISABLE;
}
else {
if (!BKE_tracking_named_track(tracking, tracking_object, data->track))
curcon->flag |= CONSTRAINT_DISABLE;
}
}
else curcon->flag |= CONSTRAINT_DISABLE;
}
@ -420,6 +433,12 @@ static void test_constraints (Object *owner, bPoseChannel *pchan)
if((data->flag&CAMERASOLVER_ACTIVECLIP)==0 && data->clip == NULL)
curcon->flag |= CONSTRAINT_DISABLE;
}
else if (curcon->type == CONSTRAINT_TYPE_OBJECTSOLVER) {
bObjectSolverConstraint *data = curcon->data;
if((data->flag&CAMERASOLVER_ACTIVECLIP)==0 && data->clip == NULL)
curcon->flag |= CONSTRAINT_DISABLE;
}
/* Check targets for constraints */
if (cti && cti->get_constraint_targets) {
@ -684,25 +703,13 @@ void CONSTRAINT_OT_limitdistance_reset (wmOperatorType *ot)
/* ------------- Child-Of Constraint ------------------ */
/* ChildOf Constraint - set inverse callback */
static int childof_set_inverse_exec (bContext *C, wmOperator *op)
static void child_get_inverse_matrix (Scene *scene, Object *ob, bConstraint *con, float invmat[4][4])
{
Scene *scene= CTX_data_scene(C);
Object *ob = ED_object_active_context(C);
bConstraint *con = edit_constraint_property_get(op, ob, CONSTRAINT_TYPE_CHILDOF);
bChildOfConstraint *data= (con) ? (bChildOfConstraint *)con->data : NULL;
bConstraint *lastcon = NULL;
bPoseChannel *pchan= NULL;
/* despite 3 layers of checks, we may still not be able to find a constraint */
if (data == NULL) {
printf("DEBUG: Child-Of Set Inverse - object = '%s'\n", (ob)? ob->id.name+2 : "<None>");
BKE_report(op->reports, RPT_ERROR, "Couldn't find constraint data for Child-Of Set Inverse");
return OPERATOR_CANCELLED;
}
/* nullify inverse matrix first */
unit_m4(data->invmat);
unit_m4(invmat);
/* try to find a pose channel - assume that this is the constraint owner */
// TODO: get from context instead?
@ -748,7 +755,7 @@ static int childof_set_inverse_exec (bContext *C, wmOperator *op)
*/
invert_m4_m4(imat, pchan->pose_mat);
mult_m4_m4m4(tmat, pmat, imat);
invert_m4_m4(data->invmat, tmat);
invert_m4_m4(invmat, tmat);
/* 5. restore constraints */
pchan->constraints.last = lastcon;
@ -770,9 +777,27 @@ static int childof_set_inverse_exec (bContext *C, wmOperator *op)
/* use what_does_parent to find inverse - just like for normal parenting */
what_does_parent(scene, ob, &workob);
invert_m4_m4(data->invmat, workob.obmat);
invert_m4_m4(invmat, workob.obmat);
}
}
/* ChildOf Constraint - set inverse callback */
static int childof_set_inverse_exec (bContext *C, wmOperator *op)
{
Scene *scene= CTX_data_scene(C);
Object *ob = ED_object_active_context(C);
bConstraint *con = edit_constraint_property_get(op, ob, CONSTRAINT_TYPE_CHILDOF);
bChildOfConstraint *data= (con) ? (bChildOfConstraint *)con->data : NULL;
/* despite 3 layers of checks, we may still not be able to find a constraint */
if (data == NULL) {
printf("DEBUG: Child-Of Set Inverse - object = '%s'\n", (ob)? ob->id.name+2 : "<None>");
BKE_report(op->reports, RPT_ERROR, "Couldn't find constraint data for Child-Of Set Inverse");
return OPERATOR_CANCELLED;
}
child_get_inverse_matrix(scene, ob, con, data->invmat);
WM_event_add_notifier(C, NC_OBJECT|ND_CONSTRAINT, ob);
return OPERATOR_FINISHED;
@ -846,6 +871,96 @@ void CONSTRAINT_OT_childof_clear_inverse (wmOperatorType *ot)
edit_constraint_properties(ot);
}
/* ------------- Object Solver Constraint ------------------ */
/* ObjectSolver Constraint - set inverse callback
 *
 * Computes and stores the inverse correction matrix on the active object's
 * Object Solver constraint, reusing the shared child_get_inverse_matrix()
 * helper (same math as the Child-Of constraint).
 */
static int objectsolver_set_inverse_exec (bContext *C, wmOperator *op)
{
	Scene *scene= CTX_data_scene(C);
	Object *ob = ED_object_active_context(C);
	bConstraint *con = edit_constraint_property_get(op, ob, CONSTRAINT_TYPE_OBJECTSOLVER);
	bObjectSolverConstraint *data= (con) ? (bObjectSolverConstraint *)con->data : NULL;

	/* despite 3 layers of checks, we may still not be able to find a constraint */
	if (data == NULL) {
		/* was a copy-paste from the Child-Of operator: report the right constraint type */
		printf("DEBUG: Object Solver Set Inverse - object = '%s'\n", (ob)? ob->id.name+2 : "<None>");
		BKE_report(op->reports, RPT_ERROR, "Couldn't find constraint data for Object Solver Set Inverse");
		return OPERATOR_CANCELLED;
	}

	/* write the inverse directly into the constraint's stored matrix */
	child_get_inverse_matrix(scene, ob, con, data->invmat);

	WM_event_add_notifier(C, NC_OBJECT|ND_CONSTRAINT, ob);

	return OPERATOR_FINISHED;
}
/* Invoke: resolve the target constraint from context, then run exec. */
static int objectsolver_set_inverse_invoke(bContext *C, wmOperator *op, wmEvent *UNUSED(event))
{
	/* bail out early when no constraint properties can be pulled from context */
	if (!edit_constraint_invoke_properties(C, op))
		return OPERATOR_CANCELLED;

	return objectsolver_set_inverse_exec(C, op);
}
/* Operator registration: "Set Inverse" for the Object Solver constraint. */
void CONSTRAINT_OT_objectsolver_set_inverse (wmOperatorType *ot)
{
	/* identifiers */
	ot->name= "Set Inverse";
	ot->idname= "CONSTRAINT_OT_objectsolver_set_inverse";
	ot->description= "Set inverse correction for ObjectSolver constraint";

	/* flags: register in menus/search, undoable */
	ot->flag= OPTYPE_REGISTER|OPTYPE_UNDO;

	/* callbacks */
	ot->poll= edit_constraint_poll;
	ot->invoke= objectsolver_set_inverse_invoke;
	ot->exec= objectsolver_set_inverse_exec;

	/* common constraint-name/owner properties */
	edit_constraint_properties(ot);
}
/* ObjectSolver Constraint - clear inverse callback
 *
 * Resets the stored inverse correction matrix of the active object's
 * Object Solver constraint back to identity.
 */
static int objectsolver_clear_inverse_exec (bContext *C, wmOperator *op)
{
	Object *ob = ED_object_active_context(C);
	bConstraint *con = edit_constraint_property_get(op, ob, CONSTRAINT_TYPE_OBJECTSOLVER);
	bObjectSolverConstraint *data= (con) ? (bObjectSolverConstraint *)con->data : NULL;

	if(data==NULL) {
		/* was a copy-paste from the Child-Of operator: name the right constraint */
		BKE_report(op->reports, RPT_ERROR, "Object Solver constraint not found");
		return OPERATOR_CANCELLED;
	}

	/* simply clear the matrix */
	unit_m4(data->invmat);

	WM_event_add_notifier(C, NC_OBJECT|ND_CONSTRAINT, ob);

	return OPERATOR_FINISHED;
}
/* Invoke: resolve the target constraint from context, then run exec. */
static int objectsolver_clear_inverse_invoke(bContext *C, wmOperator *op, wmEvent *UNUSED(event))
{
	/* bail out early when no constraint properties can be pulled from context */
	if (!edit_constraint_invoke_properties(C, op))
		return OPERATOR_CANCELLED;

	return objectsolver_clear_inverse_exec(C, op);
}
/* Operator registration: "Clear Inverse" for the Object Solver constraint. */
void CONSTRAINT_OT_objectsolver_clear_inverse (wmOperatorType *ot)
{
	/* identifiers */
	ot->name= "Clear Inverse";
	ot->idname= "CONSTRAINT_OT_objectsolver_clear_inverse";
	ot->description= "Clear inverse correction for ObjectSolver constraint";

	/* flags: register in menus/search, undoable */
	ot->flag= OPTYPE_REGISTER|OPTYPE_UNDO;

	/* callbacks */
	ot->poll= edit_constraint_poll;
	ot->invoke= objectsolver_clear_inverse_invoke;
	ot->exec= objectsolver_clear_inverse_exec;

	/* common constraint-name/owner properties */
	edit_constraint_properties(ot);
}
/***************************** BUTTONS ****************************/
void ED_object_constraint_set_active(Object *ob, bConstraint *con)

@ -187,6 +187,8 @@ void CONSTRAINT_OT_stretchto_reset(struct wmOperatorType *ot);
void CONSTRAINT_OT_limitdistance_reset(struct wmOperatorType *ot);
void CONSTRAINT_OT_childof_set_inverse(struct wmOperatorType *ot);
void CONSTRAINT_OT_childof_clear_inverse(struct wmOperatorType *ot);
void CONSTRAINT_OT_objectsolver_set_inverse(struct wmOperatorType *ot);
void CONSTRAINT_OT_objectsolver_clear_inverse (struct wmOperatorType *ot);
/* object_vgroup.c */
void OBJECT_OT_vertex_group_add(struct wmOperatorType *ot);

@ -161,6 +161,8 @@ void ED_operatortypes_object(void)
WM_operatortype_append(CONSTRAINT_OT_limitdistance_reset);
WM_operatortype_append(CONSTRAINT_OT_childof_set_inverse);
WM_operatortype_append(CONSTRAINT_OT_childof_clear_inverse);
WM_operatortype_append(CONSTRAINT_OT_objectsolver_set_inverse);
WM_operatortype_append(CONSTRAINT_OT_objectsolver_clear_inverse);
WM_operatortype_append(OBJECT_OT_vertex_group_add);
WM_operatortype_append(OBJECT_OT_vertex_group_remove);

@ -138,11 +138,14 @@ void image_buffer_rect_update(Scene *scene, RenderResult *rr, ImBuf *ibuf, volat
rectf+= 4*(rr->rectx*ymin + xmin);
rectc= (unsigned char*)(ibuf->rect + ibuf->x*rymin + rxmin);
if(scene && (scene->r.color_mgt_flag & R_COLOR_MANAGEMENT))
if(scene && (scene->r.color_mgt_flag & R_COLOR_MANAGEMENT)) {
profile_from= IB_PROFILE_LINEAR_RGB;
else
predivide= (scene->r.color_mgt_flag & R_COLOR_MANAGEMENT_PREDIVIDE);
}
else {
profile_from= IB_PROFILE_SRGB;
predivide= 0;
predivide= 0;
}
IMB_buffer_byte_from_float(rectc, rectf,
4, ibuf->dither, IB_PROFILE_SRGB, profile_from, predivide,

@ -460,12 +460,15 @@ static int ed_preview_draw_rect(ScrArea *sa, Scene *sce, ID *id, int split, int
Render *re;
RenderResult rres;
char name[32];
int do_gamma_correct=0;
int do_gamma_correct=0, do_predivide=0;
int offx=0, newx= rect->xmax-rect->xmin, newy= rect->ymax-rect->ymin;
if (id && GS(id->name) != ID_TE) {
/* exception: don't color manage texture previews - show the raw values */
if (sce) do_gamma_correct = sce->r.color_mgt_flag & R_COLOR_MANAGEMENT;
if (sce) {
do_gamma_correct = sce->r.color_mgt_flag & R_COLOR_MANAGEMENT;
do_predivide = sce->r.color_mgt_flag & R_COLOR_MANAGEMENT_PREDIVIDE;
}
}
if(!split || first) sprintf(name, "Preview %p", (void *)sa);
@ -488,10 +491,28 @@ static int ed_preview_draw_rect(ScrArea *sa, Scene *sce, ID *id, int split, int
if(rres.rectf) {
if(ABS(rres.rectx-newx)<2 && ABS(rres.recty-newy)<2) {
newrect->xmax= MAX2(newrect->xmax, rect->xmin + rres.rectx + offx);
newrect->ymax= MAX2(newrect->ymax, rect->ymin + rres.recty);
glaDrawPixelsSafe_to32(rect->xmin+offx, rect->ymin, rres.rectx, rres.recty, rres.rectx, rres.rectf, do_gamma_correct);
if(rres.rectx && rres.recty) {
/* temporary conversion to byte for drawing */
float fx= rect->xmin + offx;
float fy= rect->ymin;
int profile_from= (do_gamma_correct)? IB_PROFILE_LINEAR_RGB: IB_PROFILE_SRGB;
int dither= 0;
unsigned char *rect_byte;
rect_byte= MEM_mallocN(rres.rectx*rres.recty*sizeof(int), "ed_preview_draw_rect");
IMB_buffer_byte_from_float(rect_byte, rres.rectf,
4, dither, IB_PROFILE_SRGB, profile_from, do_predivide,
rres.rectx, rres.recty, rres.rectx, rres.rectx);
glaDrawPixelsSafe(fx, fy, rres.rectx, rres.recty, rres.rectx, GL_RGBA, GL_UNSIGNED_BYTE, rect_byte);
MEM_freeN(rect_byte);
}
RE_ReleaseResultImage(re);
return 1;

@ -45,9 +45,6 @@
#include "BIF_gl.h"
#include "BIF_glutil.h"
#include "IMB_imbuf.h"
#include "IMB_imbuf_types.h"
#ifndef GL_CLAMP_TO_EDGE
#define GL_CLAMP_TO_EDGE 0x812F
#endif
@ -562,27 +559,6 @@ void glaDrawPixelsTex(float x, float y, int img_w, int img_h, int format, void *
glaDrawPixelsTexScaled(x, y, img_w, img_h, format, rect, 1.0f, 1.0f);
}
/* row_w is unused but kept for completeness */
void glaDrawPixelsSafe_to32(float fx, float fy, int img_w, int img_h, int UNUSED(row_w), float *rectf, int do_gamma_correct)
{
unsigned char *rect32;
int profile_from= (do_gamma_correct)? IB_PROFILE_LINEAR_RGB: IB_PROFILE_SRGB;
int predivide= 0;
/* copy imgw-imgh to a temporal 32 bits rect */
if(img_w<1 || img_h<1) return;
rect32= MEM_mallocN(img_w*img_h*sizeof(int), "temp 32 bits");
IMB_buffer_byte_from_float(rect32, rectf,
4, 0, IB_PROFILE_SRGB, profile_from, predivide,
img_w, img_h, img_w, img_w);
glaDrawPixelsSafe(fx, fy, img_w, img_h, img_w, GL_RGBA, GL_UNSIGNED_BYTE, rect32);
MEM_freeN(rect32);
}
void glaDrawPixelsSafe(float x, float y, int img_w, int img_h, int row_w, int format, int type, void *rect)
{
float xzoom= glaGetOneFloat(GL_ZOOM_X);

@ -1222,14 +1222,14 @@ static void do_weight_paint_auto_normalize_all_groups(MDeformVert *dvert, const
See if the current deform vertex has a locked group
*/
static char has_locked_group(MDeformVert *dvert, const int defbase_tot,
const char *lock_flags)
const char *bone_groups, const char *lock_flags)
{
int i;
MDeformWeight *dw;
for (i= dvert->totweight, dw= dvert->dw; i != 0; i--, dw++) {
if (dw->def_nr < defbase_tot) {
if (lock_flags[dw->def_nr] && dw->weight > 0.0f) {
if (bone_groups[dw->def_nr] && lock_flags[dw->def_nr] && dw->weight > 0.0f) {
return TRUE;
}
}
@ -1406,7 +1406,7 @@ static void enforce_locks(MDeformVert *odv, MDeformVert *ndv,
char *change_status;
if(!lock_flags || !has_locked_group(ndv, defbase_tot, lock_flags)) {
if(!lock_flags || !has_locked_group(ndv, defbase_tot, vgroup_validmap, lock_flags)) {
return;
}
/* record if a group was changed, unlocked and not changed, or locked */
@ -1679,7 +1679,7 @@ static void do_weight_paint_vertex( /* vars which remain the same for every vert
/* If there are no locks or multipaint,
* then there is no need to run the more complicated checks */
if ( (wpi->do_multipaint == FALSE || wpi->defbase_tot_sel <= 1) &&
(wpi->lock_flags == NULL || has_locked_group(dv, wpi->defbase_tot, wpi->lock_flags) == FALSE))
(wpi->lock_flags == NULL || has_locked_group(dv, wpi->defbase_tot, wpi->vgroup_validmap, wpi->lock_flags) == FALSE))
{
wpaint_blend(wp, dw, uw, alpha, paintweight, wpi->do_flip, FALSE);
@ -1961,12 +1961,13 @@ static int wpaint_stroke_test_start(bContext *C, wmOperator *op, wmEvent *UNUSED
Object *ob= CTX_data_active_object(C);
struct WPaintData *wpd;
Mesh *me;
// bDeformGroup *dg;
bDeformGroup *dg;
float mat[4][4], imat[4][4];
if(scene->obedit) return OPERATOR_CANCELLED;
if(scene->obedit) {
return FALSE;
}
me= get_mesh(ob);
if(me==NULL || me->totpoly==0) return OPERATOR_PASS_THROUGH;
@ -2005,22 +2006,17 @@ static int wpaint_stroke_test_start(bContext *C, wmOperator *op, wmEvent *UNUSED
/* ensure we dont try paint onto an invalid group */
if (ob->actdef <= 0) {
return OPERATOR_PASS_THROUGH;
BKE_report(op->reports, RPT_WARNING, "No active vertex group for painting, aborting");
return FALSE;
}
#if 0
/* check if we are attempting to paint onto a locked vertex group,
* and other options disallow it from doing anything useful */
dg = BLI_findlink(&ob->defbase, (ob->actdef-1));
if ( (dg->flag & DG_LOCK_WEIGHT) &&
(ts->auto_normalize == FALSE) &&
(ts->multipaint == FALSE) )
{
BKE_report(op->reports, RPT_WARNING, "Active group is locked, multi-paint/normalize disabled, aborting");
return OPERATOR_CANCELLED;
if (dg->flag & DG_LOCK_WEIGHT) {
BKE_report(op->reports, RPT_WARNING, "Active group is locked, aborting");
return FALSE;
}
#endif
/* ALLOCATIONS! no return after this line */
/* make mode data storage */
@ -2054,7 +2050,7 @@ static int wpaint_stroke_test_start(bContext *C, wmOperator *op, wmEvent *UNUSED
wpd->vgroup_mirror = wpaint_mirror_vgroup_ensure(ob, wpd->vgroup_active);
}
return 1;
return TRUE;
}
static void wpaint_stroke_update_step(bContext *C, struct PaintStroke *stroke, PointerRNA *itemptr)

@ -96,6 +96,8 @@ static void draw_movieclip_cache(SpaceClip *sc, ARegion *ar, MovieClip *clip, Sc
float x;
int *points, totseg, i, a;
float sfra= SFRA, efra= EFRA, framelen= ar->winx/(efra-sfra+1);
MovieTrackingTrack *act_track= BKE_tracking_active_track(&clip->tracking);
MovieTrackingReconstruction *reconstruction= BKE_tracking_get_reconstruction(&clip->tracking);
glEnable(GL_BLEND);
@ -119,8 +121,8 @@ static void draw_movieclip_cache(SpaceClip *sc, ARegion *ar, MovieClip *clip, Sc
}
/* track */
if(clip->tracking.act_track) {
MovieTrackingTrack *track= clip->tracking.act_track;
if(act_track) {
MovieTrackingTrack *track= act_track;
for(i= sfra, a= 0; i <= efra; i++) {
int framenr;
@ -152,9 +154,9 @@ static void draw_movieclip_cache(SpaceClip *sc, ARegion *ar, MovieClip *clip, Sc
}
/* failed frames */
if(clip->tracking.reconstruction.flag&TRACKING_RECONSTRUCTED) {
int n= clip->tracking.reconstruction.camnr;
MovieReconstructedCamera *cameras= clip->tracking.reconstruction.cameras;
if(reconstruction->flag&TRACKING_RECONSTRUCTED) {
int n= reconstruction->camnr;
MovieReconstructedCamera *cameras= reconstruction->cameras;
glColor4ub(255, 0, 0, 96);
@ -835,8 +837,9 @@ static void draw_tracking_tracks(SpaceClip *sc, ARegion *ar, MovieClip *clip,
{
float x, y;
MovieTracking* tracking= &clip->tracking;
MovieTrackingMarker *marker;
ListBase *tracksbase= BKE_tracking_get_tracks(tracking);
MovieTrackingTrack *track, *act_track;
MovieTrackingMarker *marker;
int framenr= sc->user.framenr;
int undistort= sc->user.render_flag&MCLIP_PROXY_RENDER_UNDISTORT;
float *marker_pos= NULL, *fp, *active_pos= NULL, cur_pos[2];
@ -858,13 +861,13 @@ static void draw_tracking_tracks(SpaceClip *sc, ARegion *ar, MovieClip *clip,
glMultMatrixf(sc->stabmat);
glScalef(width, height, 0);
act_track= clip->tracking.act_track;
act_track= BKE_tracking_active_track(tracking);
if(sc->user.render_flag&MCLIP_PROXY_RENDER_UNDISTORT) {
int count= 0;
/* count */
track= tracking->tracks.first;
track= tracksbase->first;
while(track) {
if((track->flag&TRACK_HIDDEN)==0) {
marker= BKE_tracking_get_marker(track, framenr);
@ -880,7 +883,7 @@ static void draw_tracking_tracks(SpaceClip *sc, ARegion *ar, MovieClip *clip,
if(count) {
marker_pos= MEM_callocN(2*sizeof(float)*count, "draw_tracking_tracks marker_pos");
track= tracking->tracks.first;
track= tracksbase->first;
fp= marker_pos;
while(track) {
if((track->flag&TRACK_HIDDEN)==0) {
@ -902,7 +905,7 @@ static void draw_tracking_tracks(SpaceClip *sc, ARegion *ar, MovieClip *clip,
}
if(sc->flag&SC_SHOW_TRACK_PATH) {
track= tracking->tracks.first;
track= tracksbase->first;
while(track) {
if((track->flag&TRACK_HIDDEN)==0)
draw_track_path(sc, clip, track);
@ -912,7 +915,7 @@ static void draw_tracking_tracks(SpaceClip *sc, ARegion *ar, MovieClip *clip,
}
/* markers outline and non-selected areas */
track= tracking->tracks.first;
track= tracksbase->first;
fp= marker_pos;
while(track) {
if((track->flag&TRACK_HIDDEN)==0) {
@ -936,7 +939,7 @@ static void draw_tracking_tracks(SpaceClip *sc, ARegion *ar, MovieClip *clip,
/* selected areas only, so selection wouldn't be overlapped by
non-selected areas */
track= tracking->tracks.first;
track= tracksbase->first;
fp= marker_pos;
while(track) {
if((track->flag&TRACK_HIDDEN)==0) {
@ -974,15 +977,16 @@ static void draw_tracking_tracks(SpaceClip *sc, ARegion *ar, MovieClip *clip,
}
if(sc->flag&SC_SHOW_BUNDLES) {
MovieTrackingObject *object= BKE_tracking_active_object(tracking);
float pos[4], vec[4], mat[4][4], aspy;
glEnable(GL_POINT_SMOOTH);
glPointSize(3.0f);
aspy= 1.0f/clip->tracking.camera.pixel_aspect;
BKE_tracking_projection_matrix(tracking, framenr, width, height, mat);
BKE_tracking_projection_matrix(tracking, object, framenr, width, height, mat);
track= tracking->tracks.first;
track= tracksbase->first;
while(track) {
if((track->flag&TRACK_HIDDEN)==0 && track->flag&TRACK_HAS_BUNDLE) {
marker= BKE_tracking_get_marker(track, framenr);
@ -1027,7 +1031,7 @@ static void draw_tracking_tracks(SpaceClip *sc, ARegion *ar, MovieClip *clip,
if(sc->flag&SC_SHOW_NAMES) {
/* scaling should be cleared before drawing texts, otherwise font would also be scaled */
track= tracking->tracks.first;
track= tracksbase->first;
fp= marker_pos;
while(track) {
if((track->flag&TRACK_HIDDEN)==0) {

@ -171,12 +171,13 @@ static int selected_boundbox(SpaceClip *sc, float min[2], float max[2])
MovieClip *clip= ED_space_clip(sc);
MovieTrackingTrack *track;
int width, height, ok= 0;
ListBase *tracksbase= BKE_tracking_get_tracks(&clip->tracking);
INIT_MINMAX2(min, max);
ED_space_clip_size(sc, &width, &height);
track= clip->tracking.tracks.first;
track= tracksbase->first;
while(track) {
if(TRACK_VIEW_SELECTED(sc, track)) {
MovieTrackingMarker *marker= BKE_tracking_get_marker(track, sc->user.framenr);

@ -177,6 +177,7 @@ static void draw_tracks_curves(View2D *v2d, SpaceClip *sc)
{
MovieClip *clip= ED_space_clip(sc);
MovieTracking *tracking= &clip->tracking;
MovieTrackingTrack *act_track= BKE_tracking_active_track(tracking);
int width, height;
struct { MovieTrackingTrack *act_track; int sel; float xscale, yscale, hsize; } userdata;
@ -188,13 +189,13 @@ static void draw_tracks_curves(View2D *v2d, SpaceClip *sc)
/* non-selected knot handles */
userdata.hsize= UI_GetThemeValuef(TH_HANDLE_VERTEX_SIZE);
userdata.sel= 0;
userdata.act_track= clip->tracking.act_track;
userdata.act_track= act_track;
UI_view2d_getscale(v2d, &userdata.xscale, &userdata.yscale);
clip_graph_tracking_values_iterate(sc, &userdata, tracking_segment_knot_cb, NULL, NULL);
/* draw graph lines */
glEnable(GL_BLEND);
clip_graph_tracking_values_iterate(sc, tracking->act_track, tracking_segment_point_cb, tracking_segment_start_cb, tracking_segment_end_cb);
clip_graph_tracking_values_iterate(sc, act_track, tracking_segment_point_cb, tracking_segment_start_cb, tracking_segment_end_cb);
glDisable(GL_BLEND);
/* selected knot handles on top of curves */
@ -206,7 +207,7 @@ static void draw_frame_curves(SpaceClip *sc)
{
MovieClip *clip= ED_space_clip(sc);
MovieTracking *tracking= &clip->tracking;
MovieTrackingReconstruction *reconstruction= &tracking->reconstruction;
MovieTrackingReconstruction *reconstruction= BKE_tracking_get_reconstruction(tracking);
int i, lines= 0, prevfra= 0;
glColor3f(0.0f, 0.0f, 1.0f);

@ -155,13 +155,14 @@ static int mouse_select_knot(bContext *C, float co[2], int extend)
ARegion *ar= CTX_wm_region(C);
View2D *v2d= &ar->v2d;
MovieTracking *tracking= &clip->tracking;
MovieTrackingTrack *act_track= BKE_tracking_active_track(tracking);
static const int delta= 6;
if(tracking->act_track) {
if(act_track) {
MouseSelectUserData userdata;
mouse_select_init_data(&userdata, co);
clip_graph_tracking_values_iterate_track(sc, tracking->act_track,
clip_graph_tracking_values_iterate_track(sc, act_track,
&userdata, find_nearest_tracking_knot_cb, NULL, NULL);
if(userdata.marker) {
@ -191,6 +192,7 @@ static int mouse_select_curve(bContext *C, float co[2], int extend)
SpaceClip *sc= CTX_wm_space_clip(C);
MovieClip *clip= ED_space_clip(sc);
MovieTracking *tracking= &clip->tracking;
MovieTrackingTrack *act_track= BKE_tracking_active_track(tracking);
MouseSelectUserData userdata;
mouse_select_init_data(&userdata, co);
@ -198,12 +200,12 @@ static int mouse_select_curve(bContext *C, float co[2], int extend)
if(userdata.track) {
if(extend) {
if(tracking->act_track==userdata.track) {
if(act_track==userdata.track) {
/* currently only single curve can be selected (selected curve represents active track) */
tracking->act_track= NULL;
act_track= NULL;
}
}
else if(tracking->act_track!=userdata.track) {
else if(act_track!=userdata.track) {
MovieTrackingMarker *marker;
SelectUserData selectdata = {SEL_DESELECT};
@ -292,9 +294,11 @@ static int delete_curve_exec(bContext *C, wmOperator *UNUSED(op))
SpaceClip *sc= CTX_wm_space_clip(C);
MovieClip *clip= ED_space_clip(sc);
MovieTracking *tracking= &clip->tracking;
ListBase *tracksbase= BKE_tracking_get_tracks(tracking);
MovieTrackingTrack *act_track= BKE_tracking_active_track(tracking);
if(tracking->act_track)
clip_delete_track(C, clip, tracking->act_track);
if(act_track)
clip_delete_track(C, clip, tracksbase, act_track);
return OPERATOR_FINISHED;
}
@ -322,16 +326,17 @@ static int delete_knot_exec(bContext *C, wmOperator *UNUSED(op))
SpaceClip *sc= CTX_wm_space_clip(C);
MovieClip *clip= ED_space_clip(sc);
MovieTracking *tracking= &clip->tracking;
ListBase *tracksbase= BKE_tracking_get_tracks(tracking);
MovieTrackingTrack *act_track= BKE_tracking_active_track(tracking);
if(tracking->act_track) {
if(act_track) {
int a= 0;
MovieTrackingTrack *track= tracking->act_track;
while(a<track->markersnr) {
MovieTrackingMarker *marker= &track->markers[a];
while(a<act_track->markersnr) {
MovieTrackingMarker *marker= &act_track->markers[a];
if(marker->flag&MARKER_GRAPH_SEL)
clip_delete_marker(C, clip, track, marker);
clip_delete_marker(C, clip, tracksbase, act_track, marker);
else
a++;
}

@ -92,8 +92,8 @@ void clip_graph_tracking_values_iterate(struct SpaceClip *sc, void *userdata,
void clip_graph_tracking_iterate(struct SpaceClip *sc, void *userdata,
void (*func) (void *userdata, struct MovieTrackingMarker *marker));
void clip_delete_track(struct bContext *C, struct MovieClip *clip, struct MovieTrackingTrack *track);
void clip_delete_marker(struct bContext *C, struct MovieClip *clip, struct MovieTrackingTrack *track, struct MovieTrackingMarker *marker);
void clip_delete_track(struct bContext *C, struct MovieClip *clip, struct ListBase *tracksbase, struct MovieTrackingTrack *track);
void clip_delete_marker(struct bContext *C, struct MovieClip *clip, struct ListBase *tracksbase, struct MovieTrackingTrack *track, struct MovieTrackingMarker *marker);
void clip_view_center_to_point(struct SpaceClip *sc, float x, float y);
@ -124,6 +124,7 @@ void CLIP_OT_set_origin(struct wmOperatorType *ot);
void CLIP_OT_set_floor(struct wmOperatorType *ot);
void CLIP_OT_set_axis(struct wmOperatorType *ot);
void CLIP_OT_set_scale(struct wmOperatorType *ot);
void CLIP_OT_set_solution_scale(struct wmOperatorType *ot);
void CLIP_OT_set_center_principal(struct wmOperatorType *ot);
@ -139,6 +140,9 @@ void CLIP_OT_stabilize_2d_remove(struct wmOperatorType *ot);
void CLIP_OT_stabilize_2d_select(struct wmOperatorType *ot);
void CLIP_OT_stabilize_2d_set_rotation(struct wmOperatorType *ot);
void CLIP_OT_clean_tracks(wmOperatorType *ot);
void CLIP_OT_clean_tracks(struct wmOperatorType *ot);
void CLIP_OT_tracking_object_new(struct wmOperatorType *ot);
void CLIP_OT_tracking_object_remove(struct wmOperatorType *ot);
#endif /* ED_CLIP_INTERN_H */

@ -119,9 +119,10 @@ void clip_graph_tracking_values_iterate(SpaceClip *sc, void *userdata,
{
MovieClip *clip= ED_space_clip(sc);
MovieTracking *tracking= &clip->tracking;
ListBase *tracksbase= BKE_tracking_get_tracks(tracking);
MovieTrackingTrack *track;
track= tracking->tracks.first;
track= tracksbase->first;
while(track) {
if(TRACK_VIEW_SELECTED(sc, track)) {
clip_graph_tracking_values_iterate_track(sc, track, userdata, func, segment_start, segment_end);
@ -136,9 +137,10 @@ void clip_graph_tracking_iterate(SpaceClip *sc, void *userdata,
{
MovieClip *clip= ED_space_clip(sc);
MovieTracking *tracking= &clip->tracking;
ListBase *tracksbase= BKE_tracking_get_tracks(tracking);
MovieTrackingTrack *track;
track= tracking->tracks.first;
track= tracksbase->first;
while(track) {
if(TRACK_VIEW_SELECTED(sc, track)) {
int i;
@ -158,14 +160,15 @@ void clip_graph_tracking_iterate(SpaceClip *sc, void *userdata,
}
}
void clip_delete_track(bContext *C, MovieClip *clip, MovieTrackingTrack *track)
void clip_delete_track(bContext *C, MovieClip *clip, ListBase *tracksbase, MovieTrackingTrack *track)
{
MovieTracking *tracking= &clip->tracking;
MovieTrackingStabilization *stab= &tracking->stabilization;
MovieTrackingTrack *act_track= BKE_tracking_active_track(tracking);
int has_bundle= 0, update_stab= 0;
if(track==tracking->act_track)
if(track==act_track)
tracking->act_track= NULL;
if(track==stab->rot_track) {
@ -179,7 +182,7 @@ void clip_delete_track(bContext *C, MovieClip *clip, MovieTrackingTrack *track)
has_bundle= 1;
BKE_tracking_free_track(track);
BLI_freelinkN(&tracking->tracks, track);
BLI_freelinkN(tracksbase, track);
WM_event_add_notifier(C, NC_MOVIECLIP|NA_EDITED, clip);
@ -194,10 +197,10 @@ void clip_delete_track(bContext *C, MovieClip *clip, MovieTrackingTrack *track)
WM_event_add_notifier(C, NC_SPACE|ND_SPACE_VIEW3D, NULL);
}
void clip_delete_marker(bContext *C, MovieClip *clip, MovieTrackingTrack *track, MovieTrackingMarker *marker)
void clip_delete_marker(bContext *C, MovieClip *clip, ListBase *tracksbase, MovieTrackingTrack *track, MovieTrackingMarker *marker)
{
if(track->markersnr==1) {
clip_delete_track(C, clip, track);
clip_delete_track(C, clip, tracksbase, track);
}
else {
BKE_tracking_delete_marker(track, marker->framenr);

@ -351,6 +351,7 @@ static void clip_operatortypes(void)
WM_operatortype_append(CLIP_OT_set_floor);
WM_operatortype_append(CLIP_OT_set_axis);
WM_operatortype_append(CLIP_OT_set_scale);
WM_operatortype_append(CLIP_OT_set_solution_scale);
/* detect */
WM_operatortype_append(CLIP_OT_detect_features);
@ -372,6 +373,10 @@ static void clip_operatortypes(void)
WM_operatortype_append(CLIP_OT_graph_select);
WM_operatortype_append(CLIP_OT_graph_delete_curve);
WM_operatortype_append(CLIP_OT_graph_delete_knot);
/* object tracking */
WM_operatortype_append(CLIP_OT_tracking_object_new);
WM_operatortype_append(CLIP_OT_tracking_object_remove);
}
static void clip_keymap(struct wmKeyConfig *keyconf)

File diff suppressed because it is too large Load Diff

@ -750,7 +750,9 @@ void uiTemplateImage(uiLayout *layout, bContext *C, PointerRNA *ptr, const char
uiLayoutSetActive(row, RNA_boolean_get(&imaptr, "use_fields"));
uiItemR(row, &imaptr, "field_order", UI_ITEM_R_EXPAND, NULL, ICON_NONE);
uiItemR(split, &imaptr, "use_premultiply", 0, NULL, ICON_NONE);
row= uiLayoutRow(layout, 0);
uiItemR(row, &imaptr, "use_premultiply", 0, NULL, ICON_NONE);
uiItemR(row, &imaptr, "use_color_unpremultiply", 0, NULL, ICON_NONE);
}
}

@ -248,12 +248,10 @@ static void node_area_listener(ScrArea *sa, wmNotifier *wmn)
case NC_IMAGE:
if (wmn->action == NA_EDITED) {
if(type==NTREE_COMPOSIT) {
Scene *scene= wmn->window->screen->scene;
/* note that nodeUpdateID is already called by BKE_image_signal() on all
* scenes so really this is just to know if the images is used in the compo else
* painting on images could become very slow when the compositor is open. */
if(nodeUpdateID(scene->nodetree, wmn->reference))
if(nodeUpdateID(snode->nodetree, wmn->reference))
ED_area_tag_refresh(sa);
}
}

@ -1511,48 +1511,49 @@ static void draw_bundle_sphere(void)
glCallList(displist);
}
static void draw_viewport_reconstruction(Scene *scene, Base *base, View3D *v3d, MovieClip *clip, int flag)
static void draw_viewport_object_reconstruction(Scene *scene, Base *base, View3D *v3d,
MovieClip *clip, MovieTrackingObject *tracking_object, int flag, int *global_track_index)
{
MovieTracking *tracking= &clip->tracking;
MovieTrackingTrack *track;
float mat[4][4], imat[4][4], curcol[4];
float mat[4][4], imat[4][4];
unsigned char col[4], scol[4];
int bundlenr= 1;
if((v3d->flag2&V3D_SHOW_RECONSTRUCTION)==0)
return;
if(v3d->flag2&V3D_RENDER_OVERRIDE)
return;
glGetFloatv(GL_CURRENT_COLOR, curcol);
int tracknr= *global_track_index;
ListBase *tracksbase= BKE_tracking_object_tracks(tracking, tracking_object);
UI_GetThemeColor4ubv(TH_TEXT, col);
UI_GetThemeColor4ubv(TH_SELECT, scol);
BKE_get_tracking_mat(scene, base->object, mat);
glEnable(GL_LIGHTING);
glColorMaterial(GL_FRONT_AND_BACK, GL_DIFFUSE);
glEnable(GL_COLOR_MATERIAL);
glShadeModel(GL_SMOOTH);
/* current ogl matrix is translated in camera space, bundles should
be rendered in world space, so camera matrix should be "removed"
from current ogl matrix */
invert_m4_m4(imat, base->object->obmat);
glPushMatrix();
glMultMatrixf(imat);
glMultMatrixf(mat);
for ( track= tracking->tracks.first; track; track= track->next) {
int selected= track->flag&SELECT || track->pat_flag&SELECT || track->search_flag&SELECT;
if(tracking_object->flag & TRACKING_OBJECT_CAMERA) {
/* current ogl matrix is translated in camera space, bundles should
be rendered in world space, so camera matrix should be "removed"
from current ogl matrix */
invert_m4_m4(imat, base->object->obmat);
glMultMatrixf(imat);
glMultMatrixf(mat);
}
else {
float obmat[4][4];
BKE_tracking_get_interpolated_camera(tracking, tracking_object, scene->r.cfra, obmat);
invert_m4_m4(imat, obmat);
glMultMatrixf(imat);
}
for (track= tracksbase->first; track; track= track->next) {
int selected= TRACK_SELECTED(track);
if((track->flag&TRACK_HAS_BUNDLE)==0)
continue;
if(flag&DRAW_PICKING)
glLoadName(base->selcol + (bundlenr<<16));
glLoadName(base->selcol + (tracknr<<16));
glPushMatrix();
glTranslatef(track->bundle_pos[0], track->bundle_pos[1], track->bundle_pos[2]);
@ -1560,7 +1561,6 @@ static void draw_viewport_reconstruction(Scene *scene, Base *base, View3D *v3d,
if(v3d->drawtype==OB_WIRE) {
glDisable(GL_LIGHTING);
glDepthMask(0);
if(selected) {
if(base==BASACT) UI_ThemeColor(TH_ACTIVE);
@ -1572,7 +1572,6 @@ static void draw_viewport_reconstruction(Scene *scene, Base *base, View3D *v3d,
drawaxes(0.05f, v3d->bundle_drawtype);
glDepthMask(1);
glEnable(GL_LIGHTING);
} else if(v3d->drawtype>OB_WIRE) {
if(v3d->bundle_drawtype==OB_EMPTY_SPHERE) {
@ -1581,7 +1580,6 @@ static void draw_viewport_reconstruction(Scene *scene, Base *base, View3D *v3d,
if(base==BASACT) UI_ThemeColor(TH_ACTIVE);
else UI_ThemeColor(TH_SELECT);
glDepthMask(0);
glLineWidth(2.f);
glDisable(GL_LIGHTING);
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
@ -1591,7 +1589,6 @@ static void draw_viewport_reconstruction(Scene *scene, Base *base, View3D *v3d,
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
glEnable(GL_LIGHTING);
glLineWidth(1.f);
glDepthMask(1);
}
if(track->flag&TRACK_CUSTOMCOLOR) glColor3fv(track->color);
@ -1600,7 +1597,6 @@ static void draw_viewport_reconstruction(Scene *scene, Base *base, View3D *v3d,
draw_bundle_sphere();
} else {
glDisable(GL_LIGHTING);
glDepthMask(0);
if(selected) {
if(base==BASACT) UI_ThemeColor(TH_ACTIVE);
@ -1612,7 +1608,6 @@ static void draw_viewport_reconstruction(Scene *scene, Base *base, View3D *v3d,
drawaxes(0.05f, v3d->bundle_drawtype);
glDepthMask(1);
glEnable(GL_LIGHTING);
}
}
@ -1630,32 +1625,67 @@ static void draw_viewport_reconstruction(Scene *scene, Base *base, View3D *v3d,
view3d_cached_text_draw_add(pos, track->name, 10, V3D_CACHE_TEXT_GLOBALSPACE, tcol);
}
bundlenr++;
tracknr++;
}
if((flag & DRAW_PICKING)==0) {
if(v3d->flag2&V3D_SHOW_CAMERAPATH && clip->tracking.reconstruction.camnr) {
int a= 0;
MovieTrackingReconstruction *reconstruction= &tracking->reconstruction;
MovieReconstructedCamera *camera= tracking->reconstruction.cameras;
if((v3d->flag2&V3D_SHOW_CAMERAPATH) && (tracking_object->flag&TRACKING_OBJECT_CAMERA)) {
MovieTrackingReconstruction *reconstruction;
reconstruction= BKE_tracking_object_reconstruction(tracking, tracking_object);
glDisable(GL_LIGHTING);
UI_ThemeColor(TH_CAMERA_PATH);
glLineWidth(2.0f);
if(reconstruction->camnr) {
MovieReconstructedCamera *camera= reconstruction->cameras;
int a= 0;
glBegin(GL_LINE_STRIP);
for(a= 0; a<reconstruction->camnr; a++, camera++) {
glVertex3fv(camera->mat[3]);
}
glEnd();
glDisable(GL_LIGHTING);
UI_ThemeColor(TH_CAMERA_PATH);
glLineWidth(2.0f);
glLineWidth(1.0f);
glEnable(GL_LIGHTING);
glBegin(GL_LINE_STRIP);
for(a= 0; a<reconstruction->camnr; a++, camera++) {
glVertex3fv(camera->mat[3]);
}
glEnd();
glLineWidth(1.0f);
glEnable(GL_LIGHTING);
}
}
}
glPopMatrix();
*global_track_index= tracknr;
}
static void draw_viewport_reconstruction(Scene *scene, Base *base, View3D *v3d, MovieClip *clip, int flag)
{
MovieTracking *tracking= &clip->tracking;
MovieTrackingObject *tracking_object;
float curcol[4];
int global_track_index= 1;
if((v3d->flag2&V3D_SHOW_RECONSTRUCTION)==0)
return;
if(v3d->flag2&V3D_RENDER_OVERRIDE)
return;
glGetFloatv(GL_CURRENT_COLOR, curcol);
glEnable(GL_LIGHTING);
glColorMaterial(GL_FRONT_AND_BACK, GL_DIFFUSE);
glEnable(GL_COLOR_MATERIAL);
glShadeModel(GL_SMOOTH);
tracking_object= tracking->objects.first;
while(tracking_object) {
draw_viewport_object_reconstruction(scene, base, v3d, clip, tracking_object,
flag, &global_track_index);
tracking_object= tracking_object->next;
}
/* restore */
glShadeModel(GL_FLAT);
glDisable(GL_COLOR_MATERIAL);
@ -6922,7 +6952,34 @@ void draw_object(Scene *scene, ARegion *ar, View3D *v3d, Base *base, int flag)
ListBase targets = {NULL, NULL};
bConstraintTarget *ct;
if ((curcon->flag & CONSTRAINT_EXPAND) && (cti) && (cti->get_constraint_targets)) {
if(ELEM(cti->type, CONSTRAINT_TYPE_FOLLOWTRACK, CONSTRAINT_TYPE_OBJECTSOLVER)) {
/* special case for object solver and follow track constraints because they don't fill
constraint targets properly (design limitation -- scene is needed for their target
but it can't be accessed from the get_targets callback) */
Object *camob= NULL;
if(cti->type==CONSTRAINT_TYPE_FOLLOWTRACK) {
bFollowTrackConstraint *data= (bFollowTrackConstraint *)curcon->data;
camob= data->camera ? data->camera : scene->camera;
}
else if(cti->type==CONSTRAINT_TYPE_OBJECTSOLVER) {
bObjectSolverConstraint *data= (bObjectSolverConstraint *)curcon->data;
camob= data->camera ? data->camera : scene->camera;
}
if(camob) {
setlinestyle(3);
glBegin(GL_LINES);
glVertex3fv(camob->obmat[3]);
glVertex3fv(ob->obmat[3]);
glEnd();
setlinestyle(0);
}
}
else if ((curcon->flag & CONSTRAINT_EXPAND) && (cti) && (cti->get_constraint_targets)) {
cti->get_constraint_targets(curcon, &targets);
for (ct= targets.first; ct; ct= ct->next) {

@ -1328,6 +1328,25 @@ Base *ED_view3d_give_base_under_cursor(bContext *C, const int mval[2])
return basact;
}
/* Clear the selection flags on every track of every tracking object
 * in the given tracking context. */
static void deselect_all_tracks(MovieTracking *tracking)
{
	MovieTrackingObject *object;

	for(object= tracking->objects.first; object; object= object->next) {
		ListBase *tracksbase= BKE_tracking_object_tracks(tracking, object);
		MovieTrackingTrack *track;

		for(track= tracksbase->first; track; track= track->next)
			BKE_tracking_deselect_track(track, TRACK_AREA_ALL);
	}
}
/* mval is region coords */
static int mouse_select(bContext *C, const int mval[2], short extend, short obcenter, short enumerate)
{
@ -1399,27 +1418,41 @@ static int mouse_select(bContext *C, const int mval[2], short extend, short obce
if(basact->object->type==OB_CAMERA) {
if(BASACT==basact) {
int i, hitresult;
MovieTrackingTrack *track;
int changed= 0;
for (i=0; i< hits; i++) {
hitresult= buffer[3+(i*4)];
/* if there's bundles in buffer select bundles first,
so non-camera elements should be ignored in buffer */
if(basact->selcol != (hitresult & 0xFFFF))
if(basact->selcol != (hitresult & 0xFFFF)) {
continue;
}
/* index of bundle is 1<<16-based. if there's no "bone" index
in the high word, this buffer value belongs to the camera, not to a bundle */
if(buffer[4*i+3] & 0xFFFF0000) {
MovieClip *clip= object_get_movieclip(scene, basact->object, 0);
int selected;
track= BKE_tracking_indexed_track(&clip->tracking, hitresult >> 16);
MovieTracking *tracking= &clip->tracking;
ListBase *tracksbase;
MovieTrackingTrack *track;
selected= (track->flag&SELECT) || (track->pat_flag&SELECT) || (track->search_flag&SELECT);
track= BKE_tracking_indexed_track(&clip->tracking, hitresult >> 16, &tracksbase);
if(selected && extend) BKE_tracking_deselect_track(track, TRACK_AREA_ALL);
else BKE_tracking_select_track(&clip->tracking, track, TRACK_AREA_ALL, extend);
if(TRACK_SELECTED(track) && extend) {
changed= 0;
BKE_tracking_deselect_track(track, TRACK_AREA_ALL);
}
else {
int oldsel= TRACK_SELECTED(track) ? 1 : 0;
if(!extend)
deselect_all_tracks(tracking);
BKE_tracking_select_track(tracksbase, track, TRACK_AREA_ALL, extend);
if(oldsel!=(TRACK_SELECTED(track) ? 1 : 0))
changed= 1;
}
basact->flag|= SELECT;
basact->object->flag= basact->flag;
@ -1432,6 +1465,12 @@ static int mouse_select(bContext *C, const int mval[2], short extend, short obce
break;
}
}
if(!changed) {
/* fall back to regular object selection if no new bundles were selected;
allows selecting objects parented to the reconstruction object */
basact= mouse_select_eval_buffer(&vc, buffer, hits, mval, startbase, 0);
}
}
}
else if(ED_do_pose_selectbuffer(scene, basact, buffer, hits, extend) ) { /* then bone is found */

@ -57,6 +57,7 @@
#include "BKE_object.h"
#include "BKE_tessmesh.h"
#include "BKE_DerivedMesh.h"
#include "BKE_scene.h"
#include "BKE_tracking.h"
#include "WM_api.h"
@ -815,28 +816,55 @@ void VIEW3D_OT_snap_cursor_to_grid(wmOperatorType *ot)
static void bundle_midpoint(Scene *scene, Object *ob, float vec[3])
{
MovieTrackingTrack *track;
MovieClip *clip= object_get_movieclip(scene, ob, 0);
MovieTracking *tracking= &clip->tracking;
MovieTrackingObject *object= tracking->objects.first;
int ok= 0;
float min[3], max[3], mat[4][4], pos[3];
float min[3], max[3], mat[4][4], pos[3], cammat[4][4];
if(!clip)
return;
unit_m4(cammat);
if(!scene->camera)
scene->camera= scene_find_camera(scene);
if(scene->camera)
copy_m4_m4(cammat, scene->camera->obmat);
BKE_get_tracking_mat(scene, ob, mat);
INIT_MINMAX(min, max);
track= clip->tracking.tracks.first;
while(track) {
int selected= (track->flag&SELECT) || (track->pat_flag&SELECT) || (track->search_flag&SELECT);
if((track->flag&TRACK_HAS_BUNDLE) && selected) {
ok= 1;
mul_v3_m4v3(pos, mat, track->bundle_pos);
DO_MINMAX(pos, min, max);
while(object) {
ListBase *tracksbase= BKE_tracking_object_tracks(tracking, object);
MovieTrackingTrack *track= tracksbase->first;
float obmat[4][4];
if(object->flag & TRACKING_OBJECT_CAMERA) {
copy_m4_m4(obmat, mat);
}
else {
float imat[4][4];
BKE_tracking_get_interpolated_camera(tracking, object, scene->r.cfra, imat);
invert_m4(imat);
mult_m4_m4m4(obmat, cammat, imat);
}
track= track->next;
while(track) {
if((track->flag&TRACK_HAS_BUNDLE) && TRACK_SELECTED(track)) {
ok= 1;
mul_v3_m4v3(pos, obmat, track->bundle_pos);
DO_MINMAX(pos, min, max);
}
track= track->next;
}
object= object->next;
}
if(ok) {

@ -4264,6 +4264,7 @@ static short constraints_list_needinv(TransInfo *t, ListBase *list)
if (con->type == CONSTRAINT_TYPE_CHILDOF) return 1;
if (con->type == CONSTRAINT_TYPE_FOLLOWPATH) return 1;
if (con->type == CONSTRAINT_TYPE_CLAMPTO) return 1;
if (con->type == CONSTRAINT_TYPE_OBJECTSOLVER) return 1;
/* constraints that require this only under special conditions */
if (con->type == CONSTRAINT_TYPE_ROTLIKE) {
@ -5441,6 +5442,7 @@ static void createTransTrackingData(bContext *C, TransInfo *t)
TransData2D *td2d;
SpaceClip *sc = CTX_wm_space_clip(C);
MovieClip *clip = ED_space_clip(sc);
ListBase *tracksbase= BKE_tracking_get_tracks(&clip->tracking);
MovieTrackingTrack *track;
MovieTrackingMarker *marker;
TransDataTracking *tdt;
@ -5454,7 +5456,7 @@ static void createTransTrackingData(bContext *C, TransInfo *t)
/* count */
t->total = 0;
track = clip->tracking.tracks.first;
track = tracksbase->first;
while(track) {
if(TRACK_VIEW_SELECTED(sc, track) && (track->flag&TRACK_LOCKED)==0) {
marker= BKE_tracking_get_marker(track, framenr);
@ -5481,7 +5483,7 @@ static void createTransTrackingData(bContext *C, TransInfo *t)
t->customFree= transDataTrackingFree;
/* create actual data */
track = clip->tracking.tracks.first;
track = tracksbase->first;
while(track) {
if(TRACK_VIEW_SELECTED(sc, track) && (track->flag&TRACK_LOCKED)==0) {
marker= BKE_tracking_get_marker(track, framenr);

@ -641,10 +641,11 @@ static void recalcData_clip(TransInfo *t)
{
SpaceClip *sc= t->sa->spacedata.first;
MovieClip *clip= ED_space_clip(sc);
ListBase *tracksbase= BKE_tracking_get_tracks(&clip->tracking);
MovieTrackingTrack *track;
if(t->state == TRANS_CANCEL) {
track= clip->tracking.tracks.first;
track= tracksbase->first;
while(track) {
if(TRACK_VIEW_SELECTED(sc, track)) {
MovieTrackingMarker *marker= BKE_tracking_ensure_marker(track, sc->user.framenr);
@ -658,7 +659,7 @@ static void recalcData_clip(TransInfo *t)
flushTransTracking(t);
track= clip->tracking.tracks.first;
track= tracksbase->first;
while(track) {
if(TRACK_VIEW_SELECTED(sc, track)) {
if (t->mode == TFM_TRANSLATION) {

@ -158,6 +158,7 @@ typedef struct ImBuf {
#define IB_tiles (1 << 10)
#define IB_tilecache (1 << 11)
#define IB_premul (1 << 12)
#define IB_cm_predivide (1 << 13)
/*
* The bit flag is stored in the ImBuf.ftype variable.

@ -453,7 +453,8 @@ void IMB_buffer_byte_from_byte(uchar *rect_to, const uchar *rect_from,
void IMB_rect_from_float(struct ImBuf *ibuf)
{
int predivide= 0, profile_from;
int predivide= (ibuf->flags & IB_cm_predivide);
int profile_from;
/* verify we have a float buffer */
if(ibuf->rect_float==NULL)
@ -485,7 +486,8 @@ void IMB_partial_rect_from_float(struct ImBuf *ibuf, float *buffer, int x, int y
{
float *rect_float;
uchar *rect_byte;
int predivide= 0, profile_from;
int predivide= (ibuf->flags & IB_cm_predivide);
int profile_from;
/* verify we have a float buffer */
if(ibuf->rect_float==NULL || buffer==NULL)
@ -521,7 +523,8 @@ void IMB_partial_rect_from_float(struct ImBuf *ibuf, float *buffer, int x, int y
void IMB_float_from_rect(struct ImBuf *ibuf)
{
int predivide= 0, profile_from;
int predivide= (ibuf->flags & IB_cm_predivide);
int profile_from;
/* verify if we byte and float buffers */
if(ibuf->rect==NULL)
@ -546,7 +549,7 @@ void IMB_float_from_rect(struct ImBuf *ibuf)
/* no profile conversion */
void IMB_float_from_rect_simple(struct ImBuf *ibuf)
{
int predivide= 0;
int predivide= (ibuf->flags & IB_cm_predivide);
if(ibuf->rect_float==NULL)
imb_addrectfloatImBuf(ibuf);
@ -558,7 +561,8 @@ void IMB_float_from_rect_simple(struct ImBuf *ibuf)
void IMB_convert_profile(struct ImBuf *ibuf, int profile)
{
int predivide= 0, profile_from, profile_to;
int predivide= (ibuf->flags & IB_cm_predivide);
int profile_from, profile_to;
if(ibuf->profile == profile)
return;
@ -599,7 +603,8 @@ void IMB_convert_profile(struct ImBuf *ibuf, int profile)
* if the return */
float *IMB_float_profile_ensure(struct ImBuf *ibuf, int profile, int *alloc)
{
int predivide= 0, profile_from, profile_to;
int predivide= (ibuf->flags & IB_cm_predivide);
int profile_from, profile_to;
/* determine profiles */
if(ibuf->profile == IB_PROFILE_NONE)

@ -415,6 +415,8 @@ typedef struct bFollowTrackConstraint {
struct MovieClip *clip;
char track[24];
int flag, pad;
char object[24];
struct Object *camera;
} bFollowTrackConstraint;
/* Camera Solver constraints */
@ -423,6 +425,15 @@ typedef struct bCameraSolverConstraint {
int flag, pad;
} bCameraSolverConstraint;
/* Object Solver constraints */
/* Data for the Object Solver constraint: parents the owner to the
 * reconstructed motion of a movie tracking object. */
typedef struct bObjectSolverConstraint {
	struct MovieClip *clip;     /* movie clip providing the tracking data */
	int flag, pad;              /* eObjectSolver_Flags; pad is unused filler */
	char object[24];            /* name of the tracking object to follow */
	float invmat[4][4];         /* parent-inverse matrix to use */
	struct Object *camera;      /* camera the motion is parented to (when NULL, the active scene camera is used) */
} bObjectSolverConstraint;
/* ------------------------------------------ */
/* bConstraint->type
@ -458,6 +469,7 @@ typedef enum eBConstraint_Types {
CONSTRAINT_TYPE_PIVOT, /* Pivot Constraint */
CONSTRAINT_TYPE_FOLLOWTRACK, /* Follow Track Constraint */
CONSTRAINT_TYPE_CAMERASOLVER, /* Camera Solver Constraint */
CONSTRAINT_TYPE_OBJECTSOLVER, /* Object Solver Constraint */
/* NOTE: no constraints are allowed to be added after this */
NUM_CONSTRAINT_TYPES
@ -765,6 +777,11 @@ typedef enum eCameraSolver_Flags {
CAMERASOLVER_ACTIVECLIP = (1<<0)
} eCameraSolver_Flags;
/* ObjectSolver Constraint -> flag */
/* bObjectSolverConstraint.flag bits */
typedef enum eObjectSolver_Flags {
	OBJECTSOLVER_ACTIVECLIP = (1<<0)    /* use the active clip defined in the scene instead of the constraint's own clip */
} eObjectSolver_Flags;
/* Rigid-Body Constraint */
#define CONSTRAINT_DRAW_PIVOT 0x40
#define CONSTRAINT_DISABLE_LINKED_COLLISION 0x80

@ -112,14 +112,14 @@ typedef struct Image {
/* **************** IMAGE ********************* */
/* Image.flag */
#define IMA_FIELDS 1
#define IMA_STD_FIELD 2
#define IMA_DO_PREMUL 4
#define IMA_REFLECT 16
#define IMA_NOCOLLECT 32
#define IMA_DEPRECATED 64
#define IMA_OLD_PREMUL 128
#define IMA_FIELDS 1
#define IMA_STD_FIELD 2
#define IMA_DO_PREMUL 4
#define IMA_REFLECT 16
#define IMA_NOCOLLECT 32
#define IMA_DEPRECATED 64
#define IMA_OLD_PREMUL 128
#define IMA_CM_PREDIVIDE 256
/* Image.tpageflag */
#define IMA_TILES 1

@ -1100,7 +1100,8 @@ typedef struct Scene {
#define R_ALPHAKEY 2
/* color_mgt_flag */
#define R_COLOR_MANAGEMENT 1
#define R_COLOR_MANAGEMENT (1 << 0)
#define R_COLOR_MANAGEMENT_PREDIVIDE (1 << 1)
/* subimtype, flag options for imtype */
#define R_OPENEXR_HALF 1 /*deprecated*/

@ -142,7 +142,10 @@ typedef struct MovieTrackingSettings {
/* cleanup */
int clean_frames, clean_action;
float clean_error, pad;
float clean_error;
/* set object scale */
float object_distance; /* distance between two bundles used for object scaling */
} MovieTrackingSettings;
typedef struct MovieTrackingStabilization {
@ -172,6 +175,17 @@ typedef struct MovieTrackingReconstruction {
struct MovieReconstructedCamera *cameras; /* reconstructed cameras */
} MovieTrackingReconstruction;
/* A tracking object: either the camera itself (TRACKING_OBJECT_CAMERA)
 * or a moving object tracked inside the clip, with its own track list
 * and reconstruction data. */
typedef struct MovieTrackingObject {
	struct MovieTrackingObject *next, *prev;
	char name[24];          /* name of tracking object */
	int flag;               /* TRACKING_OBJECT_CAMERA, ... */
	float scale;            /* scale of object solution in camera space */
	ListBase tracks;        /* list of tracks used for tracking this object */
	MovieTrackingReconstruction reconstruction; /* reconstruction data for this object */
} MovieTrackingObject;
typedef struct MovieTrackingStats {
char message[256];
} MovieTrackingStats;
@ -179,11 +193,14 @@ typedef struct MovieTrackingStats {
typedef struct MovieTracking {
MovieTrackingSettings settings; /* different tracking-related settings */
MovieTrackingCamera camera; /* camera intrinsics */
ListBase tracks; /* all tracks */
MovieTrackingReconstruction reconstruction; /* reconstruction data */
ListBase tracks; /* list of tracks used for camera object */
MovieTrackingReconstruction reconstruction; /* reconstruction data for camera object */
MovieTrackingStabilization stabilization; /* stabilization data */
MovieTrackingTrack *act_track; /* active track */
ListBase objects;
int objectnr, tot_object; /* index of active object and total number of objects */
MovieTrackingStats *stats; /* statistics displaying in clip editor */
} MovieTracking;
@ -207,6 +224,7 @@ enum {
#define TRACK_LOCKED (1<<6)
#define TRACK_CUSTOMCOLOR (1<<7)
#define TRACK_USE_2D_STAB (1<<8)
#define TRACK_PREVIEW_GRAYSCALE (1<<9)
/* MovieTrackingTrack->tracker */
#define TRACKER_KLT 0
@ -241,6 +259,9 @@ enum {
/* MovieTrackingReconstruction->flag */
#define TRACKING_RECONSTRUCTED (1<<0)
/* MovieTrackingObject->flag */
#define TRACKING_OBJECT_CAMERA (1<<0)
#define TRACKING_CLEAN_SELECT 0
#define TRACKING_CLEAN_DELETE_TRACK 1
#define TRACKING_CLEAN_DELETE_SEGMENT 2

@ -334,6 +334,7 @@ extern StructRNA RNA_MotionPath;
extern StructRNA RNA_MotionPathVert;
extern StructRNA RNA_MouseSensor;
extern StructRNA RNA_MovieSequence;
extern StructRNA RNA_MovieTrackingObject;
extern StructRNA RNA_MulticamSequence;
extern StructRNA RNA_MultiresModifier;
extern StructRNA RNA_MusgraveTexture;

@ -45,6 +45,7 @@
EnumPropertyItem constraint_type_items[] ={
{0, "", 0, "Motion Tracking", ""},
{CONSTRAINT_TYPE_CAMERASOLVER, "CAMERA_SOLVER", ICON_CONSTRAINT_DATA, "Camera Solver", ""},
{CONSTRAINT_TYPE_OBJECTSOLVER, "OBJECT_SOLVER", ICON_CONSTRAINT_DATA, "Object Solver", ""},
{CONSTRAINT_TYPE_FOLLOWTRACK, "FOLLOW_TRACK", ICON_CONSTRAINT_DATA, "Follow Track", ""},
{0, "", 0, "Transform", ""},
{CONSTRAINT_TYPE_LOCLIKE, "COPY_LOCATION", ICON_CONSTRAINT_DATA, "Copy Location", ""},
@ -163,6 +164,8 @@ static StructRNA *rna_ConstraintType_refine(struct PointerRNA *ptr)
return &RNA_FollowTrackConstraint;
case CONSTRAINT_TYPE_CAMERASOLVER:
return &RNA_CameraSolverConstraint;
case CONSTRAINT_TYPE_OBJECTSOLVER:
return &RNA_ObjectSolverConstraint;
default:
return &RNA_UnknownType;
}
@ -327,6 +330,49 @@ static void rna_SplineIKConstraint_joint_bindings_set(PointerRNA *ptr, const flo
memcpy(ikData->points, values, ikData->numpoints * sizeof(float));
}
/* Poll callback for constraint "camera" pointer properties: accept only
 * camera objects that are not the constraint owner itself. */
static int rna_Constraint_cameraObject_poll(PointerRNA *ptr, PointerRNA value)
{
	Object *ob= (Object*)value.data;

	return (ob && ob->type == OB_CAMERA && ob != (Object*)ptr->id.data) ? 1 : 0;
}
/* Setter for the Follow Track constraint's camera: assigning NULL clears
 * the pointer; otherwise only cameras other than the owner are accepted. */
static void rna_Constraint_followTrack_camera_set(PointerRNA *ptr, PointerRNA value)
{
	bConstraint *con= (bConstraint*)ptr->data;
	bFollowTrackConstraint *data= (bFollowTrackConstraint*)con->data;
	Object *ob= (Object*)value.data;

	if (ob == NULL) {
		data->camera= NULL;
	}
	else if (ob->type == OB_CAMERA && ob != (Object*)ptr->id.data) {
		data->camera= ob;
	}
}
/* Setter for the Object Solver constraint's camera: assigning NULL clears
 * the pointer; otherwise only cameras other than the owner are accepted. */
static void rna_Constraint_objectSolver_camera_set(PointerRNA *ptr, PointerRNA value)
{
	bConstraint *con= (bConstraint*)ptr->data;
	bObjectSolverConstraint *data= (bObjectSolverConstraint*)con->data;
	Object *ob= (Object*)value.data;

	if (ob == NULL) {
		data->camera= NULL;
	}
	else if (ob->type == OB_CAMERA && ob != (Object*)ptr->id.data) {
		data->camera= ob;
	}
}
#else
EnumPropertyItem constraint_distance_items[] = {
@ -2066,6 +2112,20 @@ static void rna_def_constraint_follow_track(BlenderRNA *brna)
RNA_def_property_boolean_sdna(prop, NULL, "flag", FOLLOWTRACK_USE_3D_POSITION);
RNA_def_property_ui_text(prop, "3D Position", "Use 3D position of track to parent to");
RNA_def_property_update(prop, NC_OBJECT|ND_CONSTRAINT, "rna_Constraint_update");
/* object */
prop= RNA_def_property(srna, "object", PROP_STRING, PROP_NONE);
RNA_def_property_string_sdna(prop, NULL, "object");
RNA_def_property_ui_text(prop, "Object", "Movie tracking object to follow (if empty, camera object is used)");
RNA_def_property_update(prop, NC_OBJECT|ND_CONSTRAINT, "rna_Constraint_dependency_update");
/* camera */
prop= RNA_def_property(srna, "camera", PROP_POINTER, PROP_NONE);
RNA_def_property_pointer_sdna(prop, NULL, "camera");
RNA_def_property_ui_text(prop, "Camera", "Camera to which motion is parented (if empty active scene camera is used)");
RNA_def_property_flag(prop, PROP_EDITABLE);
RNA_def_property_update(prop, NC_OBJECT|ND_CONSTRAINT, "rna_Constraint_dependency_update");
RNA_def_property_pointer_funcs(prop, NULL, "rna_Constraint_followTrack_camera_set", NULL, "rna_Constraint_cameraObject_poll");
}
static void rna_def_constraint_camera_solver(BlenderRNA *brna)
@ -2074,7 +2134,7 @@ static void rna_def_constraint_camera_solver(BlenderRNA *brna)
PropertyRNA *prop;
srna= RNA_def_struct(brna, "CameraSolverConstraint", "Constraint");
RNA_def_struct_ui_text(srna, "Follow Track Constraint", "Lock motion to the reconstructed camera movement");
RNA_def_struct_ui_text(srna, "Camera Solver Constraint", "Lock motion to the reconstructed camera movement");
RNA_def_struct_sdna_from(srna, "bCameraSolverConstraint", "data");
/* movie clip */
@ -2091,6 +2151,43 @@ static void rna_def_constraint_camera_solver(BlenderRNA *brna)
RNA_def_property_update(prop, NC_OBJECT|ND_CONSTRAINT, "rna_Constraint_update");
}
/* Register RNA for the Object Solver constraint (lock motion to the
 * reconstructed movement of a movie tracking object). */
static void rna_def_constraint_object_solver(BlenderRNA *brna)
{
	StructRNA *srna;
	PropertyRNA *prop;

	srna= RNA_def_struct(brna, "ObjectSolverConstraint", "Constraint");
	RNA_def_struct_ui_text(srna, "Object Solver Constraint", "Lock motion to the reconstructed object movement");
	RNA_def_struct_sdna_from(srna, "bObjectSolverConstraint", "data");

	/* movie clip */
	prop= RNA_def_property(srna, "clip", PROP_POINTER, PROP_NONE);
	RNA_def_property_pointer_sdna(prop, NULL, "clip");
	RNA_def_property_ui_text(prop, "Movie Clip", "Movie Clip to get tracking data from");
	RNA_def_property_flag(prop, PROP_EDITABLE);
	RNA_def_property_update(prop, NC_OBJECT|ND_CONSTRAINT, "rna_Constraint_dependency_update");

	/* use default clip */
	prop= RNA_def_property(srna, "use_active_clip", PROP_BOOLEAN, PROP_NONE);
	/* was CAMERASOLVER_ACTIVECLIP: use the object solver's own flag enum;
	 * the bit value is identical, so existing files are unaffected */
	RNA_def_property_boolean_sdna(prop, NULL, "flag", OBJECTSOLVER_ACTIVECLIP);
	RNA_def_property_ui_text(prop, "Active Clip", "Use active clip defined in scene");
	RNA_def_property_update(prop, NC_OBJECT|ND_CONSTRAINT, "rna_Constraint_update");

	/* object */
	prop= RNA_def_property(srna, "object", PROP_STRING, PROP_NONE);
	RNA_def_property_string_sdna(prop, NULL, "object");
	RNA_def_property_ui_text(prop, "Object", "Movie tracking object to follow");
	RNA_def_property_update(prop, NC_OBJECT|ND_CONSTRAINT, "rna_Constraint_dependency_update");

	/* camera */
	prop= RNA_def_property(srna, "camera", PROP_POINTER, PROP_NONE);
	RNA_def_property_pointer_sdna(prop, NULL, "camera");
	RNA_def_property_ui_text(prop, "Camera", "Camera to which motion is parented (if empty active scene camera is used)");
	RNA_def_property_flag(prop, PROP_EDITABLE);
	RNA_def_property_update(prop, NC_OBJECT|ND_CONSTRAINT, "rna_Constraint_dependency_update");
	RNA_def_property_pointer_funcs(prop, NULL, "rna_Constraint_objectSolver_camera_set", NULL, "rna_Constraint_cameraObject_poll");
}
/* base struct for constraints */
void RNA_def_constraint(BlenderRNA *brna)
{
@ -2203,6 +2300,7 @@ void RNA_def_constraint(BlenderRNA *brna)
rna_def_constraint_pivot(brna);
rna_def_constraint_follow_track(brna);
rna_def_constraint_camera_solver(brna);
rna_def_constraint_object_solver(brna);
}
#endif

@ -40,6 +40,7 @@
#include "BKE_image.h"
#include "WM_types.h"
#include "WM_api.h"
static EnumPropertyItem image_source_items[]= {
{IMA_SRC_FILE, "FILE", 0, "Single Image", "Single image file"},
@ -110,6 +111,7 @@ static void rna_Image_reload_update(Main *UNUSED(bmain), Scene *UNUSED(scene), P
{
Image *ima= ptr->id.data;
BKE_image_signal(ima, NULL, IMA_SIGNAL_RELOAD);
WM_main_add_notifier(NC_IMAGE|NA_EDITED, &ima->id);
DAG_id_tag_update(&ima->id, 0);
}
@ -475,6 +477,11 @@ static void rna_def_image(BlenderRNA *brna)
RNA_def_property_boolean_sdna(prop, NULL, "flag", IMA_DO_PREMUL);
RNA_def_property_ui_text(prop, "Premultiply", "Convert RGB from key alpha to premultiplied alpha");
RNA_def_property_update(prop, NC_IMAGE|ND_DISPLAY, "rna_Image_reload_update");
prop= RNA_def_property(srna, "use_color_unpremultiply", PROP_BOOLEAN, PROP_NONE);
RNA_def_property_boolean_sdna(prop, NULL, "flag", IMA_CM_PREDIVIDE);
RNA_def_property_ui_text(prop, "Color Unpremultiply", "For premultiplied alpha images, do color space conversion on colors without alpha, to avoid fringing for images with light backgrounds");
RNA_def_property_update(prop, NC_IMAGE|ND_DISPLAY, "rna_Image_reload_update");
prop= RNA_def_property(srna, "is_dirty", PROP_BOOLEAN, PROP_NONE);
RNA_def_property_boolean_funcs(prop, "rna_Image_dirty_get", NULL);

@ -3230,7 +3230,11 @@ static void rna_def_scene_render_data(BlenderRNA *brna)
RNA_def_property_boolean_sdna(prop, NULL, "color_mgt_flag", R_COLOR_MANAGEMENT);
RNA_def_property_ui_text(prop, "Color Management", "Use linear workflow - gamma corrected imaging pipeline");
RNA_def_property_update(prop, NC_SCENE|ND_RENDER_OPTIONS, "rna_RenderSettings_color_management_update");
prop= RNA_def_property(srna, "use_color_unpremultiply", PROP_BOOLEAN, PROP_NONE);
RNA_def_property_boolean_sdna(prop, NULL, "color_mgt_flag", R_COLOR_MANAGEMENT_PREDIVIDE);
RNA_def_property_ui_text(prop, "Color Unpremultipy", "For premultiplied alpha render output, do color space conversion on colors without alpha, to avoid fringing on light backgrounds");
RNA_def_property_update(prop, NC_SCENE|ND_RENDER_OPTIONS, NULL);
prop= RNA_def_property(srna, "use_file_extension", PROP_BOOLEAN, PROP_NONE);
RNA_def_property_boolean_sdna(prop, NULL, "scemode", R_EXTENSION);

@ -90,43 +90,87 @@ static void rna_tracking_defaultSettings_searchUpdate(Main *UNUSED(bmain), Scene
/* Collection iterator over the clip-level (camera) tracks list. */
static void rna_tracking_tracks_begin(CollectionPropertyIterator *iter, PointerRNA *ptr)
{
	MovieTracking *tracking= &((MovieClip*)ptr->id.data)->tracking;

	rna_iterator_listbase_begin(iter, &tracking->tracks, NULL);
}
static void rna_tracking_tracks_add(MovieTracking *tracking, int frame, int number)
static void rna_tracking_objects_begin(CollectionPropertyIterator *iter, PointerRNA *ptr)
{
int a;
MovieClip *clip= (MovieClip*)ptr->id.data;
for(a= 0; a<number; a++)
BKE_tracking_add_track(tracking, 0, 0, frame, 1, 1);
rna_iterator_listbase_begin(iter, &clip->tracking.objects, NULL);
}
WM_main_add_notifier(NC_MOVIECLIP|NA_EDITED, NULL);
/* Index of the active tracking object within tracking->objects. */
static int rna_tracking_active_object_index_get(PointerRNA *ptr)
{
	MovieTracking *tracking= &((MovieClip*)ptr->id.data)->tracking;

	return tracking->objectnr;
}
/* Set the index of the active tracking object. */
static void rna_tracking_active_object_index_set(PointerRNA *ptr, int value)
{
	MovieTracking *tracking= &((MovieClip*)ptr->id.data)->tracking;

	tracking->objectnr= value;
}
/* Valid range for the active object index: [0, tot_object - 1],
 * clamped so an empty object list still yields max == 0. */
static void rna_tracking_active_object_index_range(PointerRNA *ptr, int *min, int *max)
{
	MovieTracking *tracking= &((MovieClip*)ptr->id.data)->tracking;

	*min= 0;
	*max= MAX2(0, tracking->tot_object-1);
}
static PointerRNA rna_tracking_active_track_get(PointerRNA *ptr)
{
MovieClip *clip= (MovieClip*)ptr->id.data;
MovieTrackingTrack *act_track= BKE_tracking_active_track(&clip->tracking);
return rna_pointer_inherit_refine(ptr, &RNA_MovieTrackingTrack, clip->tracking.act_track);
return rna_pointer_inherit_refine(ptr, &RNA_MovieTrackingTrack, act_track);
}
static void rna_tracking_active_track_set(PointerRNA *ptr, PointerRNA value)
{
MovieClip *clip= (MovieClip*)ptr->id.data;
MovieTrackingTrack *track= (MovieTrackingTrack *)value.data;
int index= BLI_findindex(&clip->tracking.tracks, track);
ListBase *tracksbase= BKE_tracking_get_tracks(&clip->tracking);
int index= BLI_findindex(tracksbase, track);
if(index>=0) clip->tracking.act_track= track;
else clip->tracking.act_track= NULL;
if(index>=0)
clip->tracking.act_track= track;
else
clip->tracking.act_track= NULL;
}
void rna_trackingTrack_name_set(PointerRNA *ptr, const char *value)
{
MovieClip *clip= (MovieClip *)ptr->id.data;
MovieTracking *tracking= &clip->tracking;
MovieTrackingTrack *track= (MovieTrackingTrack *)ptr->data;
ListBase *tracksbase= &tracking->tracks;
BLI_strncpy(track->name, value, sizeof(track->name));
BKE_track_unique_name(&clip->tracking, track);
/* TODO: it's a bit difficult to find list track came from knowing just
movie clip ID and MovieTracking structure, so keep this naive
search for a while */
if(BLI_findindex(tracksbase, track) < 0) {
MovieTrackingObject *object= tracking->objects.first;
while(object) {
if(BLI_findindex(&object->tracks, track)) {
tracksbase= &object->tracks;
break;
}
object= object->next;
}
}
BKE_track_unique_name(tracksbase, track);
}
static int rna_trackingTrack_select_get(PointerRNA *ptr)
@ -257,8 +301,102 @@ static void rna_tracking_flushUpdate(Main *UNUSED(bmain), Scene *scene, PointerR
DAG_id_tag_update(&clip->id, 0);
}
/* Collection iterator over a tracking object's own tracks list. */
static void rna_trackingObject_tracks_begin(CollectionPropertyIterator *iter, PointerRNA *ptr)
{
	MovieTrackingObject *tracking_object= (MovieTrackingObject *)ptr->data;

	rna_iterator_listbase_begin(iter, &tracking_object->tracks, NULL);
}
/* Resolve the active tracking object from its stored index. */
static PointerRNA rna_tracking_active_object_get(PointerRNA *ptr)
{
	MovieTracking *tracking= &((MovieClip*)ptr->id.data)->tracking;
	MovieTrackingObject *object= BLI_findlink(&tracking->objects, tracking->objectnr);

	return rna_pointer_inherit_refine(ptr, &RNA_MovieTrackingObject, object);
}
/* Set the active tracking object by pointer, storing its list index;
 * pointers not found in the list fall back to the first object. */
static void rna_tracking_active_object_set(PointerRNA *ptr, PointerRNA value)
{
	MovieClip *clip= (MovieClip*)ptr->id.data;
	MovieTrackingObject *object= (MovieTrackingObject *)value.data;
	int index= BLI_findindex(&clip->tracking.objects, object);

	clip->tracking.objectnr= (index>=0) ? index : 0;
}
/* Rename a tracking object, keeping names unique within the clip. */
void rna_trackingObject_name_set(PointerRNA *ptr, const char *value)
{
	MovieClip *clip= (MovieClip *)ptr->id.data;
	MovieTrackingObject *tracking_object= (MovieTrackingObject *)ptr->data;

	BLI_strncpy(tracking_object->name, value, sizeof(tracking_object->name));
	BKE_tracking_object_unique_name(&clip->tracking, tracking_object);
}
/* RNA update callback for tracking-object changes: notify listeners of an
 * object transform change and tag the clip for dependency-graph update.
 * `scene` is unused here but required by the RNA update signature, so it
 * is wrapped in UNUSED() like `bmain` to silence compiler warnings. */
static void rna_trackingObject_flushUpdate(Main *UNUSED(bmain), Scene *UNUSED(scene), PointerRNA *ptr)
{
	MovieClip *clip= (MovieClip*)ptr->id.data;

	WM_main_add_notifier(NC_OBJECT|ND_TRANSFORM, NULL);
	DAG_id_tag_update(&clip->id, 0);
}
/* API */
/* Append `number` freshly created tracks on the given frame to
 * `tracksbase`, passing the clip dimensions to track creation. */
static void add_tracks_to_base(MovieClip *clip, MovieTracking *tracking, ListBase *tracksbase, int frame, int number)
{
	MovieClipUser user= {0};
	int i, width, height;

	user.framenr= 1;
	BKE_movieclip_get_size(clip, &user, &width, &height);

	for(i= 0; i<number; i++)
		BKE_tracking_add_track(tracking, tracksbase, 0, 0, frame, width, height);
}
/* RNA API: add `number` tracks at `frame` to the clip-level tracks list
 * and notify the interface. */
static void rna_tracking_tracks_add(ID *id, MovieTracking *tracking, int frame, int number)
{
	MovieClip *mclip= (MovieClip *) id;

	add_tracks_to_base(mclip, tracking, &tracking->tracks, frame, number);
	WM_main_add_notifier(NC_MOVIECLIP|NA_EDITED, NULL);
}
/* RNA API: add `number` tracks at `frame` to a tracking object. The
 * camera object stores its tracks in the clip-level list, so redirect
 * there when the camera flag is set. */
static void rna_trackingObject_tracks_add(ID *id, MovieTrackingObject *object, int frame, int number)
{
	MovieClip *clip= (MovieClip *) id;
	ListBase *tracksbase= (object->flag&TRACKING_OBJECT_CAMERA) ? &clip->tracking.tracks : &object->tracks;

	add_tracks_to_base(clip, &clip->tracking, tracksbase, frame, number);
	WM_main_add_notifier(NC_MOVIECLIP|NA_EDITED, NULL);
}
/* RNA API: create a new named tracking object and notify the interface. */
static MovieTrackingObject *rna_tracking_object_new(MovieTracking *tracking, const char *name)
{
	MovieTrackingObject *tracking_object= BKE_tracking_new_object(tracking, name);

	WM_main_add_notifier(NC_MOVIECLIP|NA_EDITED, NULL);

	return tracking_object;
}
/* RNA API: remove a tracking object and notify the interface. */
void rna_tracking_object_remove(MovieTracking *tracking, MovieTrackingObject *object)
{
	BKE_tracking_remove_object(tracking, object);

	WM_main_add_notifier(NC_MOVIECLIP|NA_EDITED, NULL);
}
static MovieTrackingMarker *rna_trackingTrack_marker_find_frame(MovieTrackingTrack *track, int framenr)
{
return BKE_tracking_get_marker(track, framenr);
@ -442,6 +580,14 @@ static void rna_def_trackingSettings(BlenderRNA *brna)
RNA_def_property_range(prop, 5, 1000);
RNA_def_property_update(prop, 0, "rna_tracking_defaultSettings_searchUpdate");
RNA_def_property_ui_text(prop, "Search Size", "Size of search area for newly created tracks");
/* object distance */
prop= RNA_def_property(srna, "object_distance", PROP_FLOAT, PROP_NONE);
RNA_def_property_clear_flag(prop, PROP_ANIMATABLE);
RNA_def_property_float_sdna(prop, NULL, "object_distance");
RNA_def_property_ui_text(prop, "Distance", "Distance between two bundles used for object scaling");
RNA_def_property_range(prop, 0.001, 10000);
RNA_def_property_ui_range(prop, 0.001, 10000.0, 1, 3);
}
static void rna_def_trackingCamera(BlenderRNA *brna)
@ -673,6 +819,12 @@ static void rna_def_trackingTrack(BlenderRNA *brna)
RNA_def_property_ui_text(prop, "Use Blue Channel", "Use blue channel from footage for tracking");
RNA_def_property_update(prop, NC_MOVIECLIP|ND_DISPLAY, NULL);
/* preview_grayscale */
prop= RNA_def_property(srna, "use_grayscale_preview", PROP_BOOLEAN, PROP_NONE);
RNA_def_property_boolean_sdna(prop, NULL, "flag", TRACK_PREVIEW_GRAYSCALE);
RNA_def_property_ui_text(prop, "Grayscale", "Display what the tracking algorithm sees in the preview");
RNA_def_property_update(prop, NC_MOVIECLIP|ND_DISPLAY, NULL);
/* has bundle */
prop= RNA_def_property(srna, "has_bundle", PROP_BOOLEAN, PROP_NONE);
RNA_def_property_boolean_sdna(prop, NULL, "flag", TRACK_HAS_BUNDLE);
@ -883,18 +1035,18 @@ static void rna_def_trackingReconstruction(BlenderRNA *brna)
RNA_def_property_ui_text(prop, "Cameras", "Collection of solved cameras");
}
static void rna_def_trackingTracks(BlenderRNA *brna, PropertyRNA *cprop)
static void rna_def_trackingTracks(BlenderRNA *brna)
{
StructRNA *srna;
FunctionRNA *func;
PropertyRNA *prop;
RNA_def_property_srna(cprop, "MovieTrackingTracks");
srna= RNA_def_struct(brna, "MovieTrackingTracks", NULL);
RNA_def_struct_sdna(srna, "MovieTracking");
RNA_def_struct_ui_text(srna, "Movie Tracks", "Collection of movie tracking tracks");
func= RNA_def_function(srna, "add", "rna_tracking_tracks_add");
RNA_def_function_flag(func, FUNC_USE_SELF_ID);
RNA_def_function_ui_description(func, "Add a number of tracks to this movie clip");
RNA_def_int(func, "frame", 1, MINFRAME, MAXFRAME, "Frame", "Frame number to add tracks on", MINFRAME, MAXFRAME);
RNA_def_int(func, "count", 1, 0, INT_MAX, "Number", "Number of tracks to add to the movie clip", 0, INT_MAX);
@ -907,6 +1059,105 @@ static void rna_def_trackingTracks(BlenderRNA *brna, PropertyRNA *cprop)
RNA_def_property_ui_text(prop, "Active Track", "Active track in this tracking data object");
}
/* Define the MovieTrackingObjectTracks collection RNA: the per-object track
 * list exposed as MovieTrackingObject.tracks, with an add() function and an
 * "active" pointer property. */
static void rna_def_trackingObjectTracks(BlenderRNA *brna)
{
	StructRNA *srna;
	FunctionRNA *func;
	PropertyRNA *prop;

	srna= RNA_def_struct(brna, "MovieTrackingObjectTracks", NULL);
	/* collection wraps the MovieTrackingObject DNA struct directly */
	RNA_def_struct_sdna(srna, "MovieTrackingObject");
	RNA_def_struct_ui_text(srna, "Movie Tracks", "Collection of movie tracking tracks");

	/* tracks.add(frame, count) */
	func= RNA_def_function(srna, "add", "rna_trackingObject_tracks_add");
	RNA_def_function_flag(func, FUNC_USE_SELF_ID);
	RNA_def_function_ui_description(func, "Add a number of tracks to this movie clip");
	RNA_def_int(func, "frame", 1, MINFRAME, MAXFRAME, "Frame", "Frame number to add tracks on", MINFRAME, MAXFRAME);
	RNA_def_int(func, "count", 1, 0, INT_MAX, "Number", "Number of tracks to add to the movie clip", 0, INT_MAX);

	/* active track */
	prop= RNA_def_property(srna, "active", PROP_POINTER, PROP_NONE);
	RNA_def_property_struct_type(prop, "MovieTrackingTrack");
	RNA_def_property_pointer_funcs(prop, "rna_tracking_active_track_get", "rna_tracking_active_track_set", NULL, NULL);
	RNA_def_property_flag(prop, PROP_EDITABLE|PROP_NEVER_UNLINK);
	RNA_def_property_ui_text(prop, "Active Track", "Active track in this tracking data object");
}
/* Define the MovieTrackingObject RNA struct: one match-moving object
 * (camera or moving object) with its name, camera flag, tracks collection,
 * reconstruction pointer and solution scale. */
static void rna_def_trackingObject(BlenderRNA *brna)
{
	StructRNA *srna;
	PropertyRNA *prop;

	srna= RNA_def_struct(brna, "MovieTrackingObject", NULL);
	RNA_def_struct_ui_text(srna, "Movie tracking object data", "Match-moving object tracking and reconstruction data");

	/* name (uniqueness enforced by the custom setter) */
	prop= RNA_def_property(srna, "name", PROP_STRING, PROP_NONE);
	RNA_def_property_ui_text(prop, "Name", "Unique name of object");
	RNA_def_property_string_funcs(prop, NULL, NULL, "rna_trackingObject_name_set");
	RNA_def_property_string_maxlength(prop, MAX_ID_NAME);
	RNA_def_property_update(prop, NC_MOVIECLIP|NA_EDITED, NULL);
	RNA_def_struct_name_property(srna, prop);

	/* is_camera (read-only flag on the DNA 'flag' field) */
	prop= RNA_def_property(srna, "is_camera", PROP_BOOLEAN, PROP_NONE);
	RNA_def_property_clear_flag(prop, PROP_EDITABLE);
	RNA_def_property_boolean_sdna(prop, NULL, "flag", TRACKING_OBJECT_CAMERA);
	RNA_def_property_ui_text(prop, "Camera", "Object is used for camera tracking");
	RNA_def_property_update(prop, NC_MOVIECLIP|ND_DISPLAY, NULL);

	/* tracks */
	prop= RNA_def_property(srna, "tracks", PROP_COLLECTION, PROP_NONE);
	RNA_def_property_collection_funcs(prop, "rna_trackingObject_tracks_begin", "rna_iterator_listbase_next", "rna_iterator_listbase_end", "rna_iterator_listbase_get", 0, 0, 0, 0);
	RNA_def_property_struct_type(prop, "MovieTrackingTrack");
	RNA_def_property_ui_text(prop, "Tracks", "Collection of tracks in this tracking data object");
	/* use the MovieTrackingObjectTracks srna so tracks.add()/tracks.active work */
	RNA_def_property_srna(prop, "MovieTrackingObjectTracks");

	/* reconstruction */
	prop= RNA_def_property(srna, "reconstruction", PROP_POINTER, PROP_NONE);
	RNA_def_property_struct_type(prop, "MovieTrackingReconstruction");

	/* scale */
	prop= RNA_def_property(srna, "scale", PROP_FLOAT, PROP_NONE);
	RNA_def_property_clear_flag(prop, PROP_ANIMATABLE);
	RNA_def_property_float_sdna(prop, NULL, "scale");
	RNA_def_property_range(prop, 0.0001f, 10000.0f);
	RNA_def_property_ui_range(prop, 0.0001f, 10000.0, 1, 4);
	RNA_def_property_ui_text(prop, "Scale", "Scale of object solution in camera space");
	/* changing the scale must re-flush the solved reconstruction data */
	RNA_def_property_update(prop, NC_MOVIECLIP|NA_EDITED, "rna_trackingObject_flushUpdate");
}
/* Define the MovieTrackingObjects collection RNA (MovieTracking.objects):
 * new()/remove() API functions plus the active object pointer. */
static void rna_def_trackingObjects(BlenderRNA *brna, PropertyRNA *cprop)
{
	StructRNA *srna;
	PropertyRNA *prop;
	FunctionRNA *func;
	PropertyRNA *parm;

	RNA_def_property_srna(cprop, "MovieTrackingObjects");
	srna= RNA_def_struct(brna, "MovieTrackingObjects", NULL);
	RNA_def_struct_sdna(srna, "MovieTracking");
	/* fixed UI-text typo: "trackingobjects" -> "tracking objects" */
	RNA_def_struct_ui_text(srna, "Movie Objects", "Collection of movie tracking objects");

	/* objects.new(name) */
	func= RNA_def_function(srna, "new", "rna_tracking_object_new");
	RNA_def_function_ui_description(func, "Add tracking object to this movie clip");
	RNA_def_string(func, "name", "", 0, "", "Name of new object");
	parm= RNA_def_pointer(func, "object", "MovieTrackingObject", "", "New motion tracking object");
	RNA_def_function_return(func, parm);

	/* objects.remove(object) */
	func= RNA_def_function(srna, "remove", "rna_tracking_object_remove");
	RNA_def_function_ui_description(func, "Remove tracking object from this movie clip");
	parm= RNA_def_pointer(func, "object", "MovieTrackingObject", "", "Motion tracking object to be removed");

	/* active object */
	prop= RNA_def_property(srna, "active", PROP_POINTER, PROP_NONE);
	RNA_def_property_struct_type(prop, "MovieTrackingObject");
	RNA_def_property_pointer_funcs(prop, "rna_tracking_active_object_get", "rna_tracking_active_object_set", NULL, NULL);
	RNA_def_property_flag(prop, PROP_EDITABLE|PROP_NEVER_UNLINK);
	RNA_def_property_ui_text(prop, "Active Object", "Active object in this tracking data object");
}
static void rna_def_tracking(BlenderRNA *brna)
{
StructRNA *srna;
@ -915,8 +1166,11 @@ static void rna_def_tracking(BlenderRNA *brna)
rna_def_trackingSettings(brna);
rna_def_trackingCamera(brna);
rna_def_trackingTrack(brna);
rna_def_trackingTracks(brna);
rna_def_trackingObjectTracks(brna);
rna_def_trackingStabilization(brna);
rna_def_trackingReconstruction(brna);
rna_def_trackingObject(brna);
srna= RNA_def_struct(brna, "MovieTracking", NULL);
RNA_def_struct_ui_text(srna, "Movie tracking data", "Match-moving data for tracking");
@ -934,7 +1188,7 @@ static void rna_def_tracking(BlenderRNA *brna)
RNA_def_property_collection_funcs(prop, "rna_tracking_tracks_begin", "rna_iterator_listbase_next", "rna_iterator_listbase_end", "rna_iterator_listbase_get", 0, 0, 0, 0);
RNA_def_property_struct_type(prop, "MovieTrackingTrack");
RNA_def_property_ui_text(prop, "Tracks", "Collection of tracks in this tracking data object");
rna_def_trackingTracks(brna, prop);
RNA_def_property_srna(prop, "MovieTrackingTracks");
/* stabilization */
prop= RNA_def_property(srna, "stabilization", PROP_POINTER, PROP_NONE);
@ -943,6 +1197,20 @@ static void rna_def_tracking(BlenderRNA *brna)
/* reconstruction */
prop= RNA_def_property(srna, "reconstruction", PROP_POINTER, PROP_NONE);
RNA_def_property_struct_type(prop, "MovieTrackingReconstruction");
/* objects */
prop= RNA_def_property(srna, "objects", PROP_COLLECTION, PROP_NONE);
RNA_def_property_collection_funcs(prop, "rna_tracking_objects_begin", "rna_iterator_listbase_next", "rna_iterator_listbase_end", "rna_iterator_listbase_get", 0, 0, 0, 0);
RNA_def_property_struct_type(prop, "MovieTrackingObject");
RNA_def_property_ui_text(prop, "Objects", "Collection of objects in this tracking data object");
rna_def_trackingObjects(brna, prop);
/* active object index */
prop= RNA_def_property(srna, "active_object_index", PROP_INT, PROP_NONE);
RNA_def_property_int_sdna(prop, NULL, "objectnr");
RNA_def_property_int_funcs(prop, "rna_tracking_active_object_index_get", "rna_tracking_active_object_index_set", "rna_tracking_active_object_index_range");
RNA_def_property_ui_text(prop, "Active Object Index", "Index of active object");
RNA_def_property_update(prop, NC_MOVIECLIP|ND_DISPLAY, NULL);
}
void RNA_def_tracking(BlenderRNA *brna)

@ -607,7 +607,7 @@ void generate_preview(void *data, bNode *node, CompBuf *stackbuf)
bNodePreview *preview= node->preview;
int xsize, ysize;
int profile_from= (rd->color_mgt_flag & R_COLOR_MANAGEMENT)? IB_PROFILE_LINEAR_RGB: IB_PROFILE_SRGB;
int predivide= 0;
int predivide= (rd->color_mgt_flag & R_COLOR_MANAGEMENT_PREDIVIDE);
int dither= 0;
unsigned char *rect;

@ -62,7 +62,7 @@ static bNodeSocketTemplate cmp_node_rlayers_out[]= {
float *node_composit_get_float_buffer(RenderData *rd, ImBuf *ibuf, int *alloc)
{
float *rect;
int predivide= 0;
int predivide= (ibuf->flags & IB_cm_predivide);
*alloc= FALSE;

@ -1154,7 +1154,7 @@ void RE_ResultGet32(Render *re, unsigned int *rect)
}
else if(rres.rectf) {
int profile_from= (re->r.color_mgt_flag & R_COLOR_MANAGEMENT)? IB_PROFILE_LINEAR_RGB: IB_PROFILE_SRGB;
int predivide= 0;
int predivide= (re->r.color_mgt_flag & R_COLOR_MANAGEMENT_PREDIVIDE);
int dither= 0;
IMB_buffer_byte_from_float((unsigned char*)rect, rres.rectf,
@ -2556,7 +2556,7 @@ static void do_render_seq(Render * re)
* render engine delivers */
int profile_to= (re->r.color_mgt_flag & R_COLOR_MANAGEMENT)? IB_PROFILE_LINEAR_RGB: IB_PROFILE_SRGB;
int profile_from= (ibuf->profile == IB_PROFILE_LINEAR_RGB)? IB_PROFILE_LINEAR_RGB: IB_PROFILE_SRGB;
int predivide= 0;
int predivide= (re->r.color_mgt_flag & R_COLOR_MANAGEMENT_PREDIVIDE);
if (!rr->rectf)
rr->rectf= MEM_mallocN(4*sizeof(float)*rr->rectx*rr->recty, "render_seq rectf");
@ -2995,7 +2995,8 @@ static int do_write_image_or_movie(Render *re, Main *bmain, Scene *scene, bMovie
}
}
else {
ImBuf *ibuf= IMB_allocImBuf(rres.rectx, rres.recty, scene->r.im_format.planes, 0);
int flags = (scene->r.color_mgt_flag & R_COLOR_MANAGEMENT_PREDIVIDE)? IB_cm_predivide: 0;
ImBuf *ibuf= IMB_allocImBuf(rres.rectx, rres.recty, scene->r.im_format.planes, flags);
/* if not exists, BKE_write_ibuf makes one */
ibuf->rect= (unsigned int *)rres.rect32;

@ -43,7 +43,7 @@ public:
virtual CValue* GetReplica();
#ifdef WITH_PYTHON
virtual PyObject* ConvertValueToPython() {
return PyUnicode_FromString(m_strString.Ptr());
return PyUnicode_From_STR_String(m_strString);
}
#endif // WITH_PYTHON

@ -221,7 +221,7 @@ public:
//static PyObject* PyMake(PyObject*,PyObject*);
virtual PyObject *py_repr(void)
{
return PyUnicode_FromString((const char*)GetText());
return PyUnicode_From_STR_String(GetText());
}
virtual PyObject* ConvertValueToPython() {

@ -98,21 +98,24 @@ void BL_Material::Initialize()
}
}
void BL_Material::SetConversionRGB(unsigned int *nrgb) {
void BL_Material::SetConversionRGB(unsigned int *nrgb)
{
rgb[0]=*nrgb++;
rgb[1]=*nrgb++;
rgb[2]=*nrgb++;
rgb[3]=*nrgb;
}
void BL_Material::GetConversionRGB(unsigned int *nrgb) {
void BL_Material::GetConversionRGB(unsigned int *nrgb)
{
*nrgb++ = rgb[0];
*nrgb++ = rgb[1];
*nrgb++ = rgb[2];
*nrgb = rgb[3];
}
void BL_Material::SetConversionUV(const STR_String& name, MT_Point2 *nuv) {
void BL_Material::SetConversionUV(const STR_String& name, MT_Point2 *nuv)
{
uvName = name;
uv[0] = *nuv++;
uv[1] = *nuv++;
@ -120,13 +123,15 @@ void BL_Material::SetConversionUV(const STR_String& name, MT_Point2 *nuv) {
uv[3] = *nuv;
}
void BL_Material::GetConversionUV(MT_Point2 *nuv){
void BL_Material::GetConversionUV(MT_Point2 *nuv)
{
*nuv++ = uv[0];
*nuv++ = uv[1];
*nuv++ = uv[2];
*nuv = uv[3];
}
void BL_Material::SetConversionUV2(const STR_String& name, MT_Point2 *nuv) {
void BL_Material::SetConversionUV2(const STR_String& name, MT_Point2 *nuv)
{
uv2Name = name;
uv2[0] = *nuv++;
uv2[1] = *nuv++;
@ -134,7 +139,8 @@ void BL_Material::SetConversionUV2(const STR_String& name, MT_Point2 *nuv) {
uv2[3] = *nuv;
}
void BL_Material::GetConversionUV2(MT_Point2 *nuv){
void BL_Material::GetConversionUV2(MT_Point2 *nuv)
{
*nuv++ = uv2[0];
*nuv++ = uv2[1];
*nuv++ = uv2[2];

@ -38,10 +38,12 @@ extern "C" {
}
// (n&(n-1)) zeros the least significant bit of n
static int is_power_of_2_i(int num) {
static int is_power_of_2_i(int num)
{
return ((num)&(num-1))==0;
}
static int power_of_2_min_i(int num) {
static int power_of_2_min_i(int num)
{
while (!is_power_of_2_i(num))
num= num&(num-1);
return num;

@ -106,7 +106,7 @@ public:
#ifdef WITH_PYTHON
// --------------------------------
virtual PyObject* py_repr(void) { return PyUnicode_FromString(mMaterial->matname.ReadPtr()); }
virtual PyObject* py_repr(void) { return PyUnicode_From_STR_String(mMaterial->matname); }
static PyObject* pyattr_get_shader(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef);
static PyObject* pyattr_get_materialIndex(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef);

@ -255,7 +255,8 @@ void KX_Dome::CalculateImageSize(void)
}
}
bool KX_Dome::CreateDL(){
bool KX_Dome::CreateDL()
{
dlistId = glGenLists((GLsizei) m_numimages);
if (dlistId != 0) {
if(m_mode == DOME_FISHEYE || m_mode == DOME_TRUNCATED_FRONT || m_mode == DOME_TRUNCATED_REAR){

@ -41,6 +41,26 @@ extern "C" {
#define BGE_FONT_RES 100
/* Split *str* on newline characters into a vector of lines.
 * A trailing segment after the last '\n' (or the whole string when there is
 * no newline) is always appended, so the result is never empty. */
std::vector<STR_String> split_string(STR_String str)
{
	std::vector<STR_String> lines;

	/* scan for newlines, emitting the text between consecutive ones */
	int start = 0;
	for (int pos = 0; pos < str.Length(); pos++) {
		if (str.GetAt(pos) == '\n') {
			lines.push_back(str.Mid(start, pos - start));
			start = pos + 1;
		}
	}

	/* remainder after the final newline (possibly the entire string) */
	lines.push_back(str.Mid(start, str.Length() - start));

	return lines;
}
KX_FontObject::KX_FontObject( void* sgReplicationInfo,
SG_Callbacks callbacks,
RAS_IRenderTools* rendertools,
@ -52,8 +72,10 @@ KX_FontObject::KX_FontObject( void* sgReplicationInfo,
m_rendertools(rendertools)
{
Curve *text = static_cast<Curve *> (ob->data);
m_text = text->str;
m_text = split_string(text->str);
m_fsize = text->fsize;
m_line_spacing = text->linedist;
m_offset = MT_Vector3(text->xof, text->yof, 0);
/* FO_BUILTIN_NAME != "default" */
/* I hope at some point Blender (2.5x) can have a single font */
@ -95,20 +117,45 @@ void KX_FontObject::ProcessReplica()
void KX_FontObject::DrawText()
{
/* Allow for some logic brick control */
if(this->GetProperty("text"))
m_text = split_string(this->GetProperty("text")->GetText());
/* only draws the text if visible */
if(this->GetVisible() == 0) return;
/* update the animated color */
this->GetObjectColor().getValue(m_color);
/* XXX 2DO - handle multiple lines */
/* HARDCODED MULTIPLICATION FACTOR - this will affect the render resolution directly */
float RES = BGE_FONT_RES * m_resolution;
float size = m_fsize * m_object->size[0] * RES;
float aspect = 1.f / (m_object->size[0] * RES);
m_rendertools->RenderText3D(m_fontid, m_text, int(size), m_dpi, m_color, this->GetOpenGLMatrix(), aspect);
/* Get a working copy of the OpenGLMatrix to use */
double mat[16];
memcpy(mat, this->GetOpenGLMatrix(), sizeof(double)*16);
/* Account for offset */
MT_Vector3 offset = this->NodeGetWorldOrientation() * m_offset * this->NodeGetWorldScaling();
mat[12] += offset[0]; mat[13] += offset[1]; mat[14] += offset[2];
/* Orient the spacing vector */
MT_Vector3 spacing = MT_Vector3(0, m_fsize*m_line_spacing, 0);
spacing = this->NodeGetWorldOrientation() * spacing * this->NodeGetWorldScaling()[1];
/* Draw each line, taking spacing into consideration */
for(int i=0; i<m_text.size(); ++i)
{
if (i!=0)
{
mat[12] -= spacing[0];
mat[13] -= spacing[1];
mat[14] -= spacing[2];
}
m_rendertools->RenderText3D(m_fontid, m_text[i], int(size), m_dpi, m_color, mat, aspect);
}
}
#ifdef WITH_PYTHON
@ -150,11 +197,35 @@ PyMethodDef KX_FontObject::Methods[] = {
};
PyAttributeDef KX_FontObject::Attributes[] = {
KX_PYATTRIBUTE_STRING_RW("text", 0, 280, false, KX_FontObject, m_text), //arbitrary limit. 280 = 140 unicode chars in unicode
//KX_PYATTRIBUTE_STRING_RW("text", 0, 280, false, KX_FontObject, m_text[0]), //arbitrary limit. 280 = 140 unicode chars in unicode
KX_PYATTRIBUTE_RW_FUNCTION("text", KX_FontObject, pyattr_get_text, pyattr_set_text),
KX_PYATTRIBUTE_FLOAT_RW("size", 0.0001f, 10000.0f, KX_FontObject, m_fsize),
KX_PYATTRIBUTE_FLOAT_RW("resolution", 0.0001f, 10000.0f, KX_FontObject, m_resolution),
/* KX_PYATTRIBUTE_INT_RW("dpi", 0, 10000, false, KX_FontObject, m_dpi), */// no real need for expose this I think
{ NULL } //Sentinel
};
/* Python attribute getter for "text": re-join the stored lines with '\n'
 * separators and return the result as a Python unicode string. */
PyObject* KX_FontObject::pyattr_get_text(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef)
{
	KX_FontObject* self= static_cast<KX_FontObject*>(self_v);

	STR_String joined;
	for(int line=0; line<self->m_text.size(); ++line)
	{
		if(line != 0)
			joined += '\n';
		joined += self->m_text[line];
	}

	return PyUnicode_From_STR_String(joined);
}
int KX_FontObject::pyattr_set_text(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value)
{
KX_FontObject* self= static_cast<KX_FontObject*>(self_v);
if(!PyUnicode_Check(value))
return PY_SET_ATTR_FAIL;
char* chars = _PyUnicode_AsString(value);
self->m_text = split_string(STR_String(chars));
return PY_SET_ATTR_SUCCESS;
}
#endif // WITH_PYTHON

@ -57,13 +57,15 @@ public:
virtual void ProcessReplica();
protected:
STR_String m_text;
std::vector<STR_String> m_text;
Object* m_object;
int m_fontid;
int m_dpi;
float m_fsize;
float m_resolution;
float m_color[4];
float m_line_spacing;
MT_Vector3 m_offset;
class RAS_IRenderTools* m_rendertools; //needed for drawing routine
@ -76,6 +78,8 @@ public:
*/
#ifdef WITH_PYTHON
static PyObject* pyattr_get_text(void* self_v, const KX_PYATTRIBUTE_DEF *attrdef);
static int pyattr_set_text(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value);
#endif
};

@ -888,7 +888,7 @@ public:
*/
virtual PyObject* py_repr(void)
{
return PyUnicode_FromString(GetName().ReadPtr());
return PyUnicode_From_STR_String(GetName());
}
KX_PYMETHOD_O(KX_GameObject,SetWorldPosition);

@ -247,12 +247,14 @@ PyObject* KX_MeshProxy::pyattr_get_materials(void *self_v, const KX_PYATTRIBUTE_
return materials;
}
PyObject * KX_MeshProxy::pyattr_get_numMaterials(void * selfv, const KX_PYATTRIBUTE_DEF * attrdef) {
PyObject * KX_MeshProxy::pyattr_get_numMaterials(void * selfv, const KX_PYATTRIBUTE_DEF * attrdef)
{
KX_MeshProxy * self = static_cast<KX_MeshProxy *> (selfv);
return PyLong_FromSsize_t(self->m_meshobj->NumMaterials());
}
PyObject * KX_MeshProxy::pyattr_get_numPolygons(void * selfv, const KX_PYATTRIBUTE_DEF * attrdef) {
PyObject * KX_MeshProxy::pyattr_get_numPolygons(void * selfv, const KX_PYATTRIBUTE_DEF * attrdef)
{
KX_MeshProxy * self = static_cast<KX_MeshProxy *> (selfv);
return PyLong_FromSsize_t(self->m_meshobj->NumPolygons());
}

@ -617,7 +617,7 @@ public:
static PyObject* pyattr_get_drawing_callback_post(void *self_v, const KX_PYATTRIBUTE_DEF *attrdef);
static int pyattr_set_drawing_callback_post(void *selv_v, const KX_PYATTRIBUTE_DEF *attrdef, PyObject *value);
virtual PyObject* py_repr(void) { return PyUnicode_FromString(GetName().ReadPtr()); }
virtual PyObject* py_repr(void) { return PyUnicode_From_STR_String(GetName()); }
/* getitem/setitem */
static PyMappingMethods Mapping;