Reduce the number of deprecated symbols used from FFmpeg

This switches the FFmpeg-related areas of Blender from deprecated symbols
to the currently supported ones.

Pretty straightforward changes, based on FFmpeg's API documentation
describing which symbols should now be used.

This should make Blender compatible with the recent FFmpeg 0.11 release.

Should be no functional changes.
Sergey Sharybin 2012-06-18 10:29:11 +00:00
parent 5e6e9bd616
commit 0d64e050ea
9 changed files with 87 additions and 64 deletions
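For readers migrating their own code, the core pattern behind most hunks below is the avformat_open_input()/avformat_close_input() pair replacing the deprecated av_open_input_file()/av_close_input_file(). A minimal sketch of that pattern (the helper name and error handling are illustrative, not taken from the commit):

#include <libavformat/avformat.h>

/* Illustrative helper: open and close a media file with the non-deprecated API. */
static int probe_media_file(const char *filename)
{
    /* The context pointer must be NULL (or come from avformat_alloc_context())
     * before calling avformat_open_input(). */
    AVFormatContext *ctx = NULL;

    /* was: av_open_input_file(&ctx, filename, NULL, 0, NULL) */
    if (avformat_open_input(&ctx, filename, NULL, NULL) != 0)
        return -1;

    /* was: av_close_input_file(ctx); the new call also resets ctx to NULL */
    avformat_close_input(&ctx);
    return 0;
}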

@@ -143,23 +143,23 @@ void AUD_FFMPEGReader::init()
 	switch(m_codecCtx->sample_fmt)
 	{
-	case SAMPLE_FMT_U8:
+	case AV_SAMPLE_FMT_U8:
 		m_convert = AUD_convert_u8_float;
 		m_specs.format = AUD_FORMAT_U8;
 		break;
-	case SAMPLE_FMT_S16:
+	case AV_SAMPLE_FMT_S16:
 		m_convert = AUD_convert_s16_float;
 		m_specs.format = AUD_FORMAT_S16;
 		break;
-	case SAMPLE_FMT_S32:
+	case AV_SAMPLE_FMT_S32:
 		m_convert = AUD_convert_s32_float;
 		m_specs.format = AUD_FORMAT_S32;
 		break;
-	case SAMPLE_FMT_FLT:
+	case AV_SAMPLE_FMT_FLT:
 		m_convert = AUD_convert_copy<float>;
 		m_specs.format = AUD_FORMAT_FLOAT32;
 		break;
-	case SAMPLE_FMT_DBL:
+	case AV_SAMPLE_FMT_DBL:
 		m_convert = AUD_convert_double_float;
 		m_specs.format = AUD_FORMAT_FLOAT64;
 		break;
@@ -189,7 +189,7 @@ AUD_FFMPEGReader::AUD_FFMPEGReader(std::string filename) :
 	}
 	catch(AUD_Exception&)
 	{
-		av_close_input_file(m_formatCtx);
+		avformat_close_input(&m_formatCtx);
 		throw;
 	}
 }
@@ -227,7 +227,7 @@ AUD_FFMPEGReader::AUD_FFMPEGReader(AUD_Reference<AUD_Buffer> buffer) :
 	}
 	catch(AUD_Exception&)
 	{
-		av_close_input_stream(m_formatCtx);
+		avformat_close_input(&m_formatCtx);
 		av_free(m_aviocontext);
 		throw;
 	}
@@ -239,7 +239,7 @@ AUD_FFMPEGReader::~AUD_FFMPEGReader()
 	if(m_aviocontext)
 	{
-		av_close_input_stream(m_formatCtx);
+		avformat_close_input(&m_formatCtx);
 		av_free(m_aviocontext);
 	}
 	else

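The SAMPLE_FMT_* to AV_SAMPLE_FMT_* renames above (and in the writer below) follow the move of the sample-format enum into libavutil as AVSampleFormat, declared in libavutil/samplefmt.h; that is also why writeffmpeg.c gains that include further down. A small hedged sketch of the renamed enum in use (the helper is illustrative, not part of the commit):

#include <stdio.h>
#include <libavutil/samplefmt.h>

/* Illustrative: query properties of the renamed sample-format constants. */
static void describe_sample_fmt(enum AVSampleFormat fmt)
{
    /* e.g. AV_SAMPLE_FMT_S16 -> name "s16", 2 bytes per sample */
    printf("%s: %d bytes per sample\n",
           av_get_sample_fmt_name(fmt), av_get_bytes_per_sample(fmt));
}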
@@ -133,23 +133,23 @@ AUD_FFMPEGWriter::AUD_FFMPEGWriter(std::string filename, AUD_DeviceSpecs specs,
 	{
 	case AUD_FORMAT_U8:
 		m_convert = AUD_convert_float_u8;
-		m_codecCtx->sample_fmt = SAMPLE_FMT_U8;
+		m_codecCtx->sample_fmt = AV_SAMPLE_FMT_U8;
 		break;
 	case AUD_FORMAT_S16:
 		m_convert = AUD_convert_float_s16;
-		m_codecCtx->sample_fmt = SAMPLE_FMT_S16;
+		m_codecCtx->sample_fmt = AV_SAMPLE_FMT_S16;
 		break;
 	case AUD_FORMAT_S32:
 		m_convert = AUD_convert_float_s32;
-		m_codecCtx->sample_fmt = SAMPLE_FMT_S32;
+		m_codecCtx->sample_fmt = AV_SAMPLE_FMT_S32;
 		break;
 	case AUD_FORMAT_FLOAT32:
 		m_convert = AUD_convert_copy<float>;
-		m_codecCtx->sample_fmt = SAMPLE_FMT_FLT;
+		m_codecCtx->sample_fmt = AV_SAMPLE_FMT_FLT;
 		break;
 	case AUD_FORMAT_FLOAT64:
 		m_convert = AUD_convert_float_double;
-		m_codecCtx->sample_fmt = SAMPLE_FMT_DBL;
+		m_codecCtx->sample_fmt = AV_SAMPLE_FMT_DBL;
 		break;
 	default:
 		AUD_THROW(AUD_ERROR_FFMPEG, format_error);

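The writer above still assigns a sample format unconditionally; whether an encoder actually accepts it depends on the codec. As a hedged aside (not something this commit adds), a caller can check the encoder's advertised list, roughly:

#include <libavcodec/avcodec.h>

/* Illustrative check: does the encoder advertise support for this sample format? */
static int encoder_supports_sample_fmt(const AVCodec *codec, enum AVSampleFormat fmt)
{
    const enum AVSampleFormat *p = codec->sample_fmts;

    if (!p)
        return 1; /* encoder publishes no list; assume anything goes */
    for (; *p != AV_SAMPLE_FMT_NONE; p++)
        if (*p == fmt)
            return 1;
    return 0;
}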
@@ -40,7 +40,6 @@
 #endif
 #include <libswscale/swscale.h>
-#include <libavcodec/opt.h>
 #if (LIBAVFORMAT_VERSION_MAJOR > 52) || ((LIBAVFORMAT_VERSION_MAJOR >= 52) && (LIBAVFORMAT_VERSION_MINOR >= 105))
 #define FFMPEG_HAVE_AVIO 1
@@ -76,6 +75,20 @@
 #define FFMPEG_FFV1_ALPHA_SUPPORTED
 #endif
+#if ((LIBAVFORMAT_VERSION_MAJOR < 53) || ((LIBAVFORMAT_VERSION_MAJOR == 53) && (LIBAVFORMAT_VERSION_MINOR < 24)) || ((LIBAVFORMAT_VERSION_MAJOR == 53) && (LIBAVFORMAT_VERSION_MINOR < 24) && (LIBAVFORMAT_VERSION_MICRO < 2)))
+#define avformat_close_input(x) av_close_input_file(*(x))
+#endif
+#if ((LIBAVFORMAT_VERSION_MAJOR > 53) || ((LIBAVFORMAT_VERSION_MAJOR == 53) && (LIBAVFORMAT_VERSION_MINOR > 32)) || ((LIBAVFORMAT_VERSION_MAJOR == 53) && (LIBAVFORMAT_VERSION_MINOR == 24) && (LIBAVFORMAT_VERSION_MICRO >= 100)))
+void ff_update_cur_dts(AVFormatContext *s, AVStream *ref_st, int64_t timestamp);
+static inline
+void av_update_cur_dts(AVFormatContext *s, AVStream *ref_st, int64_t timestamp)
+{
+	ff_update_cur_dts(s, ref_st, timestamp);
+}
+#endif
 #ifndef FFMPEG_HAVE_AVIO
 #define AVIO_FLAG_WRITE URL_WRONLY
 #define avio_open url_fopen

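The compatibility shim above keys everything off the libavformat version macros, so callers can use the new API unconditionally and still build against older FFmpeg. A hedged illustration of that gating style (the feature macro name is illustrative; the threshold mirrors the avformat_close_input() check in the shim):

#include <libavformat/avformat.h>

/* Illustrative feature gate: libavformat 53.24+ provides avformat_close_input()
 * natively, older versions fall back to the macro defined in the shim above. */
#if (LIBAVFORMAT_VERSION_MAJOR > 53) || \
    ((LIBAVFORMAT_VERSION_MAJOR == 53) && (LIBAVFORMAT_VERSION_MINOR >= 24))
#  define HAVE_NATIVE_AVFORMAT_CLOSE_INPUT 1
#endif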
@@ -42,8 +42,8 @@
 #include <libavformat/avformat.h>
 #include <libavcodec/avcodec.h>
 #include <libavutil/rational.h>
+#include <libavutil/samplefmt.h>
 #include <libswscale/swscale.h>
-#include <libavcodec/opt.h>
 #include "MEM_guardedalloc.h"
@@ -615,7 +615,7 @@ static AVStream *alloc_audio_stream(RenderData *rd, int codec_id, AVFormatContex
 	c->sample_rate = rd->ffcodecdata.audio_mixrate;
 	c->bit_rate = ffmpeg_audio_bitrate * 1000;
-	c->sample_fmt = SAMPLE_FMT_S16;
+	c->sample_fmt = AV_SAMPLE_FMT_S16;
 	c->channels = rd->ffcodecdata.audio_channels;
 	codec = avcodec_find_encoder(c->codec_id);
 	if (!codec) {
@@ -657,11 +657,21 @@ static AVStream *alloc_audio_stream(RenderData *rd, int codec_id, AVFormatContex
 }
 /* essential functions -- start, append, end */
+static void ffmpeg_dict_set_int(AVDictionary **dict, const char *key, int value)
+{
+	char buffer[32];
+	BLI_snprintf(buffer, sizeof(buffer), "%d", value);
+	av_dict_set(dict, key, buffer, 0);
+}
 static int start_ffmpeg_impl(struct RenderData *rd, int rectx, int recty, ReportList *reports)
 {
 	/* Handle to the output file */
 	AVFormatContext *of;
 	AVOutputFormat *fmt;
+	AVDictionary *opts = NULL;
 	char name[256];
 	const char **exts;
@@ -707,13 +717,14 @@ static int start_ffmpeg_impl(struct RenderData *rd, int rectx, int recty, Report
 	of->oformat = fmt;
 	of->packet_size = rd->ffcodecdata.mux_packet_size;
 	if (ffmpeg_audio_codec != CODEC_ID_NONE) {
-		of->mux_rate = rd->ffcodecdata.mux_rate;
+		ffmpeg_dict_set_int(&opts, "muxrate", rd->ffcodecdata.mux_rate);
 	}
 	else {
-		of->mux_rate = 0;
+		av_dict_set(&opts, "muxrate", "0", 0);
 	}
-	of->preload = (int)(0.5 * AV_TIME_BASE);
+	ffmpeg_dict_set_int(&opts, "preload", (int)(0.5 * AV_TIME_BASE));
 	of->max_delay = (int)(0.7 * AV_TIME_BASE);
 	fmt->audio_codec = ffmpeg_audio_codec;
@@ -776,6 +787,7 @@ static int start_ffmpeg_impl(struct RenderData *rd, int rectx, int recty, Report
 		fmt->audio_codec = CODEC_ID_PCM_S16LE;
 		if (ffmpeg_audio_codec != CODEC_ID_NONE && rd->ffcodecdata.audio_mixrate != 48000 && rd->ffcodecdata.audio_channels != 2) {
 			BKE_report(reports, RPT_ERROR, "FFMPEG only supports 48khz / stereo audio for DV!");
+			av_dict_free(&opts);
 			return 0;
 		}
 	}
@@ -785,6 +797,7 @@ static int start_ffmpeg_impl(struct RenderData *rd, int rectx, int recty, Report
 		printf("alloc video stream %p\n", video_stream);
 		if (!video_stream) {
 			BKE_report(reports, RPT_ERROR, "Error initializing video stream.");
+			av_dict_free(&opts);
 			return 0;
 		}
 	}
@@ -793,27 +806,26 @@ static int start_ffmpeg_impl(struct RenderData *rd, int rectx, int recty, Report
 		audio_stream = alloc_audio_stream(rd, fmt->audio_codec, of);
 		if (!audio_stream) {
 			BKE_report(reports, RPT_ERROR, "Error initializing audio stream.");
+			av_dict_free(&opts);
 			return 0;
 		}
 	}
-	if (av_set_parameters(of, NULL) < 0) {
-		BKE_report(reports, RPT_ERROR, "Error setting output parameters.");
-		return 0;
-	}
 	if (!(fmt->flags & AVFMT_NOFILE)) {
 		if (avio_open(&of->pb, name, AVIO_FLAG_WRITE) < 0) {
 			BKE_report(reports, RPT_ERROR, "Could not open file for writing.");
+			av_dict_free(&opts);
 			return 0;
 		}
 	}
-	if (av_write_header(of) < 0) {
+	if (avformat_write_header(of, NULL) < 0) {
 		BKE_report(reports, RPT_ERROR, "Could not initialize streams. Probably unsupported codec combination.");
+		av_dict_free(&opts);
 		return 0;
 	}
 	outfile = of;
 	av_dump_format(of, 0, name, 1);
+	av_dict_free(&opts);
 	return 1;
 }

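The start_ffmpeg_impl() changes above replace direct AVFormatContext field writes and av_set_parameters()/av_write_header() with AVDictionary entries plus avformat_write_header(). A minimal hedged sketch of that pattern (the helper name is illustrative, and whether "muxrate" is honored depends on the container's muxer):

#include <stdio.h>
#include <libavformat/avformat.h>
#include <libavutil/dict.h>

/* Illustrative: write the header of an already-configured output context,
 * passing integer muxer options as dictionary strings. */
static int write_header_with_muxrate(AVFormatContext *of, int mux_rate)
{
    AVDictionary *opts = NULL;
    char buf[32];
    int ret;

    snprintf(buf, sizeof(buf), "%d", mux_rate);
    av_dict_set(&opts, "muxrate", buf, 0);

    ret = avformat_write_header(of, &opts); /* consumes the options it recognizes */
    if (ret < 0)
        fprintf(stderr, "could not write header\n");

    av_dict_free(&opts); /* free whatever the muxer did not consume */
    return ret;
}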
@@ -445,7 +445,7 @@ static int startffmpeg(struct anim *anim)
 	int i, videoStream;
 	AVCodec *pCodec;
-	AVFormatContext *pFormatCtx;
+	AVFormatContext *pFormatCtx = NULL;
 	AVCodecContext *pCodecCtx;
 	int frs_num;
 	double frs_den;
@@ -464,7 +464,7 @@ static int startffmpeg(struct anim *anim)
 	do_init_ffmpeg();
-	if (av_open_input_file(&pFormatCtx, anim->name, NULL, 0, NULL) != 0) {
+	if (avformat_open_input(&pFormatCtx, anim->name, NULL, NULL) != 0) {
 		return -1;
 	}

@@ -531,13 +531,6 @@ static struct proxy_output_ctx *alloc_proxy_output_ffmpeg(
 		rv->c->flags |= CODEC_FLAG_GLOBAL_HEADER;
 	}
-	if (av_set_parameters(rv->of, NULL) < 0) {
-		fprintf(stderr, "Couldn't set output parameters? "
-		        "Proxy not built!\n");
-		av_free(rv->of);
-		return 0;
-	}
 	if (avio_open(&rv->of->pb, fname, AVIO_FLAG_WRITE) < 0) {
 		fprintf(stderr, "Couldn't open outputfile! "
 		        "Proxy not built!\n");
@@ -574,7 +567,12 @@ static struct proxy_output_ctx *alloc_proxy_output_ffmpeg(
 		               NULL, NULL, NULL);
 	}
-	av_write_header(rv->of);
+	if (avformat_write_header(rv->of, NULL) < 0) {
+		fprintf(stderr, "Couldn't set output parameters? "
+		        "Proxy not built!\n");
+		av_free(rv->of);
+		return 0;
+	}
 	return rv;
 }
@@ -737,7 +735,7 @@ static IndexBuildContext *index_ffmpeg_create_context(struct anim *anim, IMB_Tim
 	memset(context->proxy_ctx, 0, sizeof(context->proxy_ctx));
 	memset(context->indexer, 0, sizeof(context->indexer));
-	if (av_open_input_file(&context->iFormatCtx, anim->name, NULL, 0, NULL) != 0) {
+	if (avformat_open_input(&context->iFormatCtx, anim->name, NULL, NULL) != 0) {
 		MEM_freeN(context);
 		return NULL;
 	}

@@ -247,7 +247,7 @@ void do_init_ffmpeg(void)
 static int isffmpeg(const char *filename)
 {
-	AVFormatContext *pFormatCtx;
+	AVFormatContext *pFormatCtx = NULL;
 	unsigned int i;
 	int videoStream;
 	AVCodec *pCodec;
@@ -268,7 +268,7 @@ static int isffmpeg(const char *filename)
 		return 0;
 	}
-	if (av_open_input_file(&pFormatCtx, filename, NULL, 0, NULL) != 0) {
+	if (avformat_open_input(&pFormatCtx, filename, NULL, NULL) != 0) {
 		if (UTIL_DEBUG) fprintf(stderr, "isffmpeg: av_open_input_file failed\n");
 		return 0;
 	}

@@ -162,14 +162,14 @@ void VideoFFmpeg::initParams (short width, short height, float rate, bool image)
 }
-int VideoFFmpeg::openStream(const char *filename, AVInputFormat *inputFormat, AVFormatParameters *formatParams)
+int VideoFFmpeg::openStream(const char *filename, AVInputFormat *inputFormat, AVDictionary **formatParams)
 {
-	AVFormatContext *formatCtx;
+	AVFormatContext *formatCtx = NULL;
 	int i, videoStream;
 	AVCodec *codec;
 	AVCodecContext *codecCtx;
-	if (av_open_input_file(&formatCtx, filename, inputFormat, 0, formatParams)!=0)
+	if (avformat_open_input(&formatCtx, filename, inputFormat, formatParams)!=0)
 		return -1;
 	if (av_find_stream_info(formatCtx)<0)
@@ -545,11 +545,7 @@ void VideoFFmpeg::openFile (char * filename)
 		// but it is really not desirable to seek on http file, so force streaming.
 		// It would be good to find this information from the context but there are no simple indication
 		!strncmp(filename, "http://", 7) ||
-#ifdef FFMPEG_PB_IS_POINTER
-		(m_formatCtx->pb && m_formatCtx->pb->is_streamed)
-#else
-		m_formatCtx->pb.is_streamed
-#endif
+		(m_formatCtx->pb && !m_formatCtx->pb->seekable)
 		)
 	{
 		// the file is in fact a streaming source, treat as cam to prevent seeking
@@ -586,14 +582,12 @@ void VideoFFmpeg::openCam (char * file, short camIdx)
 {
 	// open camera source
 	AVInputFormat *inputFormat;
-	AVFormatParameters formatParams;
-	AVRational frameRate;
+	AVDictionary *formatParams = NULL;
 	char filename[28], rateStr[20];
 	char *p;
 	do_init_ffmpeg();
-	memset(&formatParams, 0, sizeof(formatParams));
 #ifdef WIN32
 	// video capture on windows only through Video For Windows driver
 	inputFormat = av_find_input_format("vfwcap");
@@ -623,7 +617,13 @@ void VideoFFmpeg::openCam (char * file, short camIdx)
 		sprintf(filename, "/dev/dv1394/%d", camIdx);
 	} else
 	{
-		inputFormat = av_find_input_format("video4linux");
+		const char *formats[] = {"video4linux2,v4l2", "video4linux2", "video4linux"};
+		int i, formatsCount = sizeof(formats) / sizeof(char*);
+		for (i = 0; i < formatsCount; i++) {
+			inputFormat = av_find_input_format(formats[i]);
+			if (inputFormat)
+				break;
+		}
 		sprintf(filename, "/dev/video%d", camIdx);
 	}
 	if (!inputFormat)
@@ -637,20 +637,22 @@ void VideoFFmpeg::openCam (char * file, short camIdx)
 		if ((p = strchr(filename, ':')) != 0)
 			*p = 0;
 	}
-	if (file && (p = strchr(file, ':')) != NULL)
-		formatParams.standard = p+1;
+	if (file && (p = strchr(file, ':')) != NULL) {
+		av_dict_set(&formatParams, "standard", p+1, 0);
+	}
 #endif
 	//frame rate
 	if (m_captRate <= 0.f)
 		m_captRate = defFrameRate;
 	sprintf(rateStr, "%f", m_captRate);
-	av_parse_video_rate(&frameRate, rateStr);
-	// populate format parameters
-	// need to specify the time base = inverse of rate
-	formatParams.time_base.num = frameRate.den;
-	formatParams.time_base.den = frameRate.num;
-	formatParams.width = m_captWidth;
-	formatParams.height = m_captHeight;
+	av_dict_set(&formatParams, "framerate", rateStr, 0);
+	if (m_captWidth > 0 && m_captHeight > 0) {
+		char video_size[64];
+		BLI_snprintf(video_size, sizeof(video_size), "%dx%d", m_captWidth, m_captHeight);
+		av_dict_set(&formatParams, "video_size", video_size, 0);
+	}
 	if (openStream(filename, inputFormat, &formatParams) != 0)
 		return;
@@ -665,6 +667,8 @@ void VideoFFmpeg::openCam (char * file, short camIdx)
 		// no need to thread if the system has a single core
 		m_isThreaded = true;
 	}
+	av_dict_free(&formatParams);
 }
 // play video

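In the VideoFFmpeg hunks above, capture parameters that used to be AVFormatParameters fields (standard, frame rate, width/height) become demuxer options in an AVDictionary. A hedged sketch of the same idea against the v4l2 input device (the device path, option values, and the helper itself are illustrative, not part of the commit):

#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavutil/dict.h>

/* Illustrative: open a V4L2 capture device with options passed as a dictionary,
 * e.g. open_v4l2_capture("/dev/video0"). */
static AVFormatContext *open_v4l2_capture(const char *device)
{
    AVFormatContext *ctx = NULL;
    AVDictionary *opts = NULL;
    AVInputFormat *ifmt;

    av_register_all();
    avdevice_register_all();

    ifmt = av_find_input_format("video4linux2");
    if (!ifmt)
        return NULL;

    av_dict_set(&opts, "framerate", "25", 0);
    av_dict_set(&opts, "video_size", "640x480", 0);

    if (avformat_open_input(&ctx, device, ifmt, &opts) != 0)
        ctx = NULL; /* open failed; nothing to close */

    av_dict_free(&opts); /* options the demuxer did not consume remain here */
    return ctx;
}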
@@ -46,10 +46,6 @@ extern "C" {
 # define FFMPEG_CODEC_IS_POINTER 1
 #endif
-#if LIBAVFORMAT_VERSION_INT >= (52 << 16)
-# define FFMPEG_PB_IS_POINTER 1
-#endif
 #ifdef FFMPEG_CODEC_IS_POINTER
 static inline AVCodecContext* get_codec_from_stream(AVStream* stream)
 {
@@ -172,7 +168,7 @@ protected:
 	double actFrameRate (void) { return m_frameRate * m_baseFrameRate; }
 	/// common function to video file and capture
-	int openStream(const char *filename, AVInputFormat *inputFormat, AVFormatParameters *formatParams);
+	int openStream(const char *filename, AVInputFormat *inputFormat, AVDictionary **formatParams);
 	/// check if a frame is available and load it in pFrame, return true if a frame could be retrieved
 	AVFrame* grabFrame(long frame);