From aba232cfa9b193604ed98f3fa505378d006b1b3b Mon Sep 17 00:00:00 2001
From: Anton Khirnov
Date: Tue, 26 Jun 2012 13:10:01 +0200
Subject: lavf: deprecate r_frame_rate.

According to its description, it is supposed to be the LCM of all the
frame durations. The usability of such a thing is vanishingly small,
especially since we cannot determine it with any amount of reliability.
Therefore get rid of it after the next bump.

Replace it with the average framerate where it makes sense.

FATE results for the wtv and xmv demux tests change. In the wtv case
this is caused by the file being corrupted (or possibly badly cut) and
containing invalid timestamps. This results in lavf estimating the
framerate wrong and making up wrong frame durations.

In the xmv case the file contains pts jumps, so again the estimated
framerate is far from anything sane and lavf again makes up different
frame durations.

In some other tests lavf starts making up frame durations from a
different frame.
---
 libavformat/avformat.h | 4 ++++
 1 file changed, 4 insertions(+)

(limited to 'libavformat/avformat.h')

diff --git a/libavformat/avformat.h b/libavformat/avformat.h
index 0e50487414..1dbbb10338 100644
--- a/libavformat/avformat.h
+++ b/libavformat/avformat.h
@@ -630,6 +630,7 @@ typedef struct AVStream {
      * not actually used for encoding.
      */
     AVCodecContext *codec;
+#if FF_API_R_FRAME_RATE
     /**
      * Real base framerate of the stream.
      * This is the lowest framerate with which all timestamps can be
@@ -639,6 +640,7 @@
      * approximately 3600 or 1800 timer ticks, then r_frame_rate will be 50/1.
      */
     AVRational r_frame_rate;
+#endif
     void *priv_data;

     /**
@@ -714,10 +716,12 @@
      */
 #define MAX_STD_TIMEBASES (60*12+5)
     struct {
+#if FF_API_R_FRAME_RATE
         int64_t last_dts;
         int64_t duration_gcd;
         int duration_count;
         double duration_error[MAX_STD_TIMEBASES];
+#endif
         int nb_decoded_frames;
         int found_decoder;
-- 
cgit v1.2.1
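
For downstream callers, a minimal migration sketch (hypothetical, not part of this patch) might read the frame rate as below; it assumes avg_frame_rate has been filled in by avformat_find_stream_info() and treats the deprecated field only as a fallback while FF_API_R_FRAME_RATE is still enabled.

/*
 * Hypothetical caller-side sketch, not part of this commit:
 * prefer avg_frame_rate and keep r_frame_rate only as a
 * compile-time-guarded legacy fallback.
 */
#include <stdio.h>
#include <libavformat/avformat.h>

static AVRational pick_frame_rate(const AVStream *st)
{
    /* Prefer the average frame rate, which this patch keeps. */
    if (st->avg_frame_rate.num && st->avg_frame_rate.den)
        return st->avg_frame_rate;
#if FF_API_R_FRAME_RATE
    /* Legacy fallback, only while the deprecated field still exists. */
    if (st->r_frame_rate.num && st->r_frame_rate.den)
        return st->r_frame_rate;
#endif
    return (AVRational){ 0, 1 }; /* unknown */
}

int main(int argc, char **argv)
{
    AVFormatContext *ic = NULL;

    if (argc < 2)
        return 1;

    av_register_all(); /* needed on lavf of this era */

    if (avformat_open_input(&ic, argv[1], NULL, NULL) < 0)
        return 1;
    if (avformat_find_stream_info(ic, NULL) < 0)
        return 1;

    for (unsigned i = 0; i < ic->nb_streams; i++) {
        AVRational fr = pick_frame_rate(ic->streams[i]);
        printf("stream %u: %d/%d fps\n", i, fr.num, fr.den);
    }

    avformat_close_input(&ic);
    return 0;
}

Guarding the fallback with FF_API_R_FRAME_RATE lets the same source build both before and after the field is finally removed at the bump.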