/*
 * Copyright 2007 Bobby Bingham
 * Copyright 2012 Robert Nagy <ronag89 gmail com>
 * Copyright 2012 Anton Khirnov <anton khirnov net>
 * Copyright 2018 Calvin Walton <calvin.walton@kepstin.ca>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
23
/**
 * @file
 * a filter enforcing a given constant framerate
 */
28
29 #include <float.h>
30 #include <stdint.h>
31
32 #include "libavutil/avassert.h"
33 #include "libavutil/eval.h"
34 #include "libavutil/mathematics.h"
35 #include "libavutil/opt.h"
36 #include "avfilter.h"
37 #include "filters.h"
38 #include "internal.h"
39
40 enum EOFAction {
41 EOF_ACTION_ROUND,
42 EOF_ACTION_PASS,
43 EOF_ACTION_NB
44 };
45
46 static const char *const var_names[] = {
47 "source_fps",
48 "ntsc",
49 "pal",
50 "film",
51 "ntsc_film",
52 NULL
53 };
54
55 enum var_name {
56 VAR_SOURCE_FPS,
57 VAR_FPS_NTSC,
58 VAR_FPS_PAL,
59 VAR_FPS_FILM,
60 VAR_FPS_NTSC_FILM,
61 VARS_NB
62 };
63
64 static const double ntsc_fps = 30000.0 / 1001.0;
65 static const double pal_fps = 25.0;
66 static const double film_fps = 24.0;
67 static const double ntsc_film_fps = 24000.0 / 1001.0;
68
69 typedef struct FPSContext {
70 const AVClass *class;
71
72 double start_time; ///< pts, in seconds, of the expected first frame
73
74 char *framerate; ///< expression that defines the target framerate
75 int rounding; ///< AVRounding method for timestamps
76 int eof_action; ///< action performed for last frame in FIFO
77
78 /* Set during outlink configuration */
79 int64_t in_pts_off; ///< input frame pts offset for start_time handling
80 int64_t out_pts_off; ///< output frame pts offset for start_time handling
81
82 /* Runtime state */
83 int status; ///< buffered input status
84 int64_t status_pts; ///< buffered input status timestamp
85
86 AVFrame *frames[2]; ///< buffered frames
87 int frames_count; ///< number of buffered frames
88
89 int64_t next_pts; ///< pts of the next frame to output
90
91 /* statistics */
92 int cur_frame_out; ///< number of times current frame has been output
93 int frames_in; ///< number of frames on input
94 int frames_out; ///< number of frames on output
95 int dup; ///< number of frames duplicated
96 int drop; ///< number of framed dropped
97 } FPSContext;
98
99 #define OFFSET(x) offsetof(FPSContext, x)
100 #define V AV_OPT_FLAG_VIDEO_PARAM
101 #define F AV_OPT_FLAG_FILTERING_PARAM
102 static const AVOption fps_options[] = {
103 { "fps", "A string describing desired output framerate", OFFSET(framerate), AV_OPT_TYPE_STRING, { .str = "25" }, 0, 0, V|F },
104 { "start_time", "Assume the first PTS should be this value.", OFFSET(start_time), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX}, -DBL_MAX, DBL_MAX, V|F },
105 { "round", "set rounding method for timestamps", OFFSET(rounding), AV_OPT_TYPE_INT, { .i64 = AV_ROUND_NEAR_INF }, 0, 5, V|F, "round" },
106 { "zero", "round towards 0", 0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_ZERO }, 0, 0, V|F, "round" },
107 { "inf", "round away from 0", 0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_INF }, 0, 0, V|F, "round" },
108 { "down", "round towards -infty", 0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_DOWN }, 0, 0, V|F, "round" },
109 { "up", "round towards +infty", 0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_UP }, 0, 0, V|F, "round" },
110 { "near", "round to nearest", 0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_NEAR_INF }, 0, 0, V|F, "round" },
111 { "eof_action", "action performed for last frame", OFFSET(eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_ROUND }, 0, EOF_ACTION_NB-1, V|F, "eof_action" },
112 { "round", "round similar to other frames", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ROUND }, 0, 0, V|F, "eof_action" },
113 { "pass", "pass through last frame", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS }, 0, 0, V|F, "eof_action" },
114 { NULL }
115 };
116
117 AVFILTER_DEFINE_CLASS(fps);
118
init(AVFilterContext * ctx)119 static av_cold int init(AVFilterContext *ctx)
120 {
121 FPSContext *s = ctx->priv;
122
123 s->status_pts = AV_NOPTS_VALUE;
124 s->next_pts = AV_NOPTS_VALUE;
125
126 return 0;
127 }
128
129 /* Remove the first frame from the buffer, returning it */
shift_frame(AVFilterContext * ctx,FPSContext * s)130 static AVFrame *shift_frame(AVFilterContext *ctx, FPSContext *s)
131 {
132 AVFrame *frame;
133
134 /* Must only be called when there are frames in the buffer */
135 av_assert1(s->frames_count > 0);
136
137 frame = s->frames[0];
138 s->frames[0] = s->frames[1];
139 s->frames[1] = NULL;
140 s->frames_count--;
141
142 /* Update statistics counters */
143 s->frames_out += s->cur_frame_out;
144 if (s->cur_frame_out > 1) {
145 av_log(ctx, AV_LOG_DEBUG, "Duplicated frame with pts %"PRId64" %d times\n",
146 frame->pts, s->cur_frame_out - 1);
147 s->dup += s->cur_frame_out - 1;
148 } else if (s->cur_frame_out == 0) {
149 av_log(ctx, AV_LOG_DEBUG, "Dropping frame with pts %"PRId64"\n",
150 frame->pts);
151 s->drop++;
152 }
153 s->cur_frame_out = 0;
154
155 return frame;
156 }
157
uninit(AVFilterContext * ctx)158 static av_cold void uninit(AVFilterContext *ctx)
159 {
160 FPSContext *s = ctx->priv;
161
162 AVFrame *frame;
163
164 while (s->frames_count > 0) {
165 frame = shift_frame(ctx, s);
166 av_frame_free(&frame);
167 }
168
169 av_log(ctx, AV_LOG_VERBOSE, "%d frames in, %d frames out; %d frames dropped, "
170 "%d frames duplicated.\n", s->frames_in, s->frames_out, s->drop, s->dup);
171 }
172
config_props(AVFilterLink * outlink)173 static int config_props(AVFilterLink* outlink)
174 {
175 AVFilterContext *ctx = outlink->src;
176 AVFilterLink *inlink = ctx->inputs[0];
177 FPSContext *s = ctx->priv;
178
179 double var_values[VARS_NB], res;
180 int ret;
181
182 var_values[VAR_SOURCE_FPS] = av_q2d(inlink->frame_rate);
183 var_values[VAR_FPS_NTSC] = ntsc_fps;
184 var_values[VAR_FPS_PAL] = pal_fps;
185 var_values[VAR_FPS_FILM] = film_fps;
186 var_values[VAR_FPS_NTSC_FILM] = ntsc_film_fps;
187 ret = av_expr_parse_and_eval(&res, s->framerate,
188 var_names, var_values,
189 NULL, NULL, NULL, NULL, NULL, 0, ctx);
190 if (ret < 0)
191 return ret;
192
193 outlink->frame_rate = av_d2q(res, INT_MAX);
194 outlink->time_base = av_inv_q(outlink->frame_rate);
195
196 /* Calculate the input and output pts offsets for start_time */
197 if (s->start_time != DBL_MAX && s->start_time != AV_NOPTS_VALUE) {
198 double first_pts = s->start_time * AV_TIME_BASE;
199 if (first_pts < INT64_MIN || first_pts > INT64_MAX) {
200 av_log(ctx, AV_LOG_ERROR, "Start time %f cannot be represented in internal time base\n",
201 s->start_time);
202 return AVERROR(EINVAL);
203 }
204 s->in_pts_off = av_rescale_q_rnd(first_pts, AV_TIME_BASE_Q, inlink->time_base,
205 s->rounding | AV_ROUND_PASS_MINMAX);
206 s->out_pts_off = av_rescale_q_rnd(first_pts, AV_TIME_BASE_Q, outlink->time_base,
207 s->rounding | AV_ROUND_PASS_MINMAX);
208 s->next_pts = s->out_pts_off;
209 av_log(ctx, AV_LOG_VERBOSE, "Set first pts to (in:%"PRId64" out:%"PRId64") from start time %f\n",
210 s->in_pts_off, s->out_pts_off, s->start_time);
211 }
212
213 av_log(ctx, AV_LOG_VERBOSE, "fps=%d/%d\n", outlink->frame_rate.num, outlink->frame_rate.den);
214
215 return 0;
216 }
217
218 /* Read a frame from the input and save it in the buffer */
read_frame(AVFilterContext * ctx,FPSContext * s,AVFilterLink * inlink,AVFilterLink * outlink)219 static int read_frame(AVFilterContext *ctx, FPSContext *s, AVFilterLink *inlink, AVFilterLink *outlink)
220 {
221 AVFrame *frame;
222 int ret;
223 int64_t in_pts;
224
225 /* Must only be called when we have buffer room available */
226 av_assert1(s->frames_count < 2);
227
228 ret = ff_inlink_consume_frame(inlink, &frame);
229 /* Caller must have run ff_inlink_check_available_frame first */
230 av_assert1(ret);
231 if (ret < 0)
232 return ret;
233
234 /* Convert frame pts to output timebase.
235 * The dance with offsets is required to match the rounding behaviour of the
236 * previous version of the fps filter when using the start_time option. */
237 in_pts = frame->pts;
238 frame->pts = s->out_pts_off + av_rescale_q_rnd(in_pts - s->in_pts_off,
239 inlink->time_base, outlink->time_base,
240 s->rounding | AV_ROUND_PASS_MINMAX);
241
242 av_log(ctx, AV_LOG_DEBUG, "Read frame with in pts %"PRId64", out pts %"PRId64"\n",
243 in_pts, frame->pts);
244
245 s->frames[s->frames_count++] = frame;
246 s->frames_in++;
247
248 return 1;
249 }
250
251 /* Write a frame to the output */
write_frame(AVFilterContext * ctx,FPSContext * s,AVFilterLink * outlink,int * again)252 static int write_frame(AVFilterContext *ctx, FPSContext *s, AVFilterLink *outlink, int *again)
253 {
254 AVFrame *frame;
255
256 av_assert1(s->frames_count == 2 || (s->status && s->frames_count == 1));
257
258 /* We haven't yet determined the pts of the first frame */
259 if (s->next_pts == AV_NOPTS_VALUE) {
260 if (s->frames[0]->pts != AV_NOPTS_VALUE) {
261 s->next_pts = s->frames[0]->pts;
262 av_log(ctx, AV_LOG_VERBOSE, "Set first pts to %"PRId64"\n", s->next_pts);
263 } else {
264 av_log(ctx, AV_LOG_WARNING, "Discarding initial frame(s) with no "
265 "timestamp.\n");
266 frame = shift_frame(ctx, s);
267 av_frame_free(&frame);
268 *again = 1;
269 return 0;
270 }
271 }
272
273 /* There are two conditions where we want to drop a frame:
274 * - If we have two buffered frames and the second frame is acceptable
275 * as the next output frame, then drop the first buffered frame.
276 * - If we have status (EOF) set, drop frames when we hit the
277 * status timestamp. */
278 if ((s->frames_count == 2 && s->frames[1]->pts <= s->next_pts) ||
279 (s->status && s->status_pts <= s->next_pts)) {
280
281 frame = shift_frame(ctx, s);
282 av_frame_free(&frame);
283 *again = 1;
284 return 0;
285
286 /* Output a copy of the first buffered frame */
287 } else {
288 frame = av_frame_clone(s->frames[0]);
289 if (!frame)
290 return AVERROR(ENOMEM);
291 // Make sure Closed Captions will not be duplicated
292 av_frame_remove_side_data(s->frames[0], AV_FRAME_DATA_A53_CC);
293 frame->pts = s->next_pts++;
294
295 av_log(ctx, AV_LOG_DEBUG, "Writing frame with pts %"PRId64" to pts %"PRId64"\n",
296 s->frames[0]->pts, frame->pts);
297 s->cur_frame_out++;
298 *again = 1;
299 return ff_filter_frame(outlink, frame);
300 }
301 }
302
303 /* Convert status_pts to outlink timebase */
update_eof_pts(AVFilterContext * ctx,FPSContext * s,AVFilterLink * inlink,AVFilterLink * outlink,int64_t status_pts)304 static void update_eof_pts(AVFilterContext *ctx, FPSContext *s, AVFilterLink *inlink, AVFilterLink *outlink, int64_t status_pts)
305 {
306 int eof_rounding = (s->eof_action == EOF_ACTION_PASS) ? AV_ROUND_UP : s->rounding;
307 s->status_pts = av_rescale_q_rnd(status_pts, inlink->time_base, outlink->time_base,
308 eof_rounding | AV_ROUND_PASS_MINMAX);
309
310 av_log(ctx, AV_LOG_DEBUG, "EOF is at pts %"PRId64"\n", s->status_pts);
311 }
312
activate(AVFilterContext * ctx)313 static int activate(AVFilterContext *ctx)
314 {
315 FPSContext *s = ctx->priv;
316 AVFilterLink *inlink = ctx->inputs[0];
317 AVFilterLink *outlink = ctx->outputs[0];
318
319 int ret;
320 int again = 0;
321 int64_t status_pts;
322
323 FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
324
325 /* No buffered status: normal operation */
326 if (!s->status) {
327
328 /* Read available input frames if we have room */
329 while (s->frames_count < 2 && ff_inlink_check_available_frame(inlink)) {
330 ret = read_frame(ctx, s, inlink, outlink);
331 if (ret < 0)
332 return ret;
333 }
334
335 /* We do not yet have enough frames to produce output */
336 if (s->frames_count < 2) {
337 /* Check if we've hit EOF (or otherwise that an error status is set) */
338 ret = ff_inlink_acknowledge_status(inlink, &s->status, &status_pts);
339 if (ret > 0)
340 update_eof_pts(ctx, s, inlink, outlink, status_pts);
341
342 if (!ret) {
343 /* If someone wants us to output, we'd better ask for more input */
344 FF_FILTER_FORWARD_WANTED(outlink, inlink);
345 return 0;
346 }
347 }
348 }
349
350 /* Buffered frames are available, so generate an output frame */
351 if (s->frames_count > 0) {
352 ret = write_frame(ctx, s, outlink, &again);
353 /* Couldn't generate a frame, so schedule us to perform another step */
354 if (again && ff_inoutlink_check_flow(inlink, outlink))
355 ff_filter_set_ready(ctx, 100);
356 return ret;
357 }
358
359 /* No frames left, so forward the status */
360 if (s->status && s->frames_count == 0) {
361 ff_outlink_set_status(outlink, s->status, s->next_pts);
362 return 0;
363 }
364
365 return FFERROR_NOT_READY;
366 }
367
368 static const AVFilterPad avfilter_vf_fps_inputs[] = {
369 {
370 .name = "default",
371 .type = AVMEDIA_TYPE_VIDEO,
372 },
373 };
374
375 static const AVFilterPad avfilter_vf_fps_outputs[] = {
376 {
377 .name = "default",
378 .type = AVMEDIA_TYPE_VIDEO,
379 .config_props = config_props,
380 },
381 };
382
383 const AVFilter ff_vf_fps = {
384 .name = "fps",
385 .description = NULL_IF_CONFIG_SMALL("Force constant framerate."),
386 .init = init,
387 .uninit = uninit,
388 .priv_size = sizeof(FPSContext),
389 .priv_class = &fps_class,
390 .activate = activate,
391 .flags = AVFILTER_FLAG_METADATA_ONLY,
392 FILTER_INPUTS(avfilter_vf_fps_inputs),
393 FILTER_OUTPUTS(avfilter_vf_fps_outputs),
394 };
395