/*
 * Copyright 2007 Bobby Bingham
 * Copyright 2012 Robert Nagy <ronag89 gmail com>
 * Copyright 2012 Anton Khirnov <anton khirnov net>
 * Copyright 2018 Calvin Walton <calvin.walton@kepstin.ca>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * A filter enforcing a given constant framerate
 */
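
/*
 * Illustrative usage (added commentary, not part of the original source):
 * on the ffmpeg command line this filter is typically invoked as e.g.
 *     ffmpeg -i input.mp4 -vf "fps=30" output.mp4
 * which duplicates or drops frames as needed to produce constant 30 fps
 * output; further options can be appended, e.g.
 *     -vf "fps=fps=24000/1001:round=near:eof_action=pass"
 */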

#include <float.h>
#include <stdint.h>

#include "libavutil/avassert.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
#include "avfilter.h"
#include "filters.h"
#include "internal.h"

enum EOFAction {
    EOF_ACTION_ROUND,
    EOF_ACTION_PASS,
    EOF_ACTION_NB
};

typedef struct FPSContext {
    const AVClass *class;

    double start_time;      ///< pts, in seconds, of the expected first frame

    AVRational framerate;   ///< target framerate
    int rounding;           ///< AVRounding method for timestamps
    int eof_action;         ///< action performed for last frame in FIFO

    /* Set during outlink configuration */
    int64_t in_pts_off;     ///< input frame pts offset for start_time handling
    int64_t out_pts_off;    ///< output frame pts offset for start_time handling

    /* Runtime state */
    int status;             ///< buffered input status
    int64_t status_pts;     ///< buffered input status timestamp

    AVFrame *frames[2];     ///< buffered frames
    int frames_count;       ///< number of buffered frames

    int64_t next_pts;       ///< pts of the next frame to output

    /* statistics */
    int cur_frame_out;      ///< number of times current frame has been output
    int frames_in;          ///< number of frames on input
    int frames_out;         ///< number of frames on output
    int dup;                ///< number of frames duplicated
    int drop;               ///< number of frames dropped
} FPSContext;
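
/*
 * Note (added commentary): the filter keeps at most two input frames
 * buffered in frames[]. Output is driven by comparing the pts of the
 * second buffered frame (or the EOF timestamp) against next_pts, which
 * decides whether frames[0] is duplicated, emitted once, or dropped;
 * see write_frame() below.
 */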

#define OFFSET(x) offsetof(FPSContext, x)
#define V AV_OPT_FLAG_VIDEO_PARAM
#define F AV_OPT_FLAG_FILTERING_PARAM
static const AVOption fps_options[] = {
    { "fps", "A string describing desired output framerate", OFFSET(framerate), AV_OPT_TYPE_VIDEO_RATE, { .str = "25" }, 0, INT_MAX, V|F },
    { "start_time", "Assume the first PTS should be this value.", OFFSET(start_time), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, V|F },
    { "round", "set rounding method for timestamps", OFFSET(rounding), AV_OPT_TYPE_INT, { .i64 = AV_ROUND_NEAR_INF }, 0, 5, V|F, "round" },
        { "zero", "round towards 0",      0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_ZERO     }, 0, 0, V|F, "round" },
        { "inf",  "round away from 0",    0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_INF      }, 0, 0, V|F, "round" },
        { "down", "round towards -infty", 0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_DOWN     }, 0, 0, V|F, "round" },
        { "up",   "round towards +infty", 0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_UP       }, 0, 0, V|F, "round" },
        { "near", "round to nearest",     0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_NEAR_INF }, 0, 0, V|F, "round" },
    { "eof_action", "action performed for last frame", OFFSET(eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_ROUND }, 0, EOF_ACTION_NB-1, V|F, "eof_action" },
        { "round", "round similar to other frames", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ROUND }, 0, 0, V|F, "eof_action" },
        { "pass",  "pass through last frame",       0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS  }, 0, 0, V|F, "eof_action" },
    { NULL }
};
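
/*
 * Illustrative option strings (added commentary, not exhaustive):
 *     fps=25                               use all defaults
 *     fps=fps=24000/1001:round=near        NTSC film rate, nearest rounding
 *     fps=30:start_time=0:eof_action=pass  assume the first pts is 0 and always emit the last frame
 * These are parsed by the generic AVOption system via fps_class below.
 */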

AVFILTER_DEFINE_CLASS(fps);

static av_cold int init(AVFilterContext *ctx)
{
    FPSContext *s = ctx->priv;

    s->status_pts = AV_NOPTS_VALUE;
    s->next_pts   = AV_NOPTS_VALUE;

    av_log(ctx, AV_LOG_VERBOSE, "fps=%d/%d\n", s->framerate.num, s->framerate.den);
    return 0;
}

/* Remove the first frame from the buffer, returning it */
static AVFrame *shift_frame(AVFilterContext *ctx, FPSContext *s)
{
    AVFrame *frame;

    /* Must only be called when there are frames in the buffer */
    av_assert1(s->frames_count > 0);

    frame = s->frames[0];
    s->frames[0] = s->frames[1];
    s->frames[1] = NULL;
    s->frames_count--;

    /* Update statistics counters */
    s->frames_out += s->cur_frame_out;
    if (s->cur_frame_out > 1) {
        av_log(ctx, AV_LOG_DEBUG, "Duplicated frame with pts %"PRId64" %d times\n",
               frame->pts, s->cur_frame_out - 1);
        s->dup += s->cur_frame_out - 1;
    } else if (s->cur_frame_out == 0) {
        av_log(ctx, AV_LOG_DEBUG, "Dropping frame with pts %"PRId64"\n",
               frame->pts);
        s->drop++;
    }
    s->cur_frame_out = 0;

    return frame;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    FPSContext *s = ctx->priv;

    AVFrame *frame;

    while (s->frames_count > 0) {
        frame = shift_frame(ctx, s);
        av_frame_free(&frame);
    }

    av_log(ctx, AV_LOG_VERBOSE, "%d frames in, %d frames out; %d frames dropped, "
           "%d frames duplicated.\n", s->frames_in, s->frames_out, s->drop, s->dup);
}

static int config_props(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    FPSContext *s = ctx->priv;

    outlink->time_base  = av_inv_q(s->framerate);
    outlink->frame_rate = s->framerate;

    /* Calculate the input and output pts offsets for start_time */
    if (s->start_time != DBL_MAX && s->start_time != AV_NOPTS_VALUE) {
        double first_pts = s->start_time * AV_TIME_BASE;
        if (first_pts < INT64_MIN || first_pts > INT64_MAX) {
            av_log(ctx, AV_LOG_ERROR, "Start time %f cannot be represented in internal time base\n",
                   s->start_time);
            return AVERROR(EINVAL);
        }
        s->in_pts_off  = av_rescale_q_rnd(first_pts, AV_TIME_BASE_Q, inlink->time_base,
                                          s->rounding | AV_ROUND_PASS_MINMAX);
        s->out_pts_off = av_rescale_q_rnd(first_pts, AV_TIME_BASE_Q, outlink->time_base,
                                          s->rounding | AV_ROUND_PASS_MINMAX);
        s->next_pts = s->out_pts_off;
        av_log(ctx, AV_LOG_VERBOSE, "Set first pts to (in:%"PRId64" out:%"PRId64") from start time %f\n",
               s->in_pts_off, s->out_pts_off, s->start_time);
    }

    return 0;
}
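
/*
 * Worked example (added commentary, values assumed for illustration):
 * with start_time=2.0, an input time base of 1/1000 and fps=25 (output
 * time base 1/25), first_pts is 2.0 * AV_TIME_BASE = 2000000, so
 * in_pts_off  = 2000000 * (1/1000000) / (1/1000) = 2000 and
 * out_pts_off = 2000000 * (1/1000000) / (1/25)   = 50;
 * output timestamps then start counting from pts 50.
 */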

/* Read a frame from the input and save it in the buffer */
static int read_frame(AVFilterContext *ctx, FPSContext *s, AVFilterLink *inlink, AVFilterLink *outlink)
{
    AVFrame *frame;
    int ret;
    int64_t in_pts;

    /* Must only be called when we have buffer room available */
    av_assert1(s->frames_count < 2);

    ret = ff_inlink_consume_frame(inlink, &frame);
    /* Caller must have run ff_inlink_check_available_frame first */
    av_assert1(ret);
    if (ret < 0)
        return ret;

    /* Convert frame pts to output timebase.
     * The dance with offsets is required to match the rounding behaviour of the
     * previous version of the fps filter when using the start_time option. */
    in_pts = frame->pts;
    frame->pts = s->out_pts_off + av_rescale_q_rnd(in_pts - s->in_pts_off,
                                                   inlink->time_base, outlink->time_base,
                                                   s->rounding | AV_ROUND_PASS_MINMAX);

    av_log(ctx, AV_LOG_DEBUG, "Read frame with in pts %"PRId64", out pts %"PRId64"\n",
           in_pts, frame->pts);

    s->frames[s->frames_count++] = frame;
    s->frames_in++;

    return 1;
}
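
/*
 * Worked example (added commentary, values assumed for illustration):
 * with no start_time set (both offsets 0), an input time base of 1/1000
 * and fps=25 (output time base 1/25), an input frame at pts 200 (200 ms)
 * is rescaled to 200 * 25 / 1000 = 5 in the output time base, i.e. the
 * slot 0.2 seconds into the output.
 */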

/* Write a frame to the output */
static int write_frame(AVFilterContext *ctx, FPSContext *s, AVFilterLink *outlink, int *again)
{
    AVFrame *frame;

    av_assert1(s->frames_count == 2 || (s->status && s->frames_count == 1));

    /* We haven't yet determined the pts of the first frame */
    if (s->next_pts == AV_NOPTS_VALUE) {
        if (s->frames[0]->pts != AV_NOPTS_VALUE) {
            s->next_pts = s->frames[0]->pts;
            av_log(ctx, AV_LOG_VERBOSE, "Set first pts to %"PRId64"\n", s->next_pts);
        } else {
            av_log(ctx, AV_LOG_WARNING, "Discarding initial frame(s) with no "
                   "timestamp.\n");
            frame = shift_frame(ctx, s);
            av_frame_free(&frame);
            *again = 1;
            return 0;
        }
    }

    /* There are two conditions where we want to drop a frame:
     * - If we have two buffered frames and the second frame is acceptable
     *   as the next output frame, then drop the first buffered frame.
     * - If we have status (EOF) set, drop frames when we hit the
     *   status timestamp. */
    if ((s->frames_count == 2 && s->frames[1]->pts <= s->next_pts) ||
        (s->status            && s->status_pts   <= s->next_pts)) {

        frame = shift_frame(ctx, s);
        av_frame_free(&frame);
        *again = 1;
        return 0;

    /* Output a copy of the first buffered frame */
    } else {
        frame = av_frame_clone(s->frames[0]);
        if (!frame)
            return AVERROR(ENOMEM);
        // Make sure Closed Captions will not be duplicated
        av_frame_remove_side_data(s->frames[0], AV_FRAME_DATA_A53_CC);
        frame->pts = s->next_pts++;

        av_log(ctx, AV_LOG_DEBUG, "Writing frame with pts %"PRId64" to pts %"PRId64"\n",
               s->frames[0]->pts, frame->pts);
        s->cur_frame_out++;
        *again = 1;
        return ff_filter_frame(outlink, frame);
    }
}
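
/*
 * Worked example (added commentary, values assumed for illustration):
 * converting 25 fps input to fps=50, consecutive input frames land at
 * output pts 0, 2, 4, ... while next_pts advances by 1 per output frame,
 * so frames[1]->pts stays above next_pts for two iterations and each
 * input frame is cloned twice (one duplicate counted per frame). In the
 * opposite direction, e.g. 30 fps input to fps=25, roughly one input
 * frame in six satisfies frames[1]->pts <= next_pts and is dropped.
 */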

/* Convert status_pts to outlink timebase */
static void update_eof_pts(AVFilterContext *ctx, FPSContext *s, AVFilterLink *inlink, AVFilterLink *outlink, int64_t status_pts)
{
    int eof_rounding = (s->eof_action == EOF_ACTION_PASS) ? AV_ROUND_UP : s->rounding;
    s->status_pts = av_rescale_q_rnd(status_pts, inlink->time_base, outlink->time_base,
                                     eof_rounding | AV_ROUND_PASS_MINMAX);

    av_log(ctx, AV_LOG_DEBUG, "EOF is at pts %"PRId64"\n", s->status_pts);
}
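
/*
 * Note (added commentary): with eof_action=pass the EOF timestamp is
 * rounded up instead of using the configured rounding mode. This biases
 * status_pts later in the output time base, so write_frame() reaches the
 * last buffered frame's pts before the status_pts <= next_pts drop
 * condition triggers, matching the "pass through last frame" behaviour.
 */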

static int activate(AVFilterContext *ctx)
{
    FPSContext   *s       = ctx->priv;
    AVFilterLink *inlink  = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];

    int ret;
    int again = 0;
    int64_t status_pts;

    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);

    /* No buffered status: normal operation */
    if (!s->status) {

        /* Read available input frames if we have room */
        while (s->frames_count < 2 && ff_inlink_check_available_frame(inlink)) {
            ret = read_frame(ctx, s, inlink, outlink);
            if (ret < 0)
                return ret;
        }

        /* We do not yet have enough frames to produce output */
        if (s->frames_count < 2) {
            /* Check if we've hit EOF (or otherwise that an error status is set) */
            ret = ff_inlink_acknowledge_status(inlink, &s->status, &status_pts);
            if (ret > 0)
                update_eof_pts(ctx, s, inlink, outlink, status_pts);

            if (!ret) {
                /* If someone wants us to output, we'd better ask for more input */
                FF_FILTER_FORWARD_WANTED(outlink, inlink);
                return 0;
            }
        }
    }

    /* Buffered frames are available, so generate an output frame */
    if (s->frames_count > 0) {
        ret = write_frame(ctx, s, outlink, &again);
        /* More work may remain on the buffered frames (further duplication
         * or dropping), so schedule another activation step */
        if (again)
            ff_filter_set_ready(ctx, 100);
        return ret;
    }

    /* No frames left, so forward the status */
    if (s->status && s->frames_count == 0) {
        ff_outlink_set_status(outlink, s->status, s->next_pts);
        return 0;
    }

    return FFERROR_NOT_READY;
}

static const AVFilterPad avfilter_vf_fps_inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

static const AVFilterPad avfilter_vf_fps_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_props,
    },
    { NULL }
};

AVFilter ff_vf_fps = {
    .name        = "fps",
    .description = NULL_IF_CONFIG_SMALL("Force constant framerate."),
    .init        = init,
    .uninit      = uninit,
    .priv_size   = sizeof(FPSContext),
    .priv_class  = &fps_class,
    .activate    = activate,
    .inputs      = avfilter_vf_fps_inputs,
    .outputs     = avfilter_vf_fps_outputs,
};
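
/*
 * Illustrative libavfilter usage sketch (added commentary, not part of the
 * original source). "graph", "src_ctx" and "sink_ctx" are assumed to be a
 * previously built AVFilterGraph and its buffer source/sink contexts.
 *
 *     AVFilterContext *fps_ctx = NULL;
 *     int ret = avfilter_graph_create_filter(&fps_ctx,
 *                                            avfilter_get_by_name("fps"),
 *                                            "fps", "fps=30:round=near",
 *                                            NULL, graph);
 *     if (ret < 0)
 *         return ret;
 *     // Link the filter between source and sink before configuring the graph
 *     if ((ret = avfilter_link(src_ctx, 0, fps_ctx, 0)) < 0 ||
 *         (ret = avfilter_link(fps_ctx, 0, sink_ctx, 0)) < 0)
 *         return ret;
 *     ret = avfilter_graph_config(graph, NULL);
 */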