/*
 * Copyright (c) 2018 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/imgutils.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "filters.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

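/*
 * Filter state.  frames[0..4] hold a sliding window of five frames (two past
 * references, the current frame, two future references).  The float options
 * lt/tl/tc/ct are normalized thresholds that config_output() scales to the
 * value range of the negotiated bit depth (luma2d, lumaT, chromaT1, chromaT2).
 */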
typedef struct DedotContext {
    const AVClass *class;
    int m;
    float lt;
    float tl;
    float tc;
    float ct;

    const AVPixFmtDescriptor *desc;
    int depth;
    int max;
    int luma2d;
    int lumaT;
    int chromaT1;
    int chromaT2;

    int eof;
    int eof_frames;
    int nb_planes;
    int planewidth[4];
    int planeheight[4];

    AVFrame *frames[5];

    int (*dedotcrawl)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
    int (*derainbow)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} DedotContext;

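/* Only planar YUV(A) formats are accepted; the alpha plane, when present,
 * is passed through untouched. */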
static int query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pixel_fmts[] = {
        AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV440P,
        AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ440P,
        AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUV420P,
        AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ420P,
        AV_PIX_FMT_YUVJ411P, AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P,
        AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV444P9,
        AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10,
        AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV440P12,
        AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV444P14,
        AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16,
        AV_PIX_FMT_YUVA420P9, AV_PIX_FMT_YUVA422P9, AV_PIX_FMT_YUVA444P9,
        AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
        AV_PIX_FMT_YUVA422P12, AV_PIX_FMT_YUVA444P12,
        AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *formats = ff_make_format_list(pixel_fmts);
    if (!formats)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, formats);
}

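/*
 * Dot-crawl (cross-luminance) reduction on the luma plane.  frames[2] is the
 * current frame; p0/p1/p3/p4 are the two past and two future references.
 * Pixels whose vertical and horizontal second differences both stay within
 * luma2d are left untouched.  The rest are replaced only when they are
 * temporally stable two frames away (|cur - p0|, |cur - p4| and |p1 - p3|
 * all within lumaT); the output is then the rounded average of the current
 * pixel and whichever adjacent frame, p1 or p3, is closer in value.  The
 * first and last rows and columns are skipped because the spatial check
 * needs all four neighbours.
 */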
#define DEFINE_DEDOTCRAWL(name, type, div) \
static int dedotcrawl##name(AVFilterContext *ctx, void *arg, \
                            int jobnr, int nb_jobs) \
{ \
    DedotContext *s = ctx->priv; \
    AVFrame *out = arg; \
    int src_linesize = s->frames[2]->linesize[0] / div; \
    int dst_linesize = out->linesize[0] / div; \
    int p0_linesize = s->frames[0]->linesize[0] / div; \
    int p1_linesize = s->frames[1]->linesize[0] / div; \
    int p3_linesize = s->frames[3]->linesize[0] / div; \
    int p4_linesize = s->frames[4]->linesize[0] / div; \
    const int h = s->planeheight[0]; \
    int slice_start = (h * jobnr) / nb_jobs; \
    int slice_end = (h * (jobnr+1)) / nb_jobs; \
    type *p0 = (type *)s->frames[0]->data[0]; \
    type *p1 = (type *)s->frames[1]->data[0]; \
    type *p3 = (type *)s->frames[3]->data[0]; \
    type *p4 = (type *)s->frames[4]->data[0]; \
    type *src = (type *)s->frames[2]->data[0]; \
    type *dst = (type *)out->data[0]; \
    const int luma2d = s->luma2d; \
    const int lumaT = s->lumaT; \
    \
    if (!slice_start) { \
        slice_start++; \
    } \
    p0 += p0_linesize * slice_start; \
    p1 += p1_linesize * slice_start; \
    p3 += p3_linesize * slice_start; \
    p4 += p4_linesize * slice_start; \
    src += src_linesize * slice_start; \
    dst += dst_linesize * slice_start; \
    if (slice_end == h) { \
        slice_end--; \
    } \
    for (int y = slice_start; y < slice_end; y++) { \
        for (int x = 1; x < s->planewidth[0] - 1; x++) { \
            int above = src[x - src_linesize]; \
            int below = src[x + src_linesize]; \
            int cur = src[x]; \
            int left = src[x - 1]; \
            int right = src[x + 1]; \
            \
            if (FFABS(above + below - 2 * cur) <= luma2d && \
                FFABS(left + right - 2 * cur) <= luma2d) \
                continue; \
            \
            if (FFABS(cur - p0[x]) <= lumaT && \
                FFABS(cur - p4[x]) <= lumaT && \
                FFABS(p1[x] - p3[x]) <= lumaT) { \
                int diff1 = FFABS(cur - p1[x]); \
                int diff2 = FFABS(cur - p3[x]); \
                \
                if (diff1 < diff2) \
                    dst[x] = (src[x] + p1[x] + 1) >> 1; \
                else \
                    dst[x] = (src[x] + p3[x] + 1) >> 1; \
            } \
        } \
        \
        dst += dst_linesize; \
        src += src_linesize; \
        p0 += p0_linesize; \
        p1 += p1_linesize; \
        p3 += p3_linesize; \
        p4 += p4_linesize; \
    } \
    return 0; \
}

DEFINE_DEDOTCRAWL(8, uint8_t, 1)
DEFINE_DEDOTCRAWL(16, uint16_t, 2)

typedef struct ThreadData {
    AVFrame *out;
    int plane;
} ThreadData;

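/*
 * Rainbow (cross-color) reduction, run once per chroma plane.  A pixel is
 * replaced only when it is stable two frames away (within chromaT1 of p0 and
 * p4, with p1 and p3 within chromaT1 of each other) yet differs from both
 * adjacent frames by more than chromaT2; it is then averaged with the closer
 * of p1 and p3.
 */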
#define DEFINE_DERAINBOW(name, type, div) \
static int derainbow##name(AVFilterContext *ctx, void *arg, \
                           int jobnr, int nb_jobs) \
{ \
    DedotContext *s = ctx->priv; \
    ThreadData *td = arg; \
    AVFrame *out = td->out; \
    const int plane = td->plane; \
    const int h = s->planeheight[plane]; \
    int slice_start = (h * jobnr) / nb_jobs; \
    int slice_end = (h * (jobnr+1)) / nb_jobs; \
    int src_linesize = s->frames[2]->linesize[plane] / div; \
    int dst_linesize = out->linesize[plane] / div; \
    int p0_linesize = s->frames[0]->linesize[plane] / div; \
    int p1_linesize = s->frames[1]->linesize[plane] / div; \
    int p3_linesize = s->frames[3]->linesize[plane] / div; \
    int p4_linesize = s->frames[4]->linesize[plane] / div; \
    type *p0 = (type *)s->frames[0]->data[plane]; \
    type *p1 = (type *)s->frames[1]->data[plane]; \
    type *p3 = (type *)s->frames[3]->data[plane]; \
    type *p4 = (type *)s->frames[4]->data[plane]; \
    type *src = (type *)s->frames[2]->data[plane]; \
    type *dst = (type *)out->data[plane]; \
    const int chromaT1 = s->chromaT1; \
    const int chromaT2 = s->chromaT2; \
    \
    p0 += slice_start * p0_linesize; \
    p1 += slice_start * p1_linesize; \
    p3 += slice_start * p3_linesize; \
    p4 += slice_start * p4_linesize; \
    src += slice_start * src_linesize; \
    dst += slice_start * dst_linesize; \
    for (int y = slice_start; y < slice_end; y++) { \
        for (int x = 0; x < s->planewidth[plane]; x++) { \
            int cur = src[x]; \
            \
            if (FFABS(cur - p0[x]) <= chromaT1 && \
                FFABS(cur - p4[x]) <= chromaT1 && \
                FFABS(p1[x] - p3[x]) <= chromaT1 && \
                FFABS(cur - p1[x]) > chromaT2 && \
                FFABS(cur - p3[x]) > chromaT2) { \
                int diff1 = FFABS(cur - p1[x]); \
                int diff2 = FFABS(cur - p3[x]); \
                \
                if (diff1 < diff2) \
                    dst[x] = (src[x] + p1[x] + 1) >> 1; \
                else \
                    dst[x] = (src[x] + p3[x] + 1) >> 1; \
            } \
        } \
        \
        dst += dst_linesize; \
        src += src_linesize; \
        p0 += p0_linesize; \
        p1 += p1_linesize; \
        p3 += p3_linesize; \
        p4 += p4_linesize; \
    } \
    return 0; \
}

DEFINE_DERAINBOW(8, uint8_t, 1)
DEFINE_DERAINBOW(16, uint16_t, 2)

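/* Scale the normalized float thresholds to the value range of the negotiated
 * bit depth and pick the 8-bit or 16-bit per-sample implementations. */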
static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    DedotContext *s = ctx->priv;
    AVFilterLink *inlink = ctx->inputs[0];

    s->desc = av_pix_fmt_desc_get(outlink->format);
    if (!s->desc)
        return AVERROR_BUG;
    s->nb_planes = av_pix_fmt_count_planes(outlink->format);
    s->depth = s->desc->comp[0].depth;
    s->max = (1 << s->depth) - 1;
    s->luma2d = s->lt * s->max;
    s->lumaT = s->tl * s->max;
    s->chromaT1 = s->tc * s->max;
    s->chromaT2 = s->ct * s->max;

    s->planewidth[1] = s->planewidth[2] = AV_CEIL_RSHIFT(inlink->w, s->desc->log2_chroma_w);
    s->planewidth[0] = s->planewidth[3] = inlink->w;

    s->planeheight[1] = s->planeheight[2] = AV_CEIL_RSHIFT(inlink->h, s->desc->log2_chroma_h);
    s->planeheight[0] = s->planeheight[3] = inlink->h;

    if (s->depth <= 8) {
        s->dedotcrawl = dedotcrawl8;
        s->derainbow = derainbow8;
    } else {
        s->dedotcrawl = dedotcrawl16;
        s->derainbow = derainbow16;
    }

    return 0;
}

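/*
 * activate(): consume at most one input frame per call and maintain the
 * five-frame window in s->frames[].  A new frame is cloned into every empty
 * slot from 2 to 4, so the first frame fills slots 2-4 and later frames only
 * slot 4.  Once all five slots are populated, frames[2] is cloned, filtered
 * against its four neighbours and sent downstream, and the window is shifted
 * by one.  After input EOF the last frame is duplicated into slot 4 so the
 * remaining buffered frames (counted in eof_frames) can still be filtered
 * and output.
 */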
static int activate(AVFilterContext *ctx)
{
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];
    DedotContext *s = ctx->priv;
    AVFrame *frame = NULL;
    int64_t pts;
    int status;
    int ret = 0;

    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);

    if (s->eof == 0) {
        ret = ff_inlink_consume_frame(inlink, &frame);
        if (ret < 0)
            return ret;
    }
    if (frame || s->eof_frames > 0) {
        AVFrame *out = NULL;

        if (frame) {
            for (int i = 2; i < 5; i++) {
                if (!s->frames[i])
                    s->frames[i] = av_frame_clone(frame);
            }
            av_frame_free(&frame);
        } else if (s->frames[3]) {
            s->eof_frames--;
            s->frames[4] = av_frame_clone(s->frames[3]);
        }

        if (s->frames[0] &&
            s->frames[1] &&
            s->frames[2] &&
            s->frames[3] &&
            s->frames[4]) {
            out = av_frame_clone(s->frames[2]);
            if (out && !ctx->is_disabled) {
                ret = av_frame_make_writable(out);
                if (ret >= 0) {
                    if (s->m & 1)
                        ctx->internal->execute(ctx, s->dedotcrawl, out, NULL,
                                               FFMIN(s->planeheight[0],
                                                     ff_filter_get_nb_threads(ctx)));
                    if (s->m & 2) {
                        ThreadData td;
                        td.out = out; td.plane = 1;
                        ctx->internal->execute(ctx, s->derainbow, &td, NULL,
                                               FFMIN(s->planeheight[1],
                                                     ff_filter_get_nb_threads(ctx)));
                        td.plane = 2;
                        ctx->internal->execute(ctx, s->derainbow, &td, NULL,
                                               FFMIN(s->planeheight[2],
                                                     ff_filter_get_nb_threads(ctx)));
                    }
                } else
                    av_frame_free(&out);
            } else if (!out) {
                ret = AVERROR(ENOMEM);
            }
        }

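        /* Advance the five-frame window by one frame. */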
        av_frame_free(&s->frames[0]);
        s->frames[0] = s->frames[1];
        s->frames[1] = s->frames[2];
        s->frames[2] = s->frames[3];
        s->frames[3] = s->frames[4];
        s->frames[4] = NULL;

        if (ret < 0)
            return ret;
        if (out)
            return ff_filter_frame(outlink, out);
    }

    if (s->eof) {
        if (s->eof_frames <= 0) {
            ff_outlink_set_status(outlink, AVERROR_EOF, s->frames[2]->pts);
        } else {
            ff_filter_set_ready(ctx, 10);
        }
        return 0;
    }

    if (!s->eof && ff_inlink_acknowledge_status(inlink, &status, &pts)) {
        if (status == AVERROR_EOF) {
            s->eof = 1;
            s->eof_frames = !!s->frames[0] + !!s->frames[1];
            if (s->eof_frames <= 0) {
                ff_outlink_set_status(outlink, AVERROR_EOF, pts);
                return 0;
            }
            ff_filter_set_ready(ctx, 10);
            return 0;
        }
    }

    FF_FILTER_FORWARD_WANTED(outlink, inlink);

    return FFERROR_NOT_READY;
}

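/* Release any frames still held in the window. */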
static av_cold void uninit(AVFilterContext *ctx)
{
    DedotContext *s = ctx->priv;

    for (int i = 0; i < 5; i++)
        av_frame_free(&s->frames[i]);
}

#define OFFSET(x) offsetof(DedotContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM

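/* Threshold options take normalized values in [0,1].  Typical command-line
 * use, e.g. "dedot" for the default of both passes, or "dedot=m=dotcrawl"
 * to filter luma only. */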
static const AVOption dedot_options[] = {
    { "m",  "set filtering mode",                          OFFSET( m), AV_OPT_TYPE_FLAGS, {.i64=3},    0, 3, FLAGS, "m" },
    { "dotcrawl", 0, 0, AV_OPT_TYPE_CONST,                             {.i64=1},    0, 0, FLAGS, "m" },
    { "rainbows", 0, 0, AV_OPT_TYPE_CONST,                             {.i64=2},    0, 0, FLAGS, "m" },
    { "lt", "set spatial luma threshold",                  OFFSET(lt), AV_OPT_TYPE_FLOAT, {.dbl=.079}, 0, 1, FLAGS },
    { "tl", "set tolerance for temporal luma",             OFFSET(tl), AV_OPT_TYPE_FLOAT, {.dbl=.079}, 0, 1, FLAGS },
    { "tc", "set tolerance for chroma temporal variation", OFFSET(tc), AV_OPT_TYPE_FLOAT, {.dbl=.058}, 0, 1, FLAGS },
    { "ct", "set temporal chroma threshold",               OFFSET(ct), AV_OPT_TYPE_FLOAT, {.dbl=.019}, 0, 1, FLAGS },
    { NULL },
};

static const AVFilterPad inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

static const AVFilterPad outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_output,
    },
    { NULL }
};

AVFILTER_DEFINE_CLASS(dedot);

AVFilter ff_vf_dedot = {
    .name          = "dedot",
    .description   = NULL_IF_CONFIG_SMALL("Reduce cross-luminance and cross-color."),
    .priv_size     = sizeof(DedotContext),
    .priv_class    = &dedot_class,
    .query_formats = query_formats,
    .activate      = activate,
    .uninit        = uninit,
    .inputs        = inputs,
    .outputs       = outputs,
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL | AVFILTER_FLAG_SLICE_THREADS,
};