/*
 * Copyright (c) 2016 Thilo Borgmann
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Video processing based on Apple's CoreImage API
 */
#import <CoreImage/CoreImage.h>
#import <AppKit/AppKit.h>

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
#include "libavutil/internal.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

typedef struct CoreImageContext {
    const AVClass   *class;

    int             is_video_source;    ///< filter is used as video source (coreimagesrc)

    int             w, h;               ///< video size (source only)
    AVRational      sar;                ///< sample aspect ratio
    AVRational      frame_rate;         ///< video frame rate (source only)
    AVRational      time_base;          ///< stream time base
    int64_t         duration;           ///< duration expressed in microseconds, -1 for unlimited
    int64_t         pts;                ///< increasing presentation time stamp (source only)
    AVFrame         *picref;            ///< cached reference containing the painted picture

    CFTypeRef       glctx;              ///< CIContext rendering via OpenGL, bridged to CF for plain-C storage
    CGContextRef    cgctx;              ///< Bitmap context used to copy the rendered image back into frame data
    CFTypeRef       input_image;        ///< Input image container (CIImage) for passing into Core Image API
    CGColorSpaceRef color_space;        ///< Common color space for input image and cgcontext
    int             bits_per_component; ///< Shared bpc for input-output operation

    char            *filter_string;     ///< The complete user provided filter definition
    CFTypeRef       *filters;           ///< CIFilter objects for all requested filters, bridged to CF
    int             num_filters;        ///< Amount of filters in *filters

    char            *output_rect;       ///< Rectangle to be filled with filter input
    int             list_filters;       ///< Option used to list all available filters including generators
    int             list_generators;    ///< Option used to list all available generators
} CoreImageContext;
64
/** Configure the source's output link from the user-provided options
 *  and derive the bits per component from the negotiated pixel format.
 */
static int config_output(AVFilterLink *link)
{
    CoreImageContext *ctx          = link->src->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);

    link->w                   = ctx->w;
    link->h                   = ctx->h;
    link->sample_aspect_ratio = ctx->sar;
    link->frame_rate          = ctx->frame_rate;
    link->time_base           = ctx->time_base;

    ctx->bits_per_component = av_get_bits_per_pixel(desc) / desc->nb_components;

    return 0;
}
80
/** Determine image properties from input link of filter chain.
 */
static int config_input(AVFilterLink *link)
{
    CoreImageContext *ctx          = link->dst->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);

    // per-channel depth needed later when creating the bitmap context
    ctx->bits_per_component = av_get_bits_per_pixel(desc) / desc->nb_components;

    return 0;
}
91
/** Print a list of all available filters including options and respective value ranges and defaults.
 */
static void list_filters(CoreImageContext *ctx)
{
    NSArray *filter_categories = nil;

    // restrict the query to generators when only those were requested
    if (ctx->list_generators && !ctx->list_filters) {
        filter_categories = [NSArray arrayWithObjects:kCICategoryGenerator, nil];
    }

    NSArray *filter_names = [CIFilter filterNamesInCategories:filter_categories];

    for (NSString *filter_name in filter_names) {
        av_log(ctx, AV_LOG_INFO, "Filter: %s\n", [filter_name UTF8String]);

        CIFilter *filter             = [CIFilter filterWithName:filter_name];
        NSDictionary *filter_attribs = [filter attributes]; // <nsstring, id>
        NSArray      *filter_inputs  = [filter inputKeys];  // <nsstring>

        for (NSString *input in filter_inputs) {
            NSDictionary *input_attribs = [filter_attribs valueForKey:input];
            NSString *input_class       = [input_attribs valueForKey:kCIAttributeClass];

            if ([input_class isEqualToString:@"NSNumber"]) {
                // numeric options additionally report default and slider range
                NSNumber *value_default = [input_attribs valueForKey:kCIAttributeDefault];
                NSNumber *value_min     = [input_attribs valueForKey:kCIAttributeSliderMin];
                NSNumber *value_max     = [input_attribs valueForKey:kCIAttributeSliderMax];

                av_log(ctx, AV_LOG_INFO, "\tOption: %s\t[%s]\t[%s %s][%s]\n",
                    [input UTF8String],
                    [input_class UTF8String],
                    [[value_min stringValue] UTF8String],
                    [[value_max stringValue] UTF8String],
                    [[value_default stringValue] UTF8String]);
            } else {
                av_log(ctx, AV_LOG_INFO, "\tOption: %s\t[%s]\n",
                    [input UTF8String],
                    [input_class UTF8String]);
            }
        }
    }
}
137
/** Negotiate packed ARGB on both the input and the output link. */
static int query_formats(AVFilterContext *fctx)
{
    static const enum AVPixelFormat inout_fmts_rgb[] = {
        AV_PIX_FMT_ARGB,
        AV_PIX_FMT_NONE
    };

    AVFilterFormats *inout_formats = ff_make_format_list(inout_fmts_rgb);
    int ret;

    if (!inout_formats) {
        return AVERROR(ENOMEM);
    }

    ret = ff_formats_ref(inout_formats, &fctx->inputs[0]->out_formats);
    if (ret < 0) {
        return ret;
    }

    ret = ff_formats_ref(inout_formats, &fctx->outputs[0]->in_formats);
    if (ret < 0) {
        return ret;
    }

    return 0;
}
159
/** Negotiate packed ARGB on the output link of the video source. */
static int query_formats_src(AVFilterContext *fctx)
{
    static const enum AVPixelFormat inout_fmts_rgb[] = {
        AV_PIX_FMT_ARGB,
        AV_PIX_FMT_NONE
    };

    AVFilterFormats *inout_formats = ff_make_format_list(inout_fmts_rgb);
    int ret;

    if (!inout_formats) {
        return AVERROR(ENOMEM);
    }

    ret = ff_formats_ref(inout_formats, &fctx->outputs[0]->in_formats);
    if (ret < 0) {
        return ret;
    }

    return 0;
}
180
/** Apply the chain of CIFilters to one frame.
 *
 *  The rendered output is copied back from the GPU by "drawing" the resulting
 *  CGImage into a bitmap context that wraps the frame's own data buffer,
 *  then the frame is pushed down the given link.
 *
 *  @param ctx   filter context holding the CIFilter chain and render contexts
 *  @param link  link the finished frame is passed to via ff_filter_frame()
 *  @param frame packed-ARGB frame that is filtered in place; owned by this
 *               function (freed on error, passed on via ff_filter_frame on success)
 *  @return 0 on success, a negative AVERROR code on failure
 */
static int apply_filter(CoreImageContext *ctx, AVFilterLink *link, AVFrame *frame)
{
    int i;

    // (re-)initialize input image
    const CGSize frame_size = {
        frame->width,
        frame->height
    };

    // wrap the frame data without copying it; CoreImage must not free it
    NSData *data = [NSData dataWithBytesNoCopy:frame->data[0]
                           length:frame->height*frame->linesize[0]
                           freeWhenDone:NO];

    CIImage *ret = [(__bridge CIImage*)ctx->input_image initWithBitmapData:data
                                                        bytesPerRow:frame->linesize[0]
                                                        size:frame_size
                                                        format:kCIFormatARGB8
                                                        colorSpace:ctx->color_space]; //kCGColorSpaceGenericRGB
    if (!ret) {
        av_log(ctx, AV_LOG_ERROR, "Input image could not be initialized.\n");
        av_frame_free(&frame); // fix: frame was leaked on error paths
        return AVERROR_EXTERNAL;
    }

    CIFilter *filter       = NULL;
    CIImage *filter_input  = (__bridge CIImage*)ctx->input_image;
    CIImage *filter_output = NULL;

    // successively apply all filters
    for (i = 0; i < ctx->num_filters; i++) {
        if (i) {
            // set filter input to previous filter output
            filter_input    = [(__bridge CIImage*)ctx->filters[i-1] valueForKey:kCIOutputImageKey];
            CGRect out_rect = [filter_input extent];
            if (out_rect.size.width > frame->width || out_rect.size.height > frame->height) {
                // do not keep padded image regions after filtering
                out_rect.origin.x    = 0.0f;
                out_rect.origin.y    = 0.0f;
                out_rect.size.width  = frame->width;
                out_rect.size.height = frame->height;
            }
            filter_input = [filter_input imageByCroppingToRect:out_rect];
        }

        filter = (__bridge CIFilter*)ctx->filters[i];

        // do not set input image for the first filter if used as video source
        if (!ctx->is_video_source || i) {
            @try {
                [filter setValue:filter_input forKey:kCIInputImageKey];
            } @catch (NSException *exception) {
                if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
                    av_log(ctx, AV_LOG_ERROR, "An error occurred: %s.\n", [exception.reason UTF8String]);
                    av_frame_free(&frame);
                    return AVERROR_EXTERNAL;
                } else {
                    // generators simply ignore the input image; warn and continue
                    av_log(ctx, AV_LOG_WARNING, "Selected filter does not accept an input image.\n");
                }
            }
        }
    }

    // get output of last filter
    filter_output = [filter valueForKey:kCIOutputImageKey];

    if (!filter_output) {
        av_log(ctx, AV_LOG_ERROR, "Filter output not available.\n");
        av_frame_free(&frame);
        return AVERROR_EXTERNAL;
    }

    // do not keep padded image regions after filtering
    CGRect out_rect = [filter_output extent];
    if (out_rect.size.width > frame->width || out_rect.size.height > frame->height) {
        av_log(ctx, AV_LOG_DEBUG, "Cropping output image.\n");
        out_rect.origin.x    = 0.0f;
        out_rect.origin.y    = 0.0f;
        out_rect.size.width  = frame->width;
        out_rect.size.height = frame->height;
    }

    CGImageRef out = [(__bridge CIContext*)ctx->glctx createCGImage:filter_output
                                                      fromRect:out_rect];

    if (!out) {
        // fix: previously fell through and used a NULL image
        av_log(ctx, AV_LOG_ERROR, "Cannot create valid output image.\n");
        av_frame_free(&frame);
        return AVERROR_EXTERNAL;
    }

    // create bitmap context on the fly for rendering into current frame->data[]
    if (ctx->cgctx) {
        CGContextRelease(ctx->cgctx);
        ctx->cgctx = NULL;
    }
    size_t out_width  = CGImageGetWidth(out);
    size_t out_height = CGImageGetHeight(out);

    if (out_width > frame->width || out_height > frame->height) { // this might result in segfault
        av_log(ctx, AV_LOG_WARNING, "Output image has unexpected size: %lux%lu (expected: %ix%i). This may crash...\n",
               out_width, out_height, frame->width, frame->height);
    }
    ctx->cgctx = CGBitmapContextCreate(frame->data[0],
                                       frame->width,
                                       frame->height,
                                       ctx->bits_per_component,
                                       frame->linesize[0],
                                       ctx->color_space,
                                       (uint32_t)kCGImageAlphaPremultipliedFirst); // ARGB
    if (!ctx->cgctx) {
        av_log(ctx, AV_LOG_ERROR, "CGBitmap context cannot be created.\n");
        CGImageRelease(out); // fix: we own the CGImage (Create Rule), release on error
        av_frame_free(&frame);
        return AVERROR_EXTERNAL;
    }

    // copy ("draw") the output image into the frame data
    CGRect rect = {{0,0},{frame->width, frame->height}};
    if (ctx->output_rect) {
        @try {
            NSString *tmp_string = [NSString stringWithUTF8String:ctx->output_rect];
            NSRect tmp           = NSRectFromString(tmp_string);
            rect                 = NSRectToCGRect(tmp);
        } @catch (NSException *exception) {
            av_log(ctx, AV_LOG_ERROR, "An error occurred: %s.\n", [exception.reason UTF8String]);
            CGImageRelease(out);
            av_frame_free(&frame);
            return AVERROR_EXTERNAL;
        }
        if (rect.size.width == 0.0f) {
            av_log(ctx, AV_LOG_WARNING, "Width of output rect is zero.\n");
        }
        if (rect.size.height == 0.0f) {
            av_log(ctx, AV_LOG_WARNING, "Height of output rect is zero.\n");
        }
    }

    CGContextDrawImage(ctx->cgctx, rect, out);
    CGImageRelease(out); // fix: createCGImage returns a +1 reference, was leaked per frame

    return ff_filter_frame(link, frame);
}
314
/** Apply all valid filters successively to the input image.
 *  The final output image is copied from the GPU by "drawing" using a bitmap context.
 */
static int filter_frame(AVFilterLink *link, AVFrame *frame)
{
    AVFilterContext *fctx = link->dst;

    return apply_filter(fctx->priv, fctx->outputs[0], frame);
}
322
/** Produce the next frame of the video source: paint the cached picture
 *  through the generator/filter chain and push it with an increasing pts.
 */
static int request_frame(AVFilterLink *link)
{
    CoreImageContext *ctx = link->src->priv;
    AVFrame *frame;

    // stop once the configured duration is reached (duration < 0 means unlimited)
    if (ctx->duration >= 0 &&
        av_rescale_q(ctx->pts, ctx->time_base, AV_TIME_BASE_Q) >= ctx->duration) {
        return AVERROR_EOF;
    }

    // lazily allocate the cached picture that serves as the canvas for all frames
    if (!ctx->picref) {
        ctx->picref = ff_get_video_buffer(link, ctx->w, ctx->h);
        if (!ctx->picref) {
            return AVERROR(ENOMEM);
        }
    }

    // hand a new reference to the filter chain; the cached picture stays owned by ctx
    frame = av_frame_clone(ctx->picref);
    if (!frame) {
        return AVERROR(ENOMEM);
    }

    frame->pts                 = ctx->pts;
    frame->key_frame           = 1;
    frame->interlaced_frame    = 0;
    frame->pict_type           = AV_PICTURE_TYPE_I;
    frame->sample_aspect_ratio = ctx->sar;

    ctx->pts++;

    return apply_filter(ctx, link, frame);
}
355
/** Set an option of the given filter to the provided key-value pair.
 *
 *  The value string is converted according to the class the filter expects
 *  for that key (NSNumber, CIVector, CIColor, NSString or NSData). Unknown
 *  keys and unsupported classes are skipped with a warning; numeric values
 *  are clamped to the filter's slider range.
 */
static void set_option(CoreImageContext *ctx, CIFilter *filter, const char *key, const char *value)
{
    NSString *input_key = [NSString stringWithUTF8String:key];
    NSString *input_val = [NSString stringWithUTF8String:value];

    NSDictionary *filter_attribs = [filter attributes]; // <nsstring, id>
    NSDictionary *input_attribs  = [filter_attribs valueForKey:input_key];

    // fix: reject unknown options before querying their attributes
    if (!input_attribs) {
        av_log(ctx, AV_LOG_WARNING, "Skipping unknown option: \"%s\".\n",
               [input_key UTF8String]); // [[filter name] UTF8String]) not currently defined...
        return;
    }

    NSString *input_class = [input_attribs valueForKey:kCIAttributeClass];
    NSString *input_type  = [input_attribs valueForKey:kCIAttributeType];

    av_log(ctx, AV_LOG_DEBUG, "key: %s, val: %s, #attribs: %lu, class: %s, type: %s\n",
           [input_key UTF8String],
           [input_val UTF8String],
           (unsigned long)[input_attribs count],
           [input_class UTF8String],
           [input_type UTF8String]);

    if ([input_class isEqualToString:@"NSNumber"]) {
        float input          = input_val.floatValue;
        NSNumber *max_value  = [input_attribs valueForKey:kCIAttributeSliderMax];
        NSNumber *min_value  = [input_attribs valueForKey:kCIAttributeSliderMin];
        NSNumber *used_value = nil;

// warn when the user value leaves the filter's slider range and gets clamped
#define CLAMP_WARNING do {     \
av_log(ctx, AV_LOG_WARNING, "Value of \"%f\" for option \"%s\" is out of range [%f %f], clamping to \"%f\".\n", \
       input,                  \
       [input_key UTF8String], \
       min_value.floatValue,   \
       max_value.floatValue,   \
       used_value.floatValue); \
} while(0)
        if (input > max_value.floatValue) {
            used_value = max_value;
            CLAMP_WARNING;
        } else if (input < min_value.floatValue) {
            used_value = min_value;
            CLAMP_WARNING;
        } else {
            used_value = [NSNumber numberWithFloat:input];
        }

        [filter setValue:used_value forKey:input_key];
    } else if ([input_class isEqualToString:@"CIVector"]) {
        CIVector *input = [CIVector vectorWithString:input_val];

        if (!input) {
            av_log(ctx, AV_LOG_WARNING, "Skipping invalid CIVector description: \"%s\".\n",
                   [input_val UTF8String]);
            return;
        }

        [filter setValue:input forKey:input_key];
    } else if ([input_class isEqualToString:@"CIColor"]) {
        CIColor *input = [CIColor colorWithString:input_val];

        if (!input) {
            av_log(ctx, AV_LOG_WARNING, "Skipping invalid CIColor description: \"%s\".\n",
                   [input_val UTF8String]);
            return;
        }

        [filter setValue:input forKey:input_key];
    } else if ([input_class isEqualToString:@"NSString"]) {
        // pass the value through as-is
        [filter setValue:input_val forKey:input_key];
    } else if ([input_class isEqualToString:@"NSData"]) { // convert the string to bytes using latin1 encoding
        NSData *input = [NSData dataWithBytes:(const void*)[input_val cStringUsingEncoding:NSISOLatin1StringEncoding]
                                length:[input_val lengthOfBytesUsingEncoding:NSISOLatin1StringEncoding]];

        if (!input) {
            av_log(ctx, AV_LOG_WARNING, "Skipping invalid NSData description: \"%s\".\n",
                   [input_val UTF8String]);
            return;
        }

        [filter setValue:input forKey:input_key];
    } else {
        av_log(ctx, AV_LOG_WARNING, "Skipping unsupported option class: \"%s\".\n",
               [input_class UTF8String]);
        avpriv_report_missing_feature(ctx, "Handling of some option classes");
        return;
    }
}
447
/** Create a filter object by a given name and set all options to defaults.
 *  Overwrite any option given by the user to the provided value in filter_options.
 */
static CIFilter* create_filter(CoreImageContext *ctx, const char *filter_name, AVDictionary *filter_options)
{
    CIFilter *filter = [CIFilter filterWithName:[NSString stringWithUTF8String:filter_name]];

    // start from the filter's default configuration
    [filter setDefaults];

    // then overwrite each option the user provided
    if (filter_options) {
        AVDictionaryEntry *o = NULL;

        while ((o = av_dict_get(filter_options, "", o, AV_DICT_IGNORE_SUFFIX))) {
            set_option(ctx, filter, o->key, o->value);
        }
    }

    return filter;
}
469
/** Common initialization for the coreimage filter and the coreimagesrc source:
 *  parse the user's filter string into CIFilter objects and create the
 *  CoreImage GPU rendering context.
 *
 *  @return 0 on success, AVERROR_EXIT after listing filters, or a negative
 *          AVERROR code on failure
 */
static av_cold int init(AVFilterContext *fctx)
{
    CoreImageContext *ctx     = fctx->priv;
    AVDictionary *filter_dict = NULL;
    AVDictionaryEntry *f      = NULL;
    AVDictionaryEntry *o      = NULL;
    int ret;
    int i;

    // listing is a terminal action: print and bail out
    if (ctx->list_filters || ctx->list_generators) {
        list_filters(ctx);
        return AVERROR_EXIT;
    }

    if (ctx->filter_string) {
        // parse filter string (filter=name@opt=val@opt2=val2#name2@opt3=val3) for filters separated by #
        av_log(ctx, AV_LOG_DEBUG, "Filter_string: %s\n", ctx->filter_string);
        ret = av_dict_parse_string(&filter_dict, ctx->filter_string, "@", "#", AV_DICT_MULTIKEY); // parse filter_name:all_filter_options
        if (ret) {
            av_dict_free(&filter_dict);
            av_log(ctx, AV_LOG_ERROR, "Parsing of filters failed.\n");
            return AVERROR(EIO);
        }
        ctx->num_filters = av_dict_count(filter_dict);
        av_log(ctx, AV_LOG_DEBUG, "Filter count: %i\n", ctx->num_filters);

        // allocate CIFilter array
        ctx->filters = av_mallocz_array(ctx->num_filters, sizeof(CIFilter*));
        if (!ctx->filters) {
            av_log(ctx, AV_LOG_ERROR, "Could not allocate filter array.\n");
            av_dict_free(&filter_dict); // fix: dict was leaked on this path
            return AVERROR(ENOMEM);
        }

        // parse filters for option key-value pairs (opt=val@opt2=val2) separated by @
        i = 0;
        while ((f = av_dict_get(filter_dict, "", f, AV_DICT_IGNORE_SUFFIX))) {
            AVDictionary *filter_options = NULL;

            if (strncmp(f->value, "default", 7)) { // not default
                ret = av_dict_parse_string(&filter_options, f->value, "=", "@", 0); // parse option_name:option_value
                if (ret) {
                    av_dict_free(&filter_options);
                    av_dict_free(&filter_dict); // fix: dict was leaked on this path
                    av_log(ctx, AV_LOG_ERROR, "Parsing of filter options for \"%s\" failed.\n", f->key);
                    return AVERROR(EIO);
                }
            }

            if (av_log_get_level() >= AV_LOG_DEBUG) {
                av_log(ctx, AV_LOG_DEBUG, "Creating filter %i: \"%s\":\n", i, f->key);
                if (!filter_options) {
                    av_log(ctx, AV_LOG_DEBUG, "\tusing default options\n");
                } else {
                    while ((o = av_dict_get(filter_options, "", o, AV_DICT_IGNORE_SUFFIX))) {
                        av_log(ctx, AV_LOG_DEBUG, "\t%s: %s\n", o->key, o->value);
                    }
                }
            }

            // retain the CIFilter beyond this autorelease scope
            ctx->filters[i] = CFBridgingRetain(create_filter(ctx, f->key, filter_options));
            av_dict_free(&filter_options); // fix: options dict was leaked each iteration
            if (!ctx->filters[i]) {
                av_log(ctx, AV_LOG_ERROR, "Could not create filter \"%s\".\n", f->key);
                av_dict_free(&filter_dict);
                return AVERROR(EINVAL);
            }

            i++;
        }
        av_dict_free(&filter_dict); // fix: dict was leaked on the success path
    } else {
        av_log(ctx, AV_LOG_ERROR, "No filters specified.\n");
        return AVERROR(EINVAL);
    }

    // create GPU context on OSX
    const NSOpenGLPixelFormatAttribute attr[] = {
        NSOpenGLPFAAccelerated,
        NSOpenGLPFANoRecovery,
        NSOpenGLPFAColorSize, 32,
        0
    };

    NSOpenGLPixelFormat *pixel_format = [[NSOpenGLPixelFormat alloc] initWithAttributes:(void *)&attr];
    ctx->color_space                  = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB);
    ctx->glctx                        = CFBridgingRetain([CIContext contextWithCGLContext:CGLGetCurrentContext()
                                                         pixelFormat:[pixel_format CGLPixelFormatObj]
                                                         colorSpace:ctx->color_space
                                                         options:nil]);

    if (!ctx->glctx) {
        av_log(ctx, AV_LOG_ERROR, "CIContext not created.\n");
        return AVERROR_EXTERNAL;
    }

    // Creating an empty input image as input container for the context
    ctx->input_image = CFBridgingRetain([CIImage emptyImage]);

    return 0;
}
566
/** Initialize the coreimagesrc video source: mark the context as source,
 *  derive the stream time base from the frame rate and delegate to init().
 */
static av_cold int init_src(AVFilterContext *fctx)
{
    CoreImageContext *ctx = fctx->priv;

    ctx->is_video_source = 1;
    ctx->time_base       = av_inv_q(ctx->frame_rate);
    ctx->pts             = 0;

    return init(fctx);
}
577
/** Release all CoreFoundation/CoreGraphics objects and cached frames. */
static av_cold void uninit(AVFilterContext *fctx)
{
// release a CF object and clear the pointer; no-op when already NULL
#define SafeCFRelease(ptr) do { \
    if (ptr) {                  \
        CFRelease(ptr);         \
        ptr = NULL;             \
    }                           \
} while (0)

    CoreImageContext *ctx = fctx->priv;

    SafeCFRelease(ctx->glctx);
    SafeCFRelease(ctx->cgctx);
    SafeCFRelease(ctx->color_space);
    SafeCFRelease(ctx->input_image);

    // release each bridged CIFilter before freeing the array itself
    if (ctx->filters) {
        for (int i = 0; i < ctx->num_filters; i++) {
            SafeCFRelease(ctx->filters[i]);
        }
        av_freep(&ctx->filters);
    }

    // cached source picture (video source only; NULL otherwise)
    av_frame_free(&ctx->picref);
}
603
// input pad of the coreimage video filter
static const AVFilterPad vf_coreimage_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
        .config_props = config_input,
    },
    { NULL }
};

// output pad of the coreimage video filter
static const AVFilterPad vf_coreimage_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

// output pad of the coreimagesrc video source
static const AVFilterPad vsrc_coreimagesrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_output,
    },
    { NULL }
};
631
#define OFFSET(x) offsetof(CoreImageContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

// options only meaningful for the coreimagesrc video source
#define GENERATOR_OPTIONS                                                                                                               \
    {"size",     "set video size",                OFFSET(w),          AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0,         FLAGS}, \
    {"s",        "set video size",                OFFSET(w),          AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0,         FLAGS}, \
    {"rate",     "set video rate",                OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"},      0, INT_MAX,         FLAGS}, \
    {"r",        "set video rate",                OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"},      0, INT_MAX,         FLAGS}, \
    {"duration", "set video duration",            OFFSET(duration),   AV_OPT_TYPE_DURATION,   {.i64 = -1},       -1, INT64_MAX, FLAGS}, \
    {"d",        "set video duration",            OFFSET(duration),   AV_OPT_TYPE_DURATION,   {.i64 = -1},       -1, INT64_MAX, FLAGS}, \
    {"sar",      "set video sample aspect ratio", OFFSET(sar),        AV_OPT_TYPE_RATIONAL,   {.dbl = 1},         0, INT_MAX,   FLAGS},

// options shared by the coreimage filter and the coreimagesrc source
#define FILTER_OPTIONS                                                                                                                           \
    {"list_filters",    "list available filters",                OFFSET(list_filters),    AV_OPT_TYPE_BOOL,   {.i64 = 0}, 0, 1, .flags = FLAGS}, \
    {"list_generators", "list available generators",             OFFSET(list_generators), AV_OPT_TYPE_BOOL,   {.i64 = 0}, 0, 1, .flags = FLAGS}, \
    {"filter",          "names and options of filters to apply", OFFSET(filter_string),   AV_OPT_TYPE_STRING, {.str = NULL},    .flags = FLAGS}, \
    {"output_rect",     "output rectangle within output image",  OFFSET(output_rect),     AV_OPT_TYPE_STRING, {.str = NULL},    .flags = FLAGS},
649
650
// definitions for coreimage video filter
static const AVOption coreimage_options[] = {
    FILTER_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(coreimage);

AVFilter ff_vf_coreimage = {
    .name          = "coreimage",
    .description   = NULL_IF_CONFIG_SMALL("Video filtering using CoreImage API."),
    .init          = init,
    .uninit        = uninit,
    .priv_size     = sizeof(CoreImageContext),
    .priv_class    = &coreimage_class,
    .inputs        = vf_coreimage_inputs,
    .outputs       = vf_coreimage_outputs,
    .query_formats = query_formats,
};
670
// definitions for coreimagesrc video source (generators also take the filter options)
static const AVOption coreimagesrc_options[] = {
    GENERATOR_OPTIONS
    FILTER_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(coreimagesrc);

AVFilter ff_vsrc_coreimagesrc = {
    .name          = "coreimagesrc",
    .description   = NULL_IF_CONFIG_SMALL("Video source using image generators of CoreImage API."),
    .init          = init_src,
    .uninit        = uninit,
    .priv_size     = sizeof(CoreImageContext),
    .priv_class    = &coreimagesrc_class,
    .inputs        = NULL, // source: no input pads
    .outputs       = vsrc_coreimagesrc_outputs,
    .query_formats = query_formats_src,
};
691