/*
 * librav1e encoder
 *
 * Copyright (c) 2019 Derek Buitenhuis
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <rav1e.h>

#include "libavutil/internal.h"
#include "libavutil/avassert.h"
#include "libavutil/base64.h"
#include "libavutil/common.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "avcodec.h"
#include "internal.h"

typedef struct librav1eContext {
    const AVClass *class;

    RaContext *ctx;
    AVBSFContext *bsf;

    uint8_t *pass_data;
    size_t pass_pos;
    int pass_size;

    AVDictionary *rav1e_opts;
    int quantizer;
    int speed;
    int tiles;
    int tile_rows;
    int tile_cols;
} librav1eContext;

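/*
 * The YUVJ* formats are legacy full-range aliases, so they imply full range
 * regardless of what avctx->color_range says; everything else falls back to
 * the signalled color range, defaulting to limited.
 */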
static inline RaPixelRange range_map(enum AVPixelFormat pix_fmt, enum AVColorRange range)
{
    switch (pix_fmt) {
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_YUVJ422P:
    case AV_PIX_FMT_YUVJ444P:
        return RA_PIXEL_RANGE_FULL;
    }

    switch (range) {
    case AVCOL_RANGE_JPEG:
        return RA_PIXEL_RANGE_FULL;
    case AVCOL_RANGE_MPEG:
    default:
        return RA_PIXEL_RANGE_LIMITED;
    }
}

static inline RaChromaSampling pix_fmt_map(enum AVPixelFormat pix_fmt)
{
    switch (pix_fmt) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUV420P12:
        return RA_CHROMA_SAMPLING_CS420;
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUVJ422P:
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV422P12:
        return RA_CHROMA_SAMPLING_CS422;
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVJ444P:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV444P12:
        return RA_CHROMA_SAMPLING_CS444;
    default:
        av_assert0(0);
    }
}

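/*
 * Map FFmpeg chroma sample locations onto rav1e's sample positions:
 * "left" corresponds to vertically-centered siting and "top-left" to
 * co-located siting; anything else is signalled as unknown.
 */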
static inline RaChromaSamplePosition chroma_loc_map(enum AVChromaLocation chroma_loc)
{
    switch (chroma_loc) {
    case AVCHROMA_LOC_LEFT:
        return RA_CHROMA_SAMPLE_POSITION_VERTICAL;
    case AVCHROMA_LOC_TOPLEFT:
        return RA_CHROMA_SAMPLE_POSITION_COLOCATED;
    default:
        return RA_CHROMA_SAMPLE_POSITION_UNKNOWN;
    }
}

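/*
 * Drain two-pass statistics from rav1e. While encoding (eos == 0) the raw
 * stats are appended to ctx->pass_data; at end of stream the accumulated
 * buffer is base64-encoded into avctx->stats_out for the caller.
 */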
static int get_stats(AVCodecContext *avctx, int eos)
{
    librav1eContext *ctx = avctx->priv_data;
    RaData* buf = rav1e_twopass_out(ctx->ctx);
    if (!buf)
        return 0;

    if (!eos) {
        uint8_t *tmp = av_fast_realloc(ctx->pass_data, &ctx->pass_size,
                                       ctx->pass_pos + buf->len);
        if (!tmp) {
            rav1e_data_unref(buf);
            return AVERROR(ENOMEM);
        }

        ctx->pass_data = tmp;
        memcpy(ctx->pass_data + ctx->pass_pos, buf->data, buf->len);
        ctx->pass_pos += buf->len;
    } else {
        size_t b64_size = AV_BASE64_SIZE(ctx->pass_pos);

        memcpy(ctx->pass_data, buf->data, buf->len);

        avctx->stats_out = av_malloc(b64_size);
        if (!avctx->stats_out) {
            rav1e_data_unref(buf);
            return AVERROR(ENOMEM);
        }

        av_base64_encode(avctx->stats_out, b64_size, ctx->pass_data, ctx->pass_pos);

        av_freep(&ctx->pass_data);
    }

    rav1e_data_unref(buf);

    return 0;
}

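/*
 * Feed previously gathered first-pass statistics back into rav1e,
 * advancing pass_pos by however many bytes the encoder consumed.
 */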
static int set_stats(AVCodecContext *avctx)
{
    librav1eContext *ctx = avctx->priv_data;
    int ret = 1;

    while (ret > 0 && ctx->pass_size - ctx->pass_pos > 0) {
        ret = rav1e_twopass_in(ctx->ctx, ctx->pass_data + ctx->pass_pos, ctx->pass_size);
        if (ret < 0)
            return AVERROR_EXTERNAL;
        ctx->pass_pos += ret;
    }

    return 0;
}

static av_cold int librav1e_encode_close(AVCodecContext *avctx)
{
    librav1eContext *ctx = avctx->priv_data;

    if (ctx->ctx) {
        rav1e_context_unref(ctx->ctx);
        ctx->ctx = NULL;
    }

    av_bsf_free(&ctx->bsf);
    av_freep(&ctx->pass_data);

    return 0;
}

static av_cold int librav1e_encode_init(AVCodecContext *avctx)
{
    librav1eContext *ctx = avctx->priv_data;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
    RaConfig *cfg = NULL;
    int rret;
    int ret = 0;

    cfg = rav1e_config_default();
    if (!cfg) {
        av_log(avctx, AV_LOG_ERROR, "Could not allocate rav1e config.\n");
        return AVERROR_EXTERNAL;
    }

    /*
     * Rav1e currently uses the time base given to it only for ratecontrol... where
     * the inverse is taken and used as a framerate. So, do what we do in other wrappers
     * and use the framerate if we can.
     */
    if (avctx->framerate.num > 0 && avctx->framerate.den > 0) {
        rav1e_config_set_time_base(cfg, (RaRational) {
                                   avctx->framerate.den, avctx->framerate.num
                                   });
    } else {
        rav1e_config_set_time_base(cfg, (RaRational) {
                                   avctx->time_base.num * avctx->ticks_per_frame,
                                   avctx->time_base.den
                                   });
    }

    if ((avctx->flags & AV_CODEC_FLAG_PASS1 || avctx->flags & AV_CODEC_FLAG_PASS2) && !avctx->bit_rate) {
        av_log(avctx, AV_LOG_ERROR, "A bitrate must be set to use two pass mode.\n");
        ret = AVERROR_INVALIDDATA;
        goto end;
    }

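    /*
     * stats_in is base64 text produced by the first pass, so the decoded
     * payload is at most 3/4 of the string length.
     */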
    if (avctx->flags & AV_CODEC_FLAG_PASS2) {
        if (!avctx->stats_in) {
            av_log(avctx, AV_LOG_ERROR, "No stats file provided for second pass.\n");
            ret = AVERROR(EINVAL);
            goto end;
        }

        ctx->pass_size = (strlen(avctx->stats_in) * 3) / 4;
        ctx->pass_data = av_malloc(ctx->pass_size);
        if (!ctx->pass_data) {
            av_log(avctx, AV_LOG_ERROR, "Could not allocate stats buffer.\n");
            ret = AVERROR(ENOMEM);
            goto end;
        }

        ctx->pass_size = av_base64_decode(ctx->pass_data, avctx->stats_in, ctx->pass_size);
        if (ctx->pass_size < 0) {
            av_log(avctx, AV_LOG_ERROR, "Invalid pass file.\n");
            ret = AVERROR(EINVAL);
            goto end;
        }
    }

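    /*
     * For global-header mode the wrapper later runs its own output packets
     * through the extract_extradata bitstream filter (see
     * librav1e_receive_packet) so the AV1 sequence header can be extracted
     * for use as extradata.
     */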
    if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
        const AVBitStreamFilter *filter = av_bsf_get_by_name("extract_extradata");
        int bret;

        if (!filter) {
            av_log(avctx, AV_LOG_ERROR, "extract_extradata bitstream filter "
                   "not found. This is a bug, please report it.\n");
            ret = AVERROR_BUG;
            goto end;
        }

        bret = av_bsf_alloc(filter, &ctx->bsf);
        if (bret < 0) {
            ret = bret;
            goto end;
        }

        bret = avcodec_parameters_from_context(ctx->bsf->par_in, avctx);
        if (bret < 0) {
            ret = bret;
            goto end;
        }

        bret = av_bsf_init(ctx->bsf);
        if (bret < 0) {
            ret = bret;
            goto end;
        }
    }

    {
        AVDictionaryEntry *en = NULL;
        while ((en = av_dict_get(ctx->rav1e_opts, "", en, AV_DICT_IGNORE_SUFFIX))) {
            int parse_ret = rav1e_config_parse(cfg, en->key, en->value);
            if (parse_ret < 0)
                av_log(avctx, AV_LOG_WARNING, "Invalid value for %s: %s.\n", en->key, en->value);
        }
    }

    rret = rav1e_config_parse_int(cfg, "width", avctx->width);
    if (rret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Invalid width passed to rav1e.\n");
        ret = AVERROR_INVALIDDATA;
        goto end;
    }

    rret = rav1e_config_parse_int(cfg, "height", avctx->height);
    if (rret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Invalid height passed to rav1e.\n");
        ret = AVERROR_INVALIDDATA;
        goto end;
    }

    rret = rav1e_config_parse_int(cfg, "threads", avctx->thread_count);
    if (rret < 0)
        av_log(avctx, AV_LOG_WARNING, "Invalid number of threads, defaulting to auto.\n");

    if (ctx->speed >= 0) {
        rret = rav1e_config_parse_int(cfg, "speed", ctx->speed);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set speed preset.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    /* rav1e handles precedence between 'tiles' and cols/rows for us. */
    if (ctx->tiles > 0) {
        rret = rav1e_config_parse_int(cfg, "tiles", ctx->tiles);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set number of tiles to encode with.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }
    if (ctx->tile_rows > 0) {
        rret = rav1e_config_parse_int(cfg, "tile_rows", ctx->tile_rows);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set number of tile rows to encode with.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }
    if (ctx->tile_cols > 0) {
        rret = rav1e_config_parse_int(cfg, "tile_cols", ctx->tile_cols);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set number of tile cols to encode with.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    if (avctx->gop_size > 0) {
        rret = rav1e_config_parse_int(cfg, "key_frame_interval", avctx->gop_size);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set max keyint.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    if (avctx->keyint_min > 0) {
        rret = rav1e_config_parse_int(cfg, "min_key_frame_interval", avctx->keyint_min);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set min keyint.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

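    /*
     * Rate control selection: with a bitrate and no explicit qp, configure
     * bitrate mode (clamped by qmin/qmax when set); otherwise an explicit
     * qp selects constant-quantizer mode and takes precedence.
     */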
    if (avctx->bit_rate && ctx->quantizer < 0) {
        int max_quantizer = avctx->qmax >= 0 ? avctx->qmax : 255;

        rret = rav1e_config_parse_int(cfg, "quantizer", max_quantizer);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set max quantizer.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }

        if (avctx->qmin >= 0) {
            rret = rav1e_config_parse_int(cfg, "min_quantizer", avctx->qmin);
            if (rret < 0) {
                av_log(avctx, AV_LOG_ERROR, "Could not set min quantizer.\n");
                ret = AVERROR_EXTERNAL;
                goto end;
            }
        }

        rret = rav1e_config_parse_int(cfg, "bitrate", avctx->bit_rate);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set bitrate.\n");
            ret = AVERROR_INVALIDDATA;
            goto end;
        }
    } else if (ctx->quantizer >= 0) {
        if (avctx->bit_rate)
            av_log(avctx, AV_LOG_WARNING, "Both bitrate and quantizer specified. Using quantizer mode.\n");

        rret = rav1e_config_parse_int(cfg, "quantizer", ctx->quantizer);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set quantizer.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    rret = rav1e_config_set_pixel_format(cfg, desc->comp[0].depth,
                                         pix_fmt_map(avctx->pix_fmt),
                                         chroma_loc_map(avctx->chroma_sample_location),
                                         range_map(avctx->pix_fmt, avctx->color_range));
    if (rret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to set pixel format properties.\n");
        ret = AVERROR_INVALIDDATA;
        goto end;
    }

    /* rav1e's colorspace enums match standard values. */
    rret = rav1e_config_set_color_description(cfg, (RaMatrixCoefficients) avctx->colorspace,
                                              (RaColorPrimaries) avctx->color_primaries,
                                              (RaTransferCharacteristics) avctx->color_trc);
    if (rret < 0) {
        av_log(avctx, AV_LOG_WARNING, "Failed to set color properties.\n");
        if (avctx->err_recognition & AV_EF_EXPLODE) {
            ret = AVERROR_INVALIDDATA;
            goto end;
        }
    }

    ctx->ctx = rav1e_context_new(cfg);
    if (!ctx->ctx) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create rav1e encode context.\n");
        ret = AVERROR_EXTERNAL;
        goto end;
    }

    ret = 0;

end:

    rav1e_config_unref(cfg);

    return ret;
}

static int librav1e_send_frame(AVCodecContext *avctx, const AVFrame *frame)
{
    librav1eContext *ctx = avctx->priv_data;
    RaFrame *rframe = NULL;
    int ret;

    if (frame) {
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);

        rframe = rav1e_frame_new(ctx->ctx);
        if (!rframe) {
            av_log(avctx, AV_LOG_ERROR, "Could not allocate new rav1e frame.\n");
            return AVERROR(ENOMEM);
        }

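        /*
         * Copy each plane into the rav1e frame. Only the chroma planes are
         * vertically subsampled, and samples are 1 or 2 bytes wide depending
         * on the bit depth of the pixel format.
         */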
        for (int i = 0; i < desc->nb_components; i++) {
            int shift = i ? desc->log2_chroma_h : 0;
            int bytes = desc->comp[0].depth == 8 ? 1 : 2;
            rav1e_frame_fill_plane(rframe, i, frame->data[i],
                                   (frame->height >> shift) * frame->linesize[i],
                                   frame->linesize[i], bytes);
        }
    }

    ret = rav1e_send_frame(ctx->ctx, rframe);
    if (rframe)
        rav1e_frame_unref(rframe); /* No need to unref if flushing. */

    switch (ret) {
    case RA_ENCODER_STATUS_SUCCESS:
        break;
    case RA_ENCODER_STATUS_ENOUGH_DATA:
        return AVERROR(EAGAIN);
    case RA_ENCODER_STATUS_FAILURE:
        av_log(avctx, AV_LOG_ERROR, "Could not send frame: %s\n", rav1e_status_to_str(ret));
        return AVERROR_EXTERNAL;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_send_frame: %s\n", ret, rav1e_status_to_str(ret));
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int librav1e_receive_packet(AVCodecContext *avctx, AVPacket *pkt)
{
    librav1eContext *ctx = avctx->priv_data;
    RaPacket *rpkt = NULL;
    int ret;

retry:

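    /*
     * Exchange two-pass statistics on every attempt: in the first pass any
     * pending stats are drained from rav1e, and in the second pass buffered
     * stats are fed back in before asking for a packet.
     */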
    if (avctx->flags & AV_CODEC_FLAG_PASS1) {
        int sret = get_stats(avctx, 0);
        if (sret < 0)
            return sret;
    } else if (avctx->flags & AV_CODEC_FLAG_PASS2) {
        int sret = set_stats(avctx);
        if (sret < 0)
            return sret;
    }

    ret = rav1e_receive_packet(ctx->ctx, &rpkt);
    switch (ret) {
    case RA_ENCODER_STATUS_SUCCESS:
        break;
    case RA_ENCODER_STATUS_LIMIT_REACHED:
        if (avctx->flags & AV_CODEC_FLAG_PASS1) {
            int sret = get_stats(avctx, 1);
            if (sret < 0)
                return sret;
        }
        return AVERROR_EOF;
    case RA_ENCODER_STATUS_ENCODED:
        if (avctx->internal->draining)
            goto retry;
        return AVERROR(EAGAIN);
    case RA_ENCODER_STATUS_NEED_MORE_DATA:
        if (avctx->internal->draining) {
            av_log(avctx, AV_LOG_ERROR, "Unexpected error when receiving packet after EOF.\n");
            return AVERROR_EXTERNAL;
        }
        return AVERROR(EAGAIN);
    case RA_ENCODER_STATUS_FAILURE:
        av_log(avctx, AV_LOG_ERROR, "Could not encode frame: %s\n", rav1e_status_to_str(ret));
        return AVERROR_EXTERNAL;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_receive_packet: %s\n", ret, rav1e_status_to_str(ret));
        return AVERROR_UNKNOWN;
    }

    ret = av_new_packet(pkt, rpkt->len);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Could not allocate packet.\n");
        rav1e_packet_unref(rpkt);
        return ret;
    }

    memcpy(pkt->data, rpkt->data, rpkt->len);

    if (rpkt->frame_type == RA_FRAME_TYPE_KEY)
        pkt->flags |= AV_PKT_FLAG_KEY;

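    /*
     * rav1e reports the input frame number rather than a timestamp, so the
     * timestamps are reconstructed from it here; this assumes the constant
     * frame duration implied by the time base set at init.
     */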
    pkt->pts = pkt->dts = rpkt->input_frameno * avctx->ticks_per_frame;
    rav1e_packet_unref(rpkt);

    if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
        int ret = av_bsf_send_packet(ctx->bsf, pkt);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "extradata extraction send failed.\n");
            av_packet_unref(pkt);
            return ret;
        }

        ret = av_bsf_receive_packet(ctx->bsf, pkt);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "extradata extraction receive failed.\n");
            av_packet_unref(pkt);
            return ret;
        }
    }

    return 0;
}

#define OFFSET(x) offsetof(librav1eContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM

static const AVOption options[] = {
    { "qp", "use constant quantizer mode", OFFSET(quantizer), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 255, VE },
    { "speed", "what speed preset to use", OFFSET(speed), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 10, VE },
550 { "tiles", "number of tiles encode with", OFFSET(tiles), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
551 { "tile-rows", "number of tiles rows to encode with", OFFSET(tile_rows), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
552 { "tile-columns", "number of tiles columns to encode with", OFFSET(tile_cols), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
553 { "rav1e-params", "set the rav1e configuration using a :-separated list of key=value parameters", OFFSET(rav1e_opts), AV_OPT_TYPE_DICT, { 0 }, 0, 0, VE },
554 { NULL }
555 };
556
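/*
 * Neutral defaults so that the generic lavc rate-control options are only
 * forwarded to rav1e when the user sets them explicitly.
 */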
static const AVCodecDefault librav1e_defaults[] = {
    { "b", "0" },
    { "g", "0" },
    { "keyint_min", "0" },
    { "qmax", "-1" },
    { "qmin", "-1" },
    { NULL }
};

const enum AVPixelFormat librav1e_pix_fmts[] = {
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_YUVJ420P,
    AV_PIX_FMT_YUV420P10,
    AV_PIX_FMT_YUV420P12,
    AV_PIX_FMT_YUV422P,
    AV_PIX_FMT_YUVJ422P,
    AV_PIX_FMT_YUV422P10,
    AV_PIX_FMT_YUV422P12,
    AV_PIX_FMT_YUV444P,
    AV_PIX_FMT_YUVJ444P,
    AV_PIX_FMT_YUV444P10,
    AV_PIX_FMT_YUV444P12,
    AV_PIX_FMT_NONE
};

static const AVClass class = {
    .class_name = "librav1e",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_librav1e_encoder = {
    .name           = "librav1e",
    .long_name      = NULL_IF_CONFIG_SMALL("librav1e AV1"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_AV1,
    .init           = librav1e_encode_init,
    .send_frame     = librav1e_send_frame,
    .receive_packet = librav1e_receive_packet,
    .close          = librav1e_encode_close,
    .priv_data_size = sizeof(librav1eContext),
    .priv_class     = &class,
    .defaults       = librav1e_defaults,
    .pix_fmts       = librav1e_pix_fmts,
    .capabilities   = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AUTO_THREADS,
    .caps_internal  = FF_CODEC_CAP_INIT_CLEANUP,
    .wrapper_name   = "librav1e",
};