/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

#include "cpu_features_macros.h"  // NOLINT
#ifdef CPU_FEATURES_ARCH_ARM
#include "cpuinfo_arm.h"  // NOLINT
#endif  // CPU_FEATURES_ARCH_ARM
#ifdef CPU_FEATURES_COMPILED_ANY_ARM_NEON
#include <arm_neon.h>
#endif  // CPU_FEATURES_COMPILED_ANY_ARM_NEON
#include <jni.h>

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <mutex>  // NOLINT
#include <new>

#include "cpu_info.h"  // NOLINT
#include "gav1/decoder.h"

#define LOG_TAG "gav1_jni"
#define LOGE(...) \
  ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))

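// Declares and defines a JNI entry point on the Java class
// com.google.android.exoplayer2.ext.av1.Gav1Decoder. For example,
// DECODER_FUNC(jlong, gav1Init, jint threads) expands to a definition of
// Java_com_google_android_exoplayer2_ext_av1_Gav1Decoder_gav1Init(
//     JNIEnv* env, jobject thiz, jint threads).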
#define DECODER_FUNC(RETURN_TYPE, NAME, ...)                          \
  extern "C" {                                                        \
  JNIEXPORT RETURN_TYPE                                               \
      Java_com_google_android_exoplayer2_ext_av1_Gav1Decoder_##NAME(  \
          JNIEnv* env, jobject thiz, ##__VA_ARGS__);                  \
  }                                                                   \
  JNIEXPORT RETURN_TYPE                                               \
      Java_com_google_android_exoplayer2_ext_av1_Gav1Decoder_##NAME(  \
          JNIEnv* env, jobject thiz, ##__VA_ARGS__)

jint JNI_OnLoad(JavaVM* vm, void* reserved) {
  JNIEnv* env;
  if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
    return -1;
  }
  return JNI_VERSION_1_6;
}

namespace {

// YUV plane indices.
const int kPlaneY = 0;
const int kPlaneU = 1;
const int kPlaneV = 2;
const int kMaxPlanes = 3;

// Android YUV format. See:
// https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12.
const int kImageFormatYV12 = 0x32315659;

// Output modes.
const int kOutputModeYuv = 0;
const int kOutputModeSurfaceYuv = 1;

const int kColorSpaceUnknown = 0;

// Return codes for jni methods.
const int kStatusError = 0;
const int kStatusOk = 1;
const int kStatusDecodeOnly = 2;

// Status codes specific to the JNI wrapper code.
enum JniStatusCode {
  kJniStatusOk = 0,
  kJniStatusOutOfMemory = -1,
  kJniStatusBufferAlreadyReleased = -2,
  kJniStatusInvalidNumOfPlanes = -3,
  kJniStatusBitDepth12NotSupportedWithYuv = -4,
  kJniStatusHighBitDepthNotSupportedWithSurfaceYuv = -5,
  kJniStatusANativeWindowError = -6,
  kJniStatusBufferResizeError = -7,
  kJniStatusNeonNotSupported = -8
};

const char* GetJniErrorMessage(JniStatusCode error_code) {
  switch (error_code) {
    case kJniStatusOutOfMemory:
      return "Out of memory.";
    case kJniStatusBufferAlreadyReleased:
      return "JNI buffer already released.";
    case kJniStatusBitDepth12NotSupportedWithYuv:
      return "Bit depth 12 is not supported with YUV.";
    case kJniStatusHighBitDepthNotSupportedWithSurfaceYuv:
      return "High bit depth (10 or 12 bits per pixel) output format is not "
             "supported with YUV surface.";
    case kJniStatusInvalidNumOfPlanes:
      return "Libgav1 decoded buffer has invalid number of planes.";
    case kJniStatusANativeWindowError:
      return "ANativeWindow error.";
    case kJniStatusBufferResizeError:
      return "Buffer resize failed.";
    case kJniStatusNeonNotSupported:
      return "Neon is not supported.";
    default:
      return "Unrecognized error code.";
  }
}

// Manages frame buffer and reference information.
class JniFrameBuffer {
 public:
  explicit JniFrameBuffer(int id) : id_(id), reference_count_(0) {}
  ~JniFrameBuffer() {
    for (int plane_index = kPlaneY; plane_index < kMaxPlanes; plane_index++) {
      delete[] raw_buffer_[plane_index];
    }
  }

  // Not copyable or movable.
  JniFrameBuffer(const JniFrameBuffer&) = delete;
  JniFrameBuffer(JniFrameBuffer&&) = delete;
  JniFrameBuffer& operator=(const JniFrameBuffer&) = delete;
  JniFrameBuffer& operator=(JniFrameBuffer&&) = delete;

  void SetFrameData(const libgav1::DecoderBuffer& decoder_buffer) {
    for (int plane_index = kPlaneY; plane_index < decoder_buffer.NumPlanes();
         plane_index++) {
      stride_[plane_index] = decoder_buffer.stride[plane_index];
      plane_[plane_index] = decoder_buffer.plane[plane_index];
      displayed_width_[plane_index] =
          decoder_buffer.displayed_width[plane_index];
      displayed_height_[plane_index] =
          decoder_buffer.displayed_height[plane_index];
    }
  }

  int Stride(int plane_index) const { return stride_[plane_index]; }
  uint8_t* Plane(int plane_index) const { return plane_[plane_index]; }
  int DisplayedWidth(int plane_index) const {
    return displayed_width_[plane_index];
  }
  int DisplayedHeight(int plane_index) const {
    return displayed_height_[plane_index];
  }

  // Methods maintaining reference count are not thread-safe. They must be
  // called with a lock held.
  void AddReference() { ++reference_count_; }
  void RemoveReference() { reference_count_--; }
  bool InUse() const { return reference_count_ != 0; }

  uint8_t* RawBuffer(int plane_index) const { return raw_buffer_[plane_index]; }
  void* BufferPrivateData() const { return const_cast<int*>(&id_); }

  // Attempts to reallocate data planes if the existing ones don't have enough
  // capacity. Returns true if the allocation was successful or wasn't needed,
  // false if the allocation failed.
  bool MaybeReallocateGav1DataPlanes(int y_plane_min_size,
                                     int uv_plane_min_size) {
    for (int plane_index = kPlaneY; plane_index < kMaxPlanes; plane_index++) {
      const int min_size =
          (plane_index == kPlaneY) ? y_plane_min_size : uv_plane_min_size;
      if (raw_buffer_size_[plane_index] >= min_size) continue;
      delete[] raw_buffer_[plane_index];
      raw_buffer_[plane_index] = new (std::nothrow) uint8_t[min_size];
      if (!raw_buffer_[plane_index]) {
        raw_buffer_size_[plane_index] = 0;
        return false;
      }
      raw_buffer_size_[plane_index] = min_size;
    }
    return true;
  }

 private:
  int stride_[kMaxPlanes];
  uint8_t* plane_[kMaxPlanes];
  int displayed_width_[kMaxPlanes];
  int displayed_height_[kMaxPlanes];
  const int id_;
  int reference_count_;
  // Pointers to the raw buffers allocated for the data planes.
  uint8_t* raw_buffer_[kMaxPlanes] = {};
  // Sizes of the raw buffers in bytes.
  size_t raw_buffer_size_[kMaxPlanes] = {};
};

// Manages frame buffers used by libgav1 decoder and ExoPlayer.
// Handles synchronization between libgav1 and ExoPlayer threads.
class JniBufferManager {
 public:
  ~JniBufferManager() {
    // This lock does not do anything since libgav1 has released all the frame
    // buffers. It exists to merely be consistent with all other usage of
    // |all_buffers_| and |all_buffer_count_|.
    std::lock_guard<std::mutex> lock(mutex_);
    while (all_buffer_count_--) {
      delete all_buffers_[all_buffer_count_];
    }
  }

  JniStatusCode GetBuffer(size_t y_plane_min_size, size_t uv_plane_min_size,
                          JniFrameBuffer** jni_buffer) {
    std::lock_guard<std::mutex> lock(mutex_);

    JniFrameBuffer* output_buffer;
    if (free_buffer_count_) {
      output_buffer = free_buffers_[--free_buffer_count_];
    } else if (all_buffer_count_ < kMaxFrames) {
      output_buffer = new (std::nothrow) JniFrameBuffer(all_buffer_count_);
      if (output_buffer == nullptr) return kJniStatusOutOfMemory;
      all_buffers_[all_buffer_count_++] = output_buffer;
    } else {
      // Maximum number of buffers is being used.
      return kJniStatusOutOfMemory;
    }
    if (!output_buffer->MaybeReallocateGav1DataPlanes(y_plane_min_size,
                                                      uv_plane_min_size)) {
      return kJniStatusOutOfMemory;
    }

    output_buffer->AddReference();
    *jni_buffer = output_buffer;

    return kJniStatusOk;
  }

  JniFrameBuffer* GetBuffer(int id) const { return all_buffers_[id]; }

  void AddBufferReference(int id) {
    std::lock_guard<std::mutex> lock(mutex_);
    all_buffers_[id]->AddReference();
  }

  JniStatusCode ReleaseBuffer(int id) {
    std::lock_guard<std::mutex> lock(mutex_);
    JniFrameBuffer* buffer = all_buffers_[id];
    if (!buffer->InUse()) {
      return kJniStatusBufferAlreadyReleased;
    }
    buffer->RemoveReference();
    if (!buffer->InUse()) {
      free_buffers_[free_buffer_count_++] = buffer;
    }
    return kJniStatusOk;
  }

 private:
  static const int kMaxFrames = 32;

  JniFrameBuffer* all_buffers_[kMaxFrames];
  int all_buffer_count_ = 0;

  JniFrameBuffer* free_buffers_[kMaxFrames];
  int free_buffer_count_ = 0;

  std::mutex mutex_;
};

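// Per-decoder state shared between the JNI entry points and the libgav1 frame
// buffer callbacks: cached JNI field/method IDs, the buffer manager, the
// decoder instance, the output surface and the most recent error codes.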
struct JniContext {
  ~JniContext() {
    if (native_window) {
      ANativeWindow_release(native_window);
    }
  }

  bool MaybeAcquireNativeWindow(JNIEnv* env, jobject new_surface) {
    if (surface == new_surface) {
      return true;
    }
    if (native_window) {
      ANativeWindow_release(native_window);
    }
    native_window_width = 0;
    native_window_height = 0;
    native_window = ANativeWindow_fromSurface(env, new_surface);
    if (native_window == nullptr) {
      jni_status_code = kJniStatusANativeWindowError;
      surface = nullptr;
      return false;
    }
    surface = new_surface;
    return true;
  }

  jfieldID decoder_private_field;
  jfieldID output_mode_field;
  jfieldID data_field;
  jmethodID init_for_private_frame_method;
  jmethodID init_for_yuv_frame_method;

  JniBufferManager buffer_manager;
  // The libgav1 decoder instance has to be deleted before |buffer_manager| is
  // destructed. This will make sure that libgav1 releases all the frame
  // buffers that it might be holding references to. So this has to be declared
  // after |buffer_manager| since the destruction happens in reverse order of
  // declaration.
  libgav1::Decoder decoder;

  ANativeWindow* native_window = nullptr;
  jobject surface = nullptr;
  int native_window_width = 0;
  int native_window_height = 0;

  Libgav1StatusCode libgav1_status_code = kLibgav1StatusOk;
  JniStatusCode jni_status_code = kJniStatusOk;
};

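// Frame buffer allocation callback passed to libgav1 via
// DecoderSettings::get_frame_buffer. Computes the required plane sizes and
// backs the frame with a JniFrameBuffer obtained from the buffer manager.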
Libgav1StatusCode Libgav1GetFrameBuffer(void* callback_private_data,
                                        int bitdepth,
                                        libgav1::ImageFormat image_format,
                                        int width, int height, int left_border,
                                        int right_border, int top_border,
                                        int bottom_border, int stride_alignment,
                                        libgav1::FrameBuffer* frame_buffer) {
  libgav1::FrameBufferInfo info;
  Libgav1StatusCode status = libgav1::ComputeFrameBufferInfo(
      bitdepth, image_format, width, height, left_border, right_border,
      top_border, bottom_border, stride_alignment, &info);
  if (status != kLibgav1StatusOk) return status;

  JniContext* const context = static_cast<JniContext*>(callback_private_data);
  JniFrameBuffer* jni_buffer;
  context->jni_status_code = context->buffer_manager.GetBuffer(
      info.y_buffer_size, info.uv_buffer_size, &jni_buffer);
  if (context->jni_status_code != kJniStatusOk) {
    LOGE("%s", GetJniErrorMessage(context->jni_status_code));
    return kLibgav1StatusOutOfMemory;
  }

  uint8_t* const y_buffer = jni_buffer->RawBuffer(0);
  uint8_t* const u_buffer =
      (info.uv_buffer_size != 0) ? jni_buffer->RawBuffer(1) : nullptr;
  uint8_t* const v_buffer =
      (info.uv_buffer_size != 0) ? jni_buffer->RawBuffer(2) : nullptr;

  return libgav1::SetFrameBuffer(&info, y_buffer, u_buffer, v_buffer,
                                 jni_buffer->BufferPrivateData(), frame_buffer);
}

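// Frame buffer release callback passed to libgav1 via
// DecoderSettings::release_frame_buffer. Recovers the buffer id stored as
// buffer private data and drops the corresponding reference.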
void Libgav1ReleaseFrameBuffer(void* callback_private_data,
                               void* buffer_private_data) {
  JniContext* const context = static_cast<JniContext*>(callback_private_data);
  const int buffer_id = *static_cast<const int*>(buffer_private_data);
  context->jni_status_code = context->buffer_manager.ReleaseBuffer(buffer_id);
  if (context->jni_status_code != kJniStatusOk) {
    LOGE("%s", GetJniErrorMessage(context->jni_status_code));
  }
}

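// Rounds |value| up to the next multiple of 16, e.g. AlignTo16(33) == 48 and
// AlignTo16(32) == 32.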
constexpr int AlignTo16(int value) { return (value + 15) & (~15); }

void CopyPlane(const uint8_t* source, int source_stride, uint8_t* destination,
               int destination_stride, int width, int height) {
  while (height--) {
    std::memcpy(destination, source, width);
    source += source_stride;
    destination += destination_stride;
  }
}

void CopyFrameToDataBuffer(const libgav1::DecoderBuffer* decoder_buffer,
                           jbyte* data) {
  for (int plane_index = kPlaneY; plane_index < decoder_buffer->NumPlanes();
       plane_index++) {
    const uint64_t length = decoder_buffer->stride[plane_index] *
                            decoder_buffer->displayed_height[plane_index];
    memcpy(data, decoder_buffer->plane[plane_index], length);
    data += length;
  }
}

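// Converts a 10-bit frame to 8 bits per sample by dropping the two least
// significant bits and diffusing the dropped remainder into the next sample
// (the "lightweight dither" below). For example, four consecutive 10-bit
// samples of value 514 become 128, 129, 128, 129 instead of four 128s,
// preserving the average level of 128.5.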
void Convert10BitFrameTo8BitDataBuffer(
    const libgav1::DecoderBuffer* decoder_buffer, jbyte* data) {
  for (int plane_index = kPlaneY; plane_index < decoder_buffer->NumPlanes();
       plane_index++) {
    int sample = 0;
    const uint8_t* source = decoder_buffer->plane[plane_index];
    for (int i = 0; i < decoder_buffer->displayed_height[plane_index]; i++) {
      const uint16_t* source_16 = reinterpret_cast<const uint16_t*>(source);
      for (int j = 0; j < decoder_buffer->displayed_width[plane_index]; j++) {
        // Lightweight dither. Carryover the remainder of each 10->8 bit
        // conversion to the next pixel.
        sample += source_16[j];
        data[j] = sample >> 2;
        sample &= 3;  // Remainder.
      }
      source += decoder_buffer->stride[plane_index];
      data += decoder_buffer->stride[plane_index];
    }
  }
}

#ifdef CPU_FEATURES_COMPILED_ANY_ARM_NEON
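// NEON variant of the 10-bit to 8-bit conversion. Instead of carrying the
// remainder forward, it rounds each sample with a pseudo-random bias produced
// by a vectorized LCG, processing 16 samples per loop iteration.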
void Convert10BitFrameTo8BitDataBufferNeon(
    const libgav1::DecoderBuffer* decoder_buffer, jbyte* data) {
  uint32x2_t lcg_value = vdup_n_u32(random());
  lcg_value = vset_lane_u32(random(), lcg_value, 1);
  // LCG values recommended in "Numerical Recipes".
  const uint32x2_t LCG_MULT = vdup_n_u32(1664525);
  const uint32x2_t LCG_INCR = vdup_n_u32(1013904223);

  for (int plane_index = kPlaneY; plane_index < kMaxPlanes; plane_index++) {
    const uint8_t* source = decoder_buffer->plane[plane_index];

    for (int i = 0; i < decoder_buffer->displayed_height[plane_index]; i++) {
      const uint16_t* source_16 = reinterpret_cast<const uint16_t*>(source);
      uint8_t* destination = reinterpret_cast<uint8_t*>(data);

      // Each read consumes 4 2-byte samples, but to reduce branches and
      // random steps we unroll to 4 rounds, so each loop consumes 16
      // samples.
      const int j_max = decoder_buffer->displayed_width[plane_index] & ~15;
      int j;
      for (j = 0; j < j_max; j += 16) {
        // Run a round of the RNG.
        lcg_value = vmla_u32(LCG_INCR, lcg_value, LCG_MULT);

        // Round 1.
        // The lower two bits of this LCG parameterization are garbage,
        // leaving streaks on the image. We access the upper bits of each
        // 16-bit lane by shifting. (We use this both as an 8- and 16-bit
        // vector, so the choice of which one to keep it as is arbitrary.)
        uint8x8_t randvec =
            vreinterpret_u8_u16(vshr_n_u16(vreinterpret_u16_u32(lcg_value), 8));

        // We retrieve the values and shift them so that the bits we'll
        // shift out (after biasing) are in the upper 8 bits of each 16-bit
        // lane.
        uint16x4_t values = vshl_n_u16(vld1_u16(source_16), 6);
        // We add the bias bits in the lower 8 to the shifted values to get
        // the final values in the upper 8 bits.
        uint16x4_t added_1 = vqadd_u16(values, vreinterpret_u16_u8(randvec));
        source_16 += 4;

        // Round 2.
        // Shifting the randvec bits left by 2 bits, as an 8-bit vector,
        // should leave us with enough bias to get the needed rounding
        // operation.
        randvec = vshl_n_u8(randvec, 2);

        // Retrieve and sum the next 4 pixels.
        values = vshl_n_u16(vld1_u16(source_16), 6);
        uint16x4_t added_2 = vqadd_u16(values, vreinterpret_u16_u8(randvec));
        source_16 += 4;

        // Reinterpret the two added vectors as 8x8, zip them together, and
        // discard the lower portions.
        uint8x8_t zipped =
            vuzp_u8(vreinterpret_u8_u16(added_1), vreinterpret_u8_u16(added_2))
                .val[1];
        vst1_u8(destination, zipped);
        destination += 8;

        // Run it again with the next two rounds using the remaining
        // entropy in randvec.

        // Round 3.
        randvec = vshl_n_u8(randvec, 2);
        values = vshl_n_u16(vld1_u16(source_16), 6);
        added_1 = vqadd_u16(values, vreinterpret_u16_u8(randvec));
        source_16 += 4;

        // Round 4.
        randvec = vshl_n_u8(randvec, 2);
        values = vshl_n_u16(vld1_u16(source_16), 6);
        added_2 = vqadd_u16(values, vreinterpret_u16_u8(randvec));
        source_16 += 4;

        zipped =
            vuzp_u8(vreinterpret_u8_u16(added_1), vreinterpret_u8_u16(added_2))
                .val[1];
        vst1_u8(destination, zipped);
        destination += 8;
      }

      uint32_t randval = 0;
      // For the remaining pixels in each row - usually none, as most
      // standard sizes are divisible by 32 - convert them "by hand".
      for (; j < decoder_buffer->displayed_width[plane_index]; j++) {
        if (!randval) randval = random();
        destination[j] = (source_16[j] + (randval & 3)) >> 2;
        randval >>= 2;
      }

      source += decoder_buffer->stride[plane_index];
      data += decoder_buffer->stride[plane_index];
    }
  }
}
#endif  // CPU_FEATURES_COMPILED_ANY_ARM_NEON

}  // namespace

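// Creates and initializes the decoder. Returns a pointer to the new JniContext
// as a jlong, or kStatusError if allocation fails. Initialization errors are
// recorded in the context and surfaced through gav1CheckError /
// gav1GetErrorMessage rather than through the return value.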
DECODER_FUNC(jlong, gav1Init, jint threads) {
  JniContext* context = new (std::nothrow) JniContext();
  if (context == nullptr) {
    return kStatusError;
  }

#ifdef CPU_FEATURES_ARCH_ARM
  // Libgav1 requires NEON with arm ABIs.
#ifdef CPU_FEATURES_COMPILED_ANY_ARM_NEON
  const cpu_features::ArmFeatures arm_features =
      cpu_features::GetArmInfo().features;
  if (!arm_features.neon) {
    context->jni_status_code = kJniStatusNeonNotSupported;
    return reinterpret_cast<jlong>(context);
  }
#else
  context->jni_status_code = kJniStatusNeonNotSupported;
  return reinterpret_cast<jlong>(context);
#endif  // CPU_FEATURES_COMPILED_ANY_ARM_NEON
#endif  // CPU_FEATURES_ARCH_ARM

  libgav1::DecoderSettings settings;
  settings.threads = threads;
  settings.get_frame_buffer = Libgav1GetFrameBuffer;
  settings.release_frame_buffer = Libgav1ReleaseFrameBuffer;
  settings.callback_private_data = context;

  context->libgav1_status_code = context->decoder.Init(&settings);
  if (context->libgav1_status_code != kLibgav1StatusOk) {
    return reinterpret_cast<jlong>(context);
  }

  // Populate JNI References.
  const jclass outputBufferClass = env->FindClass(
      "com/google/android/exoplayer2/decoder/VideoDecoderOutputBuffer");
  context->decoder_private_field =
      env->GetFieldID(outputBufferClass, "decoderPrivate", "I");
  context->output_mode_field = env->GetFieldID(outputBufferClass, "mode", "I");
  context->data_field =
      env->GetFieldID(outputBufferClass, "data", "Ljava/nio/ByteBuffer;");
  context->init_for_private_frame_method =
      env->GetMethodID(outputBufferClass, "initForPrivateFrame", "(II)V");
  context->init_for_yuv_frame_method =
      env->GetMethodID(outputBufferClass, "initForYuvFrame", "(IIIII)Z");

  return reinterpret_cast<jlong>(context);
}

DECODER_FUNC(void, gav1Close, jlong jContext) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  delete context;
}

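// Enqueues one compressed frame from the direct ByteBuffer |encodedData| into
// the decoder. Returns kStatusOk on success or kStatusError if libgav1 rejects
// the frame.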
DECODER_FUNC(jint, gav1Decode, jlong jContext, jobject encodedData,
             jint length) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  const uint8_t* const buffer = reinterpret_cast<const uint8_t*>(
      env->GetDirectBufferAddress(encodedData));
  context->libgav1_status_code =
      context->decoder.EnqueueFrame(buffer, length, /*user_private_data=*/0,
                                    /*buffer_private_data=*/nullptr);
  if (context->libgav1_status_code != kLibgav1StatusOk) {
    return kStatusError;
  }
  return kStatusOk;
}

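// Dequeues the next decoded frame into |jOutputBuffer|. In kOutputModeYuv the
// frame is copied (10-bit content is dithered down to 8 bits) into the
// buffer's ByteBuffer; in kOutputModeSurfaceYuv only the frame buffer id is
// stored so the frame can later be rendered with gav1RenderFrame.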
DECODER_FUNC(jint, gav1GetFrame, jlong jContext, jobject jOutputBuffer,
             jboolean decodeOnly) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  const libgav1::DecoderBuffer* decoder_buffer;
  context->libgav1_status_code = context->decoder.DequeueFrame(&decoder_buffer);
  if (context->libgav1_status_code != kLibgav1StatusOk) {
    return kStatusError;
  }

  if (decodeOnly || decoder_buffer == nullptr) {
    // This is not an error. The input data was decode-only or no displayable
    // frames are available.
    return kStatusDecodeOnly;
  }

  const int output_mode =
      env->GetIntField(jOutputBuffer, context->output_mode_field);
  if (output_mode == kOutputModeYuv) {
    // Resize the buffer if required. Default color conversion will be used as
    // libgav1::DecoderBuffer doesn't expose color space info.
    const jboolean init_result = env->CallBooleanMethod(
        jOutputBuffer, context->init_for_yuv_frame_method,
        decoder_buffer->displayed_width[kPlaneY],
        decoder_buffer->displayed_height[kPlaneY],
        decoder_buffer->stride[kPlaneY], decoder_buffer->stride[kPlaneU],
        kColorSpaceUnknown);
    if (env->ExceptionCheck()) {
      // Exception is thrown in Java when returning from the native call.
      return kStatusError;
    }
    if (!init_result) {
      context->jni_status_code = kJniStatusBufferResizeError;
      return kStatusError;
    }

    const jobject data_object =
        env->GetObjectField(jOutputBuffer, context->data_field);
    jbyte* const data =
        reinterpret_cast<jbyte*>(env->GetDirectBufferAddress(data_object));

    switch (decoder_buffer->bitdepth) {
      case 8:
        CopyFrameToDataBuffer(decoder_buffer, data);
        break;
      case 10:
#ifdef CPU_FEATURES_COMPILED_ANY_ARM_NEON
        Convert10BitFrameTo8BitDataBufferNeon(decoder_buffer, data);
#else
        Convert10BitFrameTo8BitDataBuffer(decoder_buffer, data);
#endif  // CPU_FEATURES_COMPILED_ANY_ARM_NEON
        break;
      default:
        context->jni_status_code = kJniStatusBitDepth12NotSupportedWithYuv;
        return kStatusError;
    }
  } else if (output_mode == kOutputModeSurfaceYuv) {
    if (decoder_buffer->bitdepth != 8) {
      context->jni_status_code =
          kJniStatusHighBitDepthNotSupportedWithSurfaceYuv;
      return kStatusError;
    }

    if (decoder_buffer->NumPlanes() > kMaxPlanes) {
      context->jni_status_code = kJniStatusInvalidNumOfPlanes;
      return kStatusError;
    }

    const int buffer_id =
        *static_cast<const int*>(decoder_buffer->buffer_private_data);
    context->buffer_manager.AddBufferReference(buffer_id);
    JniFrameBuffer* const jni_buffer =
        context->buffer_manager.GetBuffer(buffer_id);
    jni_buffer->SetFrameData(*decoder_buffer);
    env->CallVoidMethod(jOutputBuffer, context->init_for_private_frame_method,
                        decoder_buffer->displayed_width[kPlaneY],
                        decoder_buffer->displayed_height[kPlaneY]);
    if (env->ExceptionCheck()) {
      // Exception is thrown in Java when returning from the native call.
      return kStatusError;
    }
    env->SetIntField(jOutputBuffer, context->decoder_private_field, buffer_id);
  }

  return kStatusOk;
}

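// Renders the frame referenced by |jOutputBuffer| to |jSurface| by locking the
// ANativeWindow and copying the Y, V and U planes into its YV12 buffer.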
DECODER_FUNC(jint, gav1RenderFrame, jlong jContext, jobject jSurface,
             jobject jOutputBuffer) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  const int buffer_id =
      env->GetIntField(jOutputBuffer, context->decoder_private_field);
  JniFrameBuffer* const jni_buffer =
      context->buffer_manager.GetBuffer(buffer_id);

  if (!context->MaybeAcquireNativeWindow(env, jSurface)) {
    return kStatusError;
  }

  if (context->native_window_width != jni_buffer->DisplayedWidth(kPlaneY) ||
      context->native_window_height != jni_buffer->DisplayedHeight(kPlaneY)) {
    if (ANativeWindow_setBuffersGeometry(
            context->native_window, jni_buffer->DisplayedWidth(kPlaneY),
            jni_buffer->DisplayedHeight(kPlaneY), kImageFormatYV12)) {
      context->jni_status_code = kJniStatusANativeWindowError;
      return kStatusError;
    }
    context->native_window_width = jni_buffer->DisplayedWidth(kPlaneY);
    context->native_window_height = jni_buffer->DisplayedHeight(kPlaneY);
  }

  ANativeWindow_Buffer native_window_buffer;
  if (ANativeWindow_lock(context->native_window, &native_window_buffer,
                         /*inOutDirtyBounds=*/nullptr) ||
      native_window_buffer.bits == nullptr) {
    context->jni_status_code = kJniStatusANativeWindowError;
    return kStatusError;
  }

  // Y plane
  CopyPlane(jni_buffer->Plane(kPlaneY), jni_buffer->Stride(kPlaneY),
            reinterpret_cast<uint8_t*>(native_window_buffer.bits),
            native_window_buffer.stride, jni_buffer->DisplayedWidth(kPlaneY),
            jni_buffer->DisplayedHeight(kPlaneY));

  const int y_plane_size =
      native_window_buffer.stride * native_window_buffer.height;
  const int32_t native_window_buffer_uv_height =
      (native_window_buffer.height + 1) / 2;
  const int native_window_buffer_uv_stride =
      AlignTo16(native_window_buffer.stride / 2);

  // TODO(b/140606738): Handle monochrome videos.

  // V plane
  // Since the format for ANativeWindow is YV12, V plane is being processed
  // before U plane.
  const int v_plane_height = std::min(native_window_buffer_uv_height,
                                      jni_buffer->DisplayedHeight(kPlaneV));
  CopyPlane(
      jni_buffer->Plane(kPlaneV), jni_buffer->Stride(kPlaneV),
      reinterpret_cast<uint8_t*>(native_window_buffer.bits) + y_plane_size,
      native_window_buffer_uv_stride, jni_buffer->DisplayedWidth(kPlaneV),
      v_plane_height);

  const int v_plane_size = v_plane_height * native_window_buffer_uv_stride;

  // U plane
  CopyPlane(jni_buffer->Plane(kPlaneU), jni_buffer->Stride(kPlaneU),
            reinterpret_cast<uint8_t*>(native_window_buffer.bits) +
                y_plane_size + v_plane_size,
            native_window_buffer_uv_stride, jni_buffer->DisplayedWidth(kPlaneU),
            std::min(native_window_buffer_uv_height,
                     jni_buffer->DisplayedHeight(kPlaneU)));

  if (ANativeWindow_unlockAndPost(context->native_window)) {
    context->jni_status_code = kJniStatusANativeWindowError;
    return kStatusError;
  }

  return kStatusOk;
}

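// Releases the frame buffer reference held by |jOutputBuffer| (populated in
// kOutputModeSurfaceYuv) and resets its decoderPrivate field to -1.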
DECODER_FUNC(void, gav1ReleaseFrame, jlong jContext, jobject jOutputBuffer) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  const int buffer_id =
      env->GetIntField(jOutputBuffer, context->decoder_private_field);
  env->SetIntField(jOutputBuffer, context->decoder_private_field, -1);
  context->jni_status_code = context->buffer_manager.ReleaseBuffer(buffer_id);
  if (context->jni_status_code != kJniStatusOk) {
    LOGE("%s", GetJniErrorMessage(context->jni_status_code));
  }
}

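// Returns a human-readable message for the most recent libgav1 or JNI wrapper
// error recorded in the context, or "None." if there is no pending error.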
DECODER_FUNC(jstring, gav1GetErrorMessage, jlong jContext) {
  if (jContext == 0) {
    return env->NewStringUTF("Failed to initialize JNI context.");
  }

  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  if (context->libgav1_status_code != kLibgav1StatusOk) {
    return env->NewStringUTF(
        libgav1::GetErrorString(context->libgav1_status_code));
  }
  if (context->jni_status_code != kJniStatusOk) {
    return env->NewStringUTF(GetJniErrorMessage(context->jni_status_code));
  }

  return env->NewStringUTF("None.");
}

DECODER_FUNC(jint, gav1CheckError, jlong jContext) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  if (context->libgav1_status_code != kLibgav1StatusOk ||
      context->jni_status_code != kJniStatusOk) {
    return kStatusError;
  }
  return kStatusOk;
}

DECODER_FUNC(jint, gav1GetThreads) {
  return gav1_jni::GetNumberOfPerformanceCoresOnline();
}

// TODO(b/139902005): Add functions for getting libgav1 version and build
// configuration once libgav1 ABI provides this information.