/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

#include "cpu_features_macros.h"  // NOLINT
#ifdef CPU_FEATURES_ARCH_ARM
#include "cpuinfo_arm.h"  // NOLINT
#endif                    // CPU_FEATURES_ARCH_ARM
#ifdef CPU_FEATURES_COMPILED_ANY_ARM_NEON
#include <arm_neon.h>
#endif  // CPU_FEATURES_COMPILED_ANY_ARM_NEON
#include <jni.h>

#include <algorithm>
#include <cstdint>
#include <cstring>
#include <mutex>  // NOLINT
#include <new>

#include "gav1/decoder.h"

#define LOG_TAG "gav1_jni"
#define LOGE(...) \
  ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))

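// Declares and defines a JNI entry point for the Java class
// com.google.android.exoplayer2.ext.av1.Gav1Decoder. The block that follows
// the macro invocation becomes the body of the native method |NAME|.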
#define DECODER_FUNC(RETURN_TYPE, NAME, ...)                         \
  extern "C" {                                                       \
  JNIEXPORT RETURN_TYPE                                              \
      Java_com_google_android_exoplayer2_ext_av1_Gav1Decoder_##NAME( \
          JNIEnv* env, jobject thiz, ##__VA_ARGS__);                 \
  }                                                                  \
  JNIEXPORT RETURN_TYPE                                              \
      Java_com_google_android_exoplayer2_ext_av1_Gav1Decoder_##NAME( \
          JNIEnv* env, jobject thiz, ##__VA_ARGS__)

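// Called by the Java VM when the native library is loaded. Returns the JNI
// version this library requires, or -1 if the environment cannot be obtained.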
jint JNI_OnLoad(JavaVM* vm, void* reserved) {
  JNIEnv* env;
  if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
    return -1;
  }
  return JNI_VERSION_1_6;
}

namespace {

// YUV plane indices.
const int kPlaneY = 0;
const int kPlaneU = 1;
const int kPlaneV = 2;
const int kMaxPlanes = 3;

// Android YUV format. See:
// https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12.
const int kImageFormatYV12 = 0x32315659;

// LINT.IfChange
// Output modes.
const int kOutputModeYuv = 0;
const int kOutputModeSurfaceYuv = 1;
// LINT.ThenChange(../../../../../library/common/src/main/java/com/google/android/exoplayer2/C.java)

// LINT.IfChange
const int kColorSpaceUnknown = 0;
// LINT.ThenChange(../../../../../library/core/src/main/java/com/google/android/exoplayer2/video/VideoDecoderOutputBuffer.java)

// LINT.IfChange
// Return codes for jni methods.
const int kStatusError = 0;
const int kStatusOk = 1;
const int kStatusDecodeOnly = 2;
// LINT.ThenChange(../java/com/google/android/exoplayer2/ext/av1/Gav1Decoder.java)

// Status codes specific to the JNI wrapper code.
enum JniStatusCode {
  kJniStatusOk = 0,
  kJniStatusOutOfMemory = -1,
  kJniStatusBufferAlreadyReleased = -2,
  kJniStatusInvalidNumOfPlanes = -3,
  kJniStatusBitDepth12NotSupportedWithYuv = -4,
  kJniStatusHighBitDepthNotSupportedWithSurfaceYuv = -5,
  kJniStatusANativeWindowError = -6,
  kJniStatusBufferResizeError = -7,
  kJniStatusNeonNotSupported = -8
};

const char* GetJniErrorMessage(JniStatusCode error_code) {
  switch (error_code) {
    case kJniStatusOutOfMemory:
      return "Out of memory.";
    case kJniStatusBufferAlreadyReleased:
      return "JNI buffer already released.";
    case kJniStatusBitDepth12NotSupportedWithYuv:
      return "Bit depth 12 is not supported with YUV.";
    case kJniStatusHighBitDepthNotSupportedWithSurfaceYuv:
      return "High bit depth (10 or 12 bits per pixel) output format is not "
             "supported with YUV surface.";
    case kJniStatusInvalidNumOfPlanes:
      return "Libgav1 decoded buffer has invalid number of planes.";
    case kJniStatusANativeWindowError:
      return "ANativeWindow error.";
    case kJniStatusBufferResizeError:
      return "Buffer resize failed.";
    case kJniStatusNeonNotSupported:
      return "Neon is not supported.";
    default:
      return "Unrecognized error code.";
  }
}

// Manages frame buffer and reference information.
class JniFrameBuffer {
 public:
  explicit JniFrameBuffer(int id) : id_(id), reference_count_(0) {}
  ~JniFrameBuffer() {
    for (int plane_index = kPlaneY; plane_index < kMaxPlanes; plane_index++) {
      delete[] raw_buffer_[plane_index];
    }
  }

  // Not copyable or movable.
  JniFrameBuffer(const JniFrameBuffer&) = delete;
  JniFrameBuffer(JniFrameBuffer&&) = delete;
  JniFrameBuffer& operator=(const JniFrameBuffer&) = delete;
  JniFrameBuffer& operator=(JniFrameBuffer&&) = delete;

  void SetFrameData(const libgav1::DecoderBuffer& decoder_buffer) {
    for (int plane_index = kPlaneY; plane_index < decoder_buffer.NumPlanes();
         plane_index++) {
      stride_[plane_index] = decoder_buffer.stride[plane_index];
      plane_[plane_index] = decoder_buffer.plane[plane_index];
      displayed_width_[plane_index] =
          decoder_buffer.displayed_width[plane_index];
      displayed_height_[plane_index] =
          decoder_buffer.displayed_height[plane_index];
    }
  }

  int Stride(int plane_index) const { return stride_[plane_index]; }
  uint8_t* Plane(int plane_index) const { return plane_[plane_index]; }
  int DisplayedWidth(int plane_index) const {
    return displayed_width_[plane_index];
  }
  int DisplayedHeight(int plane_index) const {
    return displayed_height_[plane_index];
  }

  // Methods maintaining reference count are not thread-safe. They must be
  // called with a lock held.
  void AddReference() { ++reference_count_; }
  void RemoveReference() { reference_count_--; }
  bool InUse() const { return reference_count_ != 0; }

  uint8_t* RawBuffer(int plane_index) const { return raw_buffer_[plane_index]; }
  void* BufferPrivateData() const { return const_cast<int*>(&id_); }

  // Attempts to reallocate data planes if the existing ones don't have enough
  // capacity. Returns true if the allocation was successful or wasn't needed,
  // false if the allocation failed.
  bool MaybeReallocateGav1DataPlanes(int y_plane_min_size,
                                     int uv_plane_min_size) {
    for (int plane_index = kPlaneY; plane_index < kMaxPlanes; plane_index++) {
      const int min_size =
          (plane_index == kPlaneY) ? y_plane_min_size : uv_plane_min_size;
      if (raw_buffer_size_[plane_index] >= min_size) continue;
      delete[] raw_buffer_[plane_index];
      raw_buffer_[plane_index] = new (std::nothrow) uint8_t[min_size];
      if (!raw_buffer_[plane_index]) {
        raw_buffer_size_[plane_index] = 0;
        return false;
      }
      raw_buffer_size_[plane_index] = min_size;
    }
    return true;
  }

 private:
  int stride_[kMaxPlanes];
  uint8_t* plane_[kMaxPlanes];
  int displayed_width_[kMaxPlanes];
  int displayed_height_[kMaxPlanes];
  const int id_;
  int reference_count_;
  // Pointers to the raw buffers allocated for the data planes.
  uint8_t* raw_buffer_[kMaxPlanes] = {};
  // Sizes of the raw buffers in bytes.
  size_t raw_buffer_size_[kMaxPlanes] = {};
};

// Manages frame buffers used by libgav1 decoder and ExoPlayer.
// Handles synchronization between libgav1 and ExoPlayer threads.
class JniBufferManager {
 public:
  ~JniBufferManager() {
    // This lock does not do anything since libgav1 has released all the frame
    // buffers. It exists merely to be consistent with all other usage of
    // |all_buffers_| and |all_buffer_count_|.
    std::lock_guard<std::mutex> lock(mutex_);
    while (all_buffer_count_--) {
      delete all_buffers_[all_buffer_count_];
    }
  }

  JniStatusCode GetBuffer(size_t y_plane_min_size, size_t uv_plane_min_size,
                          JniFrameBuffer** jni_buffer) {
    std::lock_guard<std::mutex> lock(mutex_);

    JniFrameBuffer* output_buffer;
    if (free_buffer_count_) {
      output_buffer = free_buffers_[--free_buffer_count_];
    } else if (all_buffer_count_ < kMaxFrames) {
      output_buffer = new (std::nothrow) JniFrameBuffer(all_buffer_count_);
      if (output_buffer == nullptr) return kJniStatusOutOfMemory;
      all_buffers_[all_buffer_count_++] = output_buffer;
    } else {
      // Maximum number of buffers is being used.
      return kJniStatusOutOfMemory;
    }
    if (!output_buffer->MaybeReallocateGav1DataPlanes(y_plane_min_size,
                                                      uv_plane_min_size)) {
      return kJniStatusOutOfMemory;
    }

    output_buffer->AddReference();
    *jni_buffer = output_buffer;

    return kJniStatusOk;
  }

  JniFrameBuffer* GetBuffer(int id) const { return all_buffers_[id]; }

  void AddBufferReference(int id) {
    std::lock_guard<std::mutex> lock(mutex_);
    all_buffers_[id]->AddReference();
  }

  JniStatusCode ReleaseBuffer(int id) {
    std::lock_guard<std::mutex> lock(mutex_);
    JniFrameBuffer* buffer = all_buffers_[id];
    if (!buffer->InUse()) {
      return kJniStatusBufferAlreadyReleased;
    }
    buffer->RemoveReference();
    if (!buffer->InUse()) {
      free_buffers_[free_buffer_count_++] = buffer;
    }
    return kJniStatusOk;
  }

 private:
  static const int kMaxFrames = 32;

  JniFrameBuffer* all_buffers_[kMaxFrames];
  int all_buffer_count_ = 0;

  JniFrameBuffer* free_buffers_[kMaxFrames];
  int free_buffer_count_ = 0;

  std::mutex mutex_;
};

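// Per-decoder state shared between the JNI entry points and the libgav1 frame
// buffer callbacks: the decoder, the buffer manager, cached JNI field and
// method IDs, the output surface state, and the last status codes.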
struct JniContext {
  ~JniContext() {
    if (native_window) {
      ANativeWindow_release(native_window);
    }
  }

  bool MaybeAcquireNativeWindow(JNIEnv* env, jobject new_surface) {
    if (surface == new_surface) {
      return true;
    }
    if (native_window) {
      ANativeWindow_release(native_window);
    }
    native_window_width = 0;
    native_window_height = 0;
    native_window = ANativeWindow_fromSurface(env, new_surface);
    if (native_window == nullptr) {
      jni_status_code = kJniStatusANativeWindowError;
      surface = nullptr;
      return false;
    }
    surface = new_surface;
    return true;
  }

  jfieldID decoder_private_field;
  jfieldID output_mode_field;
  jfieldID data_field;
  jmethodID init_for_private_frame_method;
  jmethodID init_for_yuv_frame_method;

  JniBufferManager buffer_manager;
  // The libgav1 decoder instance has to be deleted before |buffer_manager| is
  // destructed. This will make sure that libgav1 releases all the frame
  // buffers that it might be holding references to. So this has to be declared
  // after |buffer_manager| since the destruction happens in reverse order of
  // declaration.
  libgav1::Decoder decoder;

  ANativeWindow* native_window = nullptr;
  jobject surface = nullptr;
  int native_window_width = 0;
  int native_window_height = 0;

  Libgav1StatusCode libgav1_status_code = kLibgav1StatusOk;
  JniStatusCode jni_status_code = kJniStatusOk;
};

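// Frame buffer allocation callback registered with libgav1. Computes the
// required plane sizes and fulfills the request from the JniBufferManager
// pool owned by |callback_private_data| (a JniContext).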
Libgav1StatusCode Libgav1GetFrameBuffer(void* callback_private_data,
                                        int bitdepth,
                                        libgav1::ImageFormat image_format,
                                        int width, int height, int left_border,
                                        int right_border, int top_border,
                                        int bottom_border, int stride_alignment,
                                        libgav1::FrameBuffer* frame_buffer) {
  libgav1::FrameBufferInfo info;
  Libgav1StatusCode status = libgav1::ComputeFrameBufferInfo(
      bitdepth, image_format, width, height, left_border, right_border,
      top_border, bottom_border, stride_alignment, &info);
  if (status != kLibgav1StatusOk) return status;

  JniContext* const context = static_cast<JniContext*>(callback_private_data);
  JniFrameBuffer* jni_buffer;
  context->jni_status_code = context->buffer_manager.GetBuffer(
      info.y_buffer_size, info.uv_buffer_size, &jni_buffer);
  if (context->jni_status_code != kJniStatusOk) {
    LOGE("%s", GetJniErrorMessage(context->jni_status_code));
    return kLibgav1StatusOutOfMemory;
  }

  uint8_t* const y_buffer = jni_buffer->RawBuffer(0);
  uint8_t* const u_buffer =
      (info.uv_buffer_size != 0) ? jni_buffer->RawBuffer(1) : nullptr;
  uint8_t* const v_buffer =
      (info.uv_buffer_size != 0) ? jni_buffer->RawBuffer(2) : nullptr;

  return libgav1::SetFrameBuffer(&info, y_buffer, u_buffer, v_buffer,
                                 jni_buffer->BufferPrivateData(), frame_buffer);
}

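// Frame buffer release callback registered with libgav1. Returns the buffer
// identified by |buffer_private_data| to the JniBufferManager pool.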
void Libgav1ReleaseFrameBuffer(void* callback_private_data,
                               void* buffer_private_data) {
  JniContext* const context = static_cast<JniContext*>(callback_private_data);
  const int buffer_id = *static_cast<const int*>(buffer_private_data);
  context->jni_status_code = context->buffer_manager.ReleaseBuffer(buffer_id);
  if (context->jni_status_code != kJniStatusOk) {
    LOGE("%s", GetJniErrorMessage(context->jni_status_code));
  }
}

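// Rounds |value| up to the next multiple of 16.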
constexpr int AlignTo16(int value) { return (value + 15) & (~15); }

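// Copies |height| rows of |width| bytes each from |source| to |destination|,
// advancing by the respective strides after every row.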
void CopyPlane(const uint8_t* source, int source_stride, uint8_t* destination,
               int destination_stride, int width, int height) {
  while (height--) {
    std::memcpy(destination, source, width);
    source += source_stride;
    destination += destination_stride;
  }
}

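// Copies all planes of an 8-bit decoded frame into the contiguous output
// buffer |data|, one plane after another.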
void CopyFrameToDataBuffer(const libgav1::DecoderBuffer* decoder_buffer,
                           jbyte* data) {
  for (int plane_index = kPlaneY; plane_index < decoder_buffer->NumPlanes();
       plane_index++) {
    const uint64_t length = decoder_buffer->stride[plane_index] *
                            decoder_buffer->displayed_height[plane_index];
    memcpy(data, decoder_buffer->plane[plane_index], length);
    data += length;
  }
}

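// Converts a 10-bit decoded frame to 8 bits per sample and writes it into
// |data|. Scalar fallback path; the rounding remainder is carried to the next
// pixel as a lightweight dither.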
void Convert10BitFrameTo8BitDataBuffer(
    const libgav1::DecoderBuffer* decoder_buffer, jbyte* data) {
  for (int plane_index = kPlaneY; plane_index < decoder_buffer->NumPlanes();
       plane_index++) {
    int sample = 0;
    const uint8_t* source = decoder_buffer->plane[plane_index];
    for (int i = 0; i < decoder_buffer->displayed_height[plane_index]; i++) {
      const uint16_t* source_16 = reinterpret_cast<const uint16_t*>(source);
      for (int j = 0; j < decoder_buffer->displayed_width[plane_index]; j++) {
        // Lightweight dither. Carry over the remainder of each 10->8 bit
        // conversion to the next pixel.
        sample += source_16[j];
        data[j] = sample >> 2;
        sample &= 3;  // Remainder.
      }
      source += decoder_buffer->stride[plane_index];
      data += decoder_buffer->stride[plane_index];
    }
  }
}

#ifdef CPU_FEATURES_COMPILED_ANY_ARM_NEON
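// NEON variant of the 10-bit to 8-bit conversion. Uses a vectorized linear
// congruential generator to produce dither noise and processes 16 samples per
// loop iteration; leftover pixels in each row are converted one at a time.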
void Convert10BitFrameTo8BitDataBufferNeon(
    const libgav1::DecoderBuffer* decoder_buffer, jbyte* data) {
  uint32x2_t lcg_value = vdup_n_u32(random());
  lcg_value = vset_lane_u32(random(), lcg_value, 1);
  // LCG values recommended in "Numerical Recipes".
  const uint32x2_t LCG_MULT = vdup_n_u32(1664525);
  const uint32x2_t LCG_INCR = vdup_n_u32(1013904223);

  for (int plane_index = kPlaneY; plane_index < kMaxPlanes; plane_index++) {
    const uint8_t* source = decoder_buffer->plane[plane_index];

    for (int i = 0; i < decoder_buffer->displayed_height[plane_index]; i++) {
      const uint16_t* source_16 = reinterpret_cast<const uint16_t*>(source);
      uint8_t* destination = reinterpret_cast<uint8_t*>(data);

      // Each read consumes 4 2-byte samples, but to reduce branches and
      // random steps we unroll to 4 rounds, so each loop consumes 16
      // samples.
      const int j_max = decoder_buffer->displayed_width[plane_index] & ~15;
      int j;
      for (j = 0; j < j_max; j += 16) {
        // Run a round of the RNG.
        lcg_value = vmla_u32(LCG_INCR, lcg_value, LCG_MULT);

        // Round 1.
        // The lower two bits of this LCG parameterization are garbage,
        // leaving streaks on the image. We access the upper bits of each
        // 16-bit lane by shifting. (We use this both as an 8- and 16-bit
        // vector, so the choice of which one to keep it as is arbitrary.)
        uint8x8_t randvec =
            vreinterpret_u8_u16(vshr_n_u16(vreinterpret_u16_u32(lcg_value), 8));

        // We retrieve the values and shift them so that the bits we'll
        // shift out (after biasing) are in the upper 8 bits of each 16-bit
        // lane.
        uint16x4_t values = vshl_n_u16(vld1_u16(source_16), 6);
        // We add the bias bits in the lower 8 to the shifted values to get
        // the final values in the upper 8 bits.
        uint16x4_t added_1 = vqadd_u16(values, vreinterpret_u16_u8(randvec));
        source_16 += 4;

        // Round 2.
        // Shifting the randvec bits left by 2 bits, as an 8-bit vector,
        // should leave us with enough bias to get the needed rounding
        // operation.
        randvec = vshl_n_u8(randvec, 2);

        // Retrieve and sum the next 4 pixels.
        values = vshl_n_u16(vld1_u16(source_16), 6);
        uint16x4_t added_2 = vqadd_u16(values, vreinterpret_u16_u8(randvec));
        source_16 += 4;

        // Reinterpret the two added vectors as 8x8, unzip (vuzp) them to
        // separate the high and low bytes, and discard the lower portions.
        uint8x8_t zipped =
            vuzp_u8(vreinterpret_u8_u16(added_1), vreinterpret_u8_u16(added_2))
                .val[1];
        vst1_u8(destination, zipped);
        destination += 8;

        // Run it again with the next two rounds using the remaining
        // entropy in randvec.

        // Round 3.
        randvec = vshl_n_u8(randvec, 2);
        values = vshl_n_u16(vld1_u16(source_16), 6);
        added_1 = vqadd_u16(values, vreinterpret_u16_u8(randvec));
        source_16 += 4;

        // Round 4.
        randvec = vshl_n_u8(randvec, 2);
        values = vshl_n_u16(vld1_u16(source_16), 6);
        added_2 = vqadd_u16(values, vreinterpret_u16_u8(randvec));
        source_16 += 4;

        zipped =
            vuzp_u8(vreinterpret_u8_u16(added_1), vreinterpret_u8_u16(added_2))
                .val[1];
        vst1_u8(destination, zipped);
        destination += 8;
      }

      uint32_t randval = 0;
      // For the remaining pixels in each row - usually none, as most
      // standard sizes are divisible by 32 - convert them "by hand".
      for (; j < decoder_buffer->displayed_width[plane_index]; j++) {
        if (!randval) randval = random();
        destination[j] = (source_16[j] + (randval & 3)) >> 2;
        randval >>= 2;
      }

      source += decoder_buffer->stride[plane_index];
      data += decoder_buffer->stride[plane_index];
    }
  }
}
#endif  // CPU_FEATURES_COMPILED_ANY_ARM_NEON

}  // namespace

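// Creates and initializes the native decoder context. Verifies NEON support
// on ARM, configures the libgav1 decoder with the frame buffer callbacks, and
// caches the JNI field and method IDs used by the other entry points. Returns
// the context pointer as a jlong, or kStatusError if the context cannot be
// allocated.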
DECODER_FUNC(jlong, gav1Init, jint threads) {
  JniContext* context = new (std::nothrow) JniContext();
  if (context == nullptr) {
    return kStatusError;
  }

#ifdef CPU_FEATURES_ARCH_ARM
  // Libgav1 requires NEON with arm ABIs.
#ifdef CPU_FEATURES_COMPILED_ANY_ARM_NEON
  const cpu_features::ArmFeatures arm_features =
      cpu_features::GetArmInfo().features;
  if (!arm_features.neon) {
    context->jni_status_code = kJniStatusNeonNotSupported;
    return reinterpret_cast<jlong>(context);
  }
#else
  context->jni_status_code = kJniStatusNeonNotSupported;
  return reinterpret_cast<jlong>(context);
#endif  // CPU_FEATURES_COMPILED_ANY_ARM_NEON
#endif  // CPU_FEATURES_ARCH_ARM

  libgav1::DecoderSettings settings;
  settings.threads = threads;
  settings.get_frame_buffer = Libgav1GetFrameBuffer;
  settings.release_frame_buffer = Libgav1ReleaseFrameBuffer;
  settings.callback_private_data = context;

  context->libgav1_status_code = context->decoder.Init(&settings);
  if (context->libgav1_status_code != kLibgav1StatusOk) {
    return reinterpret_cast<jlong>(context);
  }

  // Populate JNI References.
  const jclass outputBufferClass = env->FindClass(
      "com/google/android/exoplayer2/video/VideoDecoderOutputBuffer");
  context->decoder_private_field =
      env->GetFieldID(outputBufferClass, "decoderPrivate", "I");
  context->output_mode_field = env->GetFieldID(outputBufferClass, "mode", "I");
  context->data_field =
      env->GetFieldID(outputBufferClass, "data", "Ljava/nio/ByteBuffer;");
  context->init_for_private_frame_method =
      env->GetMethodID(outputBufferClass, "initForPrivateFrame", "(II)V");
  context->init_for_yuv_frame_method =
      env->GetMethodID(outputBufferClass, "initForYuvFrame", "(IIIII)Z");

  return reinterpret_cast<jlong>(context);
}

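// Releases the decoder context created by gav1Init.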
DECODER_FUNC(void, gav1Close, jlong jContext) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  delete context;
}

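// Enqueues |length| bytes of compressed data from the direct ByteBuffer
// |encodedData| into the libgav1 decoder.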
DECODER_FUNC(jint, gav1Decode, jlong jContext, jobject encodedData,
             jint length) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  const uint8_t* const buffer = reinterpret_cast<const uint8_t*>(
      env->GetDirectBufferAddress(encodedData));
  context->libgav1_status_code =
      context->decoder.EnqueueFrame(buffer, length, /*user_private_data=*/0,
                                    /*buffer_private_data=*/nullptr);
  if (context->libgav1_status_code != kLibgav1StatusOk) {
    return kStatusError;
  }
  return kStatusOk;
}

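// Dequeues the next decoded frame. In YUV output mode the frame is copied
// (and, for 10-bit input, dithered down to 8 bits) into the output buffer's
// ByteBuffer; in surface YUV mode a reference to the underlying frame buffer
// is stored in the output buffer for later rendering via gav1RenderFrame.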
DECODER_FUNC(jint, gav1GetFrame, jlong jContext, jobject jOutputBuffer,
             jboolean decodeOnly) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  const libgav1::DecoderBuffer* decoder_buffer;
  context->libgav1_status_code = context->decoder.DequeueFrame(&decoder_buffer);
  if (context->libgav1_status_code != kLibgav1StatusOk) {
    return kStatusError;
  }

  if (decodeOnly || decoder_buffer == nullptr) {
    // This is not an error. The input data was decode-only or no displayable
    // frames are available.
    return kStatusDecodeOnly;
  }

  const int output_mode =
      env->GetIntField(jOutputBuffer, context->output_mode_field);
  if (output_mode == kOutputModeYuv) {
    // Resize the buffer if required. Default color conversion will be used as
    // libgav1::DecoderBuffer doesn't expose color space info.
    const jboolean init_result = env->CallBooleanMethod(
        jOutputBuffer, context->init_for_yuv_frame_method,
        decoder_buffer->displayed_width[kPlaneY],
        decoder_buffer->displayed_height[kPlaneY],
        decoder_buffer->stride[kPlaneY], decoder_buffer->stride[kPlaneU],
        kColorSpaceUnknown);
    if (env->ExceptionCheck()) {
      // Exception is thrown in Java when returning from the native call.
      return kStatusError;
    }
    if (!init_result) {
      context->jni_status_code = kJniStatusBufferResizeError;
      return kStatusError;
    }

    const jobject data_object =
        env->GetObjectField(jOutputBuffer, context->data_field);
    jbyte* const data =
        reinterpret_cast<jbyte*>(env->GetDirectBufferAddress(data_object));

    switch (decoder_buffer->bitdepth) {
      case 8:
        CopyFrameToDataBuffer(decoder_buffer, data);
        break;
      case 10:
#ifdef CPU_FEATURES_COMPILED_ANY_ARM_NEON
        Convert10BitFrameTo8BitDataBufferNeon(decoder_buffer, data);
#else
        Convert10BitFrameTo8BitDataBuffer(decoder_buffer, data);
#endif  // CPU_FEATURES_COMPILED_ANY_ARM_NEON
        break;
      default:
        context->jni_status_code = kJniStatusBitDepth12NotSupportedWithYuv;
        return kStatusError;
    }
  } else if (output_mode == kOutputModeSurfaceYuv) {
    if (decoder_buffer->bitdepth != 8) {
      context->jni_status_code =
          kJniStatusHighBitDepthNotSupportedWithSurfaceYuv;
      return kStatusError;
    }

    if (decoder_buffer->NumPlanes() > kMaxPlanes) {
      context->jni_status_code = kJniStatusInvalidNumOfPlanes;
      return kStatusError;
    }

    const int buffer_id =
        *static_cast<const int*>(decoder_buffer->buffer_private_data);
    context->buffer_manager.AddBufferReference(buffer_id);
    JniFrameBuffer* const jni_buffer =
        context->buffer_manager.GetBuffer(buffer_id);
    jni_buffer->SetFrameData(*decoder_buffer);
    env->CallVoidMethod(jOutputBuffer, context->init_for_private_frame_method,
                        decoder_buffer->displayed_width[kPlaneY],
                        decoder_buffer->displayed_height[kPlaneY]);
    if (env->ExceptionCheck()) {
      // Exception is thrown in Java when returning from the native call.
      return kStatusError;
    }
    env->SetIntField(jOutputBuffer, context->decoder_private_field, buffer_id);
  }

  return kStatusOk;
}

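// Renders a frame previously obtained in surface YUV mode to |jSurface| by
// locking the ANativeWindow and copying the Y, V and U planes into the YV12
// window buffer.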
DECODER_FUNC(jint, gav1RenderFrame, jlong jContext, jobject jSurface,
             jobject jOutputBuffer) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  const int buffer_id =
      env->GetIntField(jOutputBuffer, context->decoder_private_field);
  JniFrameBuffer* const jni_buffer =
      context->buffer_manager.GetBuffer(buffer_id);

  if (!context->MaybeAcquireNativeWindow(env, jSurface)) {
    return kStatusError;
  }

  if (context->native_window_width != jni_buffer->DisplayedWidth(kPlaneY) ||
      context->native_window_height != jni_buffer->DisplayedHeight(kPlaneY)) {
    if (ANativeWindow_setBuffersGeometry(
            context->native_window, jni_buffer->DisplayedWidth(kPlaneY),
            jni_buffer->DisplayedHeight(kPlaneY), kImageFormatYV12)) {
      context->jni_status_code = kJniStatusANativeWindowError;
      return kStatusError;
    }
    context->native_window_width = jni_buffer->DisplayedWidth(kPlaneY);
    context->native_window_height = jni_buffer->DisplayedHeight(kPlaneY);
  }

  ANativeWindow_Buffer native_window_buffer;
  if (ANativeWindow_lock(context->native_window, &native_window_buffer,
                         /*inOutDirtyBounds=*/nullptr) ||
      native_window_buffer.bits == nullptr) {
    context->jni_status_code = kJniStatusANativeWindowError;
    return kStatusError;
  }

  // Y plane
  CopyPlane(jni_buffer->Plane(kPlaneY), jni_buffer->Stride(kPlaneY),
            reinterpret_cast<uint8_t*>(native_window_buffer.bits),
            native_window_buffer.stride, jni_buffer->DisplayedWidth(kPlaneY),
            jni_buffer->DisplayedHeight(kPlaneY));

  const int y_plane_size =
      native_window_buffer.stride * native_window_buffer.height;
  const int32_t native_window_buffer_uv_height =
      (native_window_buffer.height + 1) / 2;
  const int native_window_buffer_uv_stride =
      AlignTo16(native_window_buffer.stride / 2);

  // TODO(b/140606738): Handle monochrome videos.

  // V plane
  // Since the format for the ANativeWindow is YV12, the V plane is processed
  // before the U plane.
  const int v_plane_height = std::min(native_window_buffer_uv_height,
                                      jni_buffer->DisplayedHeight(kPlaneV));
  CopyPlane(
      jni_buffer->Plane(kPlaneV), jni_buffer->Stride(kPlaneV),
      reinterpret_cast<uint8_t*>(native_window_buffer.bits) + y_plane_size,
      native_window_buffer_uv_stride, jni_buffer->DisplayedWidth(kPlaneV),
      v_plane_height);

  const int v_plane_size = v_plane_height * native_window_buffer_uv_stride;

  // U plane
  CopyPlane(jni_buffer->Plane(kPlaneU), jni_buffer->Stride(kPlaneU),
            reinterpret_cast<uint8_t*>(native_window_buffer.bits) +
                y_plane_size + v_plane_size,
            native_window_buffer_uv_stride, jni_buffer->DisplayedWidth(kPlaneU),
            std::min(native_window_buffer_uv_height,
                     jni_buffer->DisplayedHeight(kPlaneU)));

  if (ANativeWindow_unlockAndPost(context->native_window)) {
    context->jni_status_code = kJniStatusANativeWindowError;
    return kStatusError;
  }

  return kStatusOk;
}

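// Releases the frame buffer reference held by |jOutputBuffer| and resets its
// decoderPrivate field.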
DECODER_FUNC(void, gav1ReleaseFrame, jlong jContext, jobject jOutputBuffer) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  const int buffer_id =
      env->GetIntField(jOutputBuffer, context->decoder_private_field);
  env->SetIntField(jOutputBuffer, context->decoder_private_field, -1);
  context->jni_status_code = context->buffer_manager.ReleaseBuffer(buffer_id);
  if (context->jni_status_code != kJniStatusOk) {
    LOGE("%s", GetJniErrorMessage(context->jni_status_code));
  }
}

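// Returns a human-readable message for the most recent libgav1 or JNI error,
// or "None." if there is no pending error.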
DECODER_FUNC(jstring, gav1GetErrorMessage, jlong jContext) {
  if (jContext == 0) {
    return env->NewStringUTF("Failed to initialize JNI context.");
  }

  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  if (context->libgav1_status_code != kLibgav1StatusOk) {
    return env->NewStringUTF(
        libgav1::GetErrorString(context->libgav1_status_code));
  }
  if (context->jni_status_code != kJniStatusOk) {
    return env->NewStringUTF(GetJniErrorMessage(context->jni_status_code));
  }

  return env->NewStringUTF("None.");
}

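// Returns kStatusError if either the libgav1 decoder or the JNI wrapper has
// recorded an error, kStatusOk otherwise.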
DECODER_FUNC(jint, gav1CheckError, jlong jContext) {
  JniContext* const context = reinterpret_cast<JniContext*>(jContext);
  if (context->libgav1_status_code != kLibgav1StatusOk ||
      context->jni_status_code != kJniStatusOk) {
    return kStatusError;
  }
  return kStatusOk;
}

// TODO(b/139902005): Add functions for getting libgav1 version and build
// configuration once libgav1 ABI provides this information.