/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "sdk/android/src/jni/video_encoder_wrapper.h"

#include <utility>

#include "common_video/h264/h264_common.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "modules/video_coding/utility/vp8_header_parser.h"
#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
#include "rtc_base/logging.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/time_utils.h"
#include "sdk/android/generated_video_jni/VideoEncoderWrapper_jni.h"
#include "sdk/android/generated_video_jni/VideoEncoder_jni.h"
#include "sdk/android/native_api/jni/class_loader.h"
#include "sdk/android/native_api/jni/java_types.h"
#include "sdk/android/src/jni/encoded_image.h"
#include "sdk/android/src/jni/video_codec_status.h"

namespace webrtc {
namespace jni {

VideoEncoderWrapper::VideoEncoderWrapper(JNIEnv* jni,
                                         const JavaRef<jobject>& j_encoder)
    : encoder_(jni, j_encoder), int_array_class_(GetClass(jni, "[I")) {
  initialized_ = false;
  num_resets_ = 0;

  // Get bitrate limits in the constructor. This is a static property of the
  // encoder and is expected to be available before it is initialized.
  encoder_info_.resolution_bitrate_limits = JavaToNativeResolutionBitrateLimits(
      jni, Java_VideoEncoder_getResolutionBitrateLimits(jni, encoder_));
}

VideoEncoderWrapper::~VideoEncoderWrapper() = default;

int VideoEncoderWrapper::InitEncode(const VideoCodec* codec_settings,
                                    const Settings& settings) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();

  codec_settings_ = *codec_settings;
  capabilities_ = settings.capabilities;
  number_of_cores_ = settings.number_of_cores;
  num_resets_ = 0;

  return InitEncodeInternal(jni);
}

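// (Re)initializes the underlying Java encoder from the stored codec settings.
// Called from InitEncode() and again when HandleReturnCode() resets the
// encoder after a non-critical error.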
int32_t VideoEncoderWrapper::InitEncodeInternal(JNIEnv* jni) {
  bool automatic_resize_on;
  switch (codec_settings_.codecType) {
    case kVideoCodecVP8:
      automatic_resize_on = codec_settings_.VP8()->automaticResizeOn;
      break;
    case kVideoCodecVP9:
      automatic_resize_on = codec_settings_.VP9()->automaticResizeOn;
      gof_.SetGofInfoVP9(TemporalStructureMode::kTemporalStructureMode1);
      gof_idx_ = 0;
      break;
    default:
      automatic_resize_on = true;
  }

  RTC_DCHECK(capabilities_);
  ScopedJavaLocalRef<jobject> capabilities =
      Java_Capabilities_Constructor(jni, capabilities_->loss_notification);

  ScopedJavaLocalRef<jobject> settings = Java_Settings_Constructor(
      jni, number_of_cores_, codec_settings_.width, codec_settings_.height,
      static_cast<int>(codec_settings_.startBitrate),
      static_cast<int>(codec_settings_.maxFramerate),
      static_cast<int>(codec_settings_.numberOfSimulcastStreams),
      automatic_resize_on, capabilities);

  ScopedJavaLocalRef<jobject> callback =
      Java_VideoEncoderWrapper_createEncoderCallback(jni,
                                                     jlongFromPointer(this));

  int32_t status = JavaToNativeVideoCodecStatus(
      jni, Java_VideoEncoder_initEncode(jni, encoder_, settings, callback));
  RTC_LOG(LS_INFO) << "initEncode: " << status;

  encoder_info_.supports_native_handle = true;
  encoder_info_.implementation_name = GetImplementationName(jni);
  encoder_info_.scaling_settings = GetScalingSettingsInternal(jni);
  encoder_info_.is_hardware_accelerated = IsHardwareVideoEncoder(jni, encoder_);
  encoder_info_.has_internal_source = false;

  if (status == WEBRTC_VIDEO_CODEC_OK) {
    initialized_ = true;
  }
  return status;
}

int32_t VideoEncoderWrapper::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t VideoEncoderWrapper::Release() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();

  int32_t status = JavaToNativeVideoCodecStatus(
      jni, Java_VideoEncoder_release(jni, encoder_));
  RTC_LOG(LS_INFO) << "release: " << status;
  frame_extra_infos_.clear();
  initialized_ = false;

  return status;
}

int32_t VideoEncoderWrapper::Encode(
    const VideoFrame& frame,
    const std::vector<VideoFrameType>* frame_types) {
  if (!initialized_) {
    // Most likely initializing the codec failed.
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }

  JNIEnv* jni = AttachCurrentThreadIfNeeded();

  // Construct encode info.
  ScopedJavaLocalRef<jobjectArray> j_frame_types =
      NativeToJavaFrameTypeArray(jni, *frame_types);
  ScopedJavaLocalRef<jobject> encode_info =
      Java_EncodeInfo_Constructor(jni, j_frame_types);

  FrameExtraInfo info;
  info.capture_time_ns = frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec;
  info.timestamp_rtp = frame.timestamp();
  frame_extra_infos_.push_back(info);

  ScopedJavaLocalRef<jobject> j_frame = NativeToJavaVideoFrame(jni, frame);
  ScopedJavaLocalRef<jobject> ret =
      Java_VideoEncoder_encode(jni, encoder_, j_frame, encode_info);
  ReleaseJavaVideoFrame(jni, j_frame);
  return HandleReturnCode(jni, ret, "encode");
}

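// Forwards the updated bitrate allocation and target framerate to the Java
// encoder.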
void VideoEncoderWrapper::SetRates(const RateControlParameters& parameters) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();

  ScopedJavaLocalRef<jobject> j_bitrate_allocation =
      ToJavaBitrateAllocation(jni, parameters.bitrate);
  ScopedJavaLocalRef<jobject> ret = Java_VideoEncoder_setRateAllocation(
      jni, encoder_, j_bitrate_allocation,
      (jint)(parameters.framerate_fps + 0.5));
  HandleReturnCode(jni, ret, "setRateAllocation");
}

VideoEncoder::EncoderInfo VideoEncoderWrapper::GetEncoderInfo() const {
  return encoder_info_;
}

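// Queries the quality-scaling settings from the Java encoder. If scaling is
// enabled but no thresholds are provided, falls back to the same per-codec QP
// thresholds that the native software encoders use.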
VideoEncoderWrapper::ScalingSettings
VideoEncoderWrapper::GetScalingSettingsInternal(JNIEnv* jni) const {
  ScopedJavaLocalRef<jobject> j_scaling_settings =
      Java_VideoEncoder_getScalingSettings(jni, encoder_);
  bool isOn =
      Java_VideoEncoderWrapper_getScalingSettingsOn(jni, j_scaling_settings);

  if (!isOn)
    return ScalingSettings::kOff;

  absl::optional<int> low = JavaToNativeOptionalInt(
      jni,
      Java_VideoEncoderWrapper_getScalingSettingsLow(jni, j_scaling_settings));
  absl::optional<int> high = JavaToNativeOptionalInt(
      jni,
      Java_VideoEncoderWrapper_getScalingSettingsHigh(jni, j_scaling_settings));

  if (low && high)
    return ScalingSettings(*low, *high);

  switch (codec_settings_.codecType) {
    case kVideoCodecVP8: {
      // Same as in vp8_impl.cc.
      static const int kLowVp8QpThreshold = 29;
      static const int kHighVp8QpThreshold = 95;
      return ScalingSettings(low.value_or(kLowVp8QpThreshold),
                             high.value_or(kHighVp8QpThreshold));
    }
    case kVideoCodecVP9: {
      // QP is obtained from the VP9 bitstream, so it corresponds to the
      // bitstream range of [0, 255] and not the user-level range of [0, 63].
      static const int kLowVp9QpThreshold = 96;
      static const int kHighVp9QpThreshold = 185;

      return VideoEncoder::ScalingSettings(kLowVp9QpThreshold,
                                           kHighVp9QpThreshold);
    }
    case kVideoCodecH264: {
      // Same as in h264_encoder_impl.cc.
      static const int kLowH264QpThreshold = 24;
      static const int kHighH264QpThreshold = 37;
      return ScalingSettings(low.value_or(kLowH264QpThreshold),
                             high.value_or(kHighH264QpThreshold));
    }
    default:
      return ScalingSettings::kOff;
  }
}

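// Called from Java (through the callback created in InitEncodeInternal) when
// the encoder has produced an encoded frame. Matches the frame against the
// queued FrameExtraInfo entry and forwards it to the registered
// EncodedImageCallback.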
void VideoEncoderWrapper::OnEncodedFrame(
    JNIEnv* jni,
    const JavaRef<jobject>& j_encoded_image) {
  EncodedImage frame = JavaToNativeEncodedImage(jni, j_encoded_image);
  int64_t capture_time_ns =
      GetJavaEncodedImageCaptureTimeNs(jni, j_encoded_image);

  // Encoded frames are delivered in the order received, but some of them
  // may be dropped, so remove records of frames older than the current
  // one.
  //
  // NOTE: if the current frame is associated with Encoder A, in the time
  // since this frame was received, Encoder A could have been
  // Release()'ed, Encoder B InitEncode()'ed (due to reuse of Encoder A),
  // and frames received by Encoder B. Thus there may be frame_extra_infos
  // entries that don't belong to us, and we need to be careful not to
  // remove them. Removing only those entries older than the current frame
  // provides this guarantee.
  while (!frame_extra_infos_.empty() &&
         frame_extra_infos_.front().capture_time_ns < capture_time_ns) {
    frame_extra_infos_.pop_front();
  }
  if (frame_extra_infos_.empty() ||
      frame_extra_infos_.front().capture_time_ns != capture_time_ns) {
    RTC_LOG(LS_WARNING)
        << "Java encoder produced an unexpected frame with timestamp: "
        << capture_time_ns;
    return;
  }
  FrameExtraInfo frame_extra_info = std::move(frame_extra_infos_.front());
  frame_extra_infos_.pop_front();

  // This is a bit subtle. |frame| above is treated as read-only, which
  // implies that (i) we need to make a copy to be able to write to the
  // metadata, and (ii) we should avoid using the .data() method (including
  // implicit conversion to ArrayView) on the non-const copy, since that
  // would trigger a copy operation on the underlying CopyOnWriteBuffer.
  EncodedImage frame_copy = frame;

  frame_copy.SetTimestamp(frame_extra_info.timestamp_rtp);
  frame_copy.capture_time_ms_ = capture_time_ns / rtc::kNumNanosecsPerMillisec;

  RTPFragmentationHeader header = ParseFragmentationHeader(frame);
  if (frame_copy.qp_ < 0)
    frame_copy.qp_ = ParseQp(frame);

  CodecSpecificInfo info(ParseCodecSpecificInfo(frame));

  callback_->OnEncodedImage(frame_copy, &info, &header);
}

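// Converts a Java VideoCodecStatus into a native return code. Critical errors
// request a software fallback; for other errors the Java encoder is released
// and reinitialized before an error is reported.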
int32_t VideoEncoderWrapper::HandleReturnCode(JNIEnv* jni,
                                              const JavaRef<jobject>& j_value,
                                              const char* method_name) {
  int32_t value = JavaToNativeVideoCodecStatus(jni, j_value);
  if (value >= 0) {  // OK or NO_OUTPUT
    return value;
  }

  RTC_LOG(LS_WARNING) << method_name << ": " << value;
  if (value == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE ||
      value == WEBRTC_VIDEO_CODEC_UNINITIALIZED) {  // Critical error.
    RTC_LOG(LS_WARNING) << "Java encoder requested software fallback.";
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }

  // Try resetting the codec.
  if (Release() == WEBRTC_VIDEO_CODEC_OK &&
      InitEncodeInternal(jni) == WEBRTC_VIDEO_CODEC_OK) {
    RTC_LOG(LS_WARNING) << "Reset Java encoder.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  RTC_LOG(LS_WARNING) << "Unable to reset Java encoder.";
  return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
}

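// Builds the RTP fragmentation header for an encoded buffer. For H.264 the
// buffer is split at NAL unit start codes; all other codecs are described as
// a single fragment.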
RTPFragmentationHeader VideoEncoderWrapper::ParseFragmentationHeader(
    rtc::ArrayView<const uint8_t> buffer) {
  RTPFragmentationHeader header;
  if (codec_settings_.codecType == kVideoCodecH264) {
    h264_bitstream_parser_.ParseBitstream(buffer.data(), buffer.size());

    // For H.264 search for start codes.
    const std::vector<H264::NaluIndex> nalu_idxs =
        H264::FindNaluIndices(buffer.data(), buffer.size());
    if (nalu_idxs.empty()) {
      RTC_LOG(LS_ERROR) << "Start code not found!";
      RTC_LOG(LS_ERROR) << "Data: " << buffer[0] << " " << buffer[1] << " "
                        << buffer[2] << " " << buffer[3] << " " << buffer[4]
                        << " " << buffer[5];
    }
    header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size());
    for (size_t i = 0; i < nalu_idxs.size(); i++) {
      header.fragmentationOffset[i] = nalu_idxs[i].payload_start_offset;
      header.fragmentationLength[i] = nalu_idxs[i].payload_size;
    }
  } else {
    // Generate a header describing a single fragment.
    header.VerifyAndAllocateFragmentationHeader(1);
    header.fragmentationOffset[0] = 0;
    header.fragmentationLength[0] = buffer.size();
  }
  return header;
}

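// Parses the QP from the encoded bitstream, used when the Java encoder did
// not report a QP itself. Returns -1 if the QP could not be determined.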
int VideoEncoderWrapper::ParseQp(rtc::ArrayView<const uint8_t> buffer) {
  int qp;
  bool success;
  switch (codec_settings_.codecType) {
    case kVideoCodecVP8:
      success = vp8::GetQp(buffer.data(), buffer.size(), &qp);
      break;
    case kVideoCodecVP9:
      success = vp9::GetQp(buffer.data(), buffer.size(), &qp);
      break;
    case kVideoCodecH264:
      success = h264_bitstream_parser_.GetLastSliceQp(&qp);
      break;
    default:  // Default is to not provide QP.
      success = false;
      break;
  }
  return success ? qp : -1;  // -1 means unknown QP.
}

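// Fills in the codec-specific metadata (currently VP8 and VP9 only) that the
// RTP packetization code expects alongside the encoded image.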
CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo(
    const EncodedImage& frame) {
  const bool key_frame = frame._frameType == VideoFrameType::kVideoFrameKey;

  CodecSpecificInfo info;
  info.codecType = codec_settings_.codecType;

  switch (codec_settings_.codecType) {
    case kVideoCodecVP8:
      info.codecSpecific.VP8.nonReference = false;
      info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx;
      info.codecSpecific.VP8.layerSync = false;
      info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
      break;
    case kVideoCodecVP9:
      if (key_frame) {
        gof_idx_ = 0;
      }
      info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
      info.codecSpecific.VP9.flexible_mode = false;
      info.codecSpecific.VP9.ss_data_available = key_frame;
      info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
      info.codecSpecific.VP9.temporal_up_switch = true;
      info.codecSpecific.VP9.inter_layer_predicted = false;
      info.codecSpecific.VP9.gof_idx =
          static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
      info.codecSpecific.VP9.num_spatial_layers = 1;
      info.codecSpecific.VP9.first_frame_in_picture = true;
      info.codecSpecific.VP9.end_of_picture = true;
      info.codecSpecific.VP9.spatial_layer_resolution_present = false;
      if (info.codecSpecific.VP9.ss_data_available) {
        info.codecSpecific.VP9.spatial_layer_resolution_present = true;
        info.codecSpecific.VP9.width[0] = frame._encodedWidth;
        info.codecSpecific.VP9.height[0] = frame._encodedHeight;
        info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
      }
      break;
    default:
      break;
  }

  return info;
}

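// Converts a native VideoBitrateAllocation into the Java BitrateAllocation
// representation: an int[][] of bitrates in bps, indexed by
// [spatial layer][temporal layer].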
ScopedJavaLocalRef<jobject> VideoEncoderWrapper::ToJavaBitrateAllocation(
    JNIEnv* jni,
    const VideoBitrateAllocation& allocation) {
  ScopedJavaLocalRef<jobjectArray> j_allocation_array(
      jni, jni->NewObjectArray(kMaxSpatialLayers, int_array_class_.obj(),
                               nullptr /* initial */));
  for (int spatial_i = 0; spatial_i < kMaxSpatialLayers; ++spatial_i) {
    std::array<int32_t, kMaxTemporalStreams> spatial_layer;
    for (int temporal_i = 0; temporal_i < kMaxTemporalStreams; ++temporal_i) {
      spatial_layer[temporal_i] = allocation.GetBitrate(spatial_i, temporal_i);
    }

    ScopedJavaLocalRef<jintArray> j_array_spatial_layer =
        NativeToJavaIntArray(jni, spatial_layer);
    jni->SetObjectArrayElement(j_allocation_array.obj(), spatial_i,
                               j_array_spatial_layer.obj());
  }
  return Java_BitrateAllocation_Constructor(jni, j_allocation_array);
}

std::string VideoEncoderWrapper::GetImplementationName(JNIEnv* jni) const {
  return JavaToStdString(
      jni, Java_VideoEncoder_getImplementationName(jni, encoder_));
}

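// If the Java encoder is backed by a native encoder
// (createNativeVideoEncoder() returns a non-zero pointer), that encoder is
// adopted directly; otherwise the Java object is wrapped in a
// VideoEncoderWrapper.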
std::unique_ptr<VideoEncoder> JavaToNativeVideoEncoder(
    JNIEnv* jni,
    const JavaRef<jobject>& j_encoder) {
  const jlong native_encoder =
      Java_VideoEncoder_createNativeVideoEncoder(jni, j_encoder);
  VideoEncoder* encoder;
  if (native_encoder == 0) {
    encoder = new VideoEncoderWrapper(jni, j_encoder);
  } else {
    encoder = reinterpret_cast<VideoEncoder*>(native_encoder);
  }
  return std::unique_ptr<VideoEncoder>(encoder);
}

bool IsHardwareVideoEncoder(JNIEnv* jni, const JavaRef<jobject>& j_encoder) {
  return Java_VideoEncoder_isHardwareEncoder(jni, j_encoder);
}

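// Converts a Java ResolutionBitrateLimits[] into its native equivalent.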
std::vector<VideoEncoder::ResolutionBitrateLimits>
JavaToNativeResolutionBitrateLimits(
    JNIEnv* jni,
    const JavaRef<jobjectArray>& j_bitrate_limits_array) {
  std::vector<VideoEncoder::ResolutionBitrateLimits> resolution_bitrate_limits;

  const jsize array_length = jni->GetArrayLength(j_bitrate_limits_array.obj());
  for (int i = 0; i < array_length; ++i) {
    ScopedJavaLocalRef<jobject> j_bitrate_limits = ScopedJavaLocalRef<jobject>(
        jni, jni->GetObjectArrayElement(j_bitrate_limits_array.obj(), i));

    jint frame_size_pixels =
        Java_ResolutionBitrateLimits_getFrameSizePixels(jni, j_bitrate_limits);
    jint min_start_bitrate_bps =
        Java_ResolutionBitrateLimits_getMinStartBitrateBps(jni,
                                                           j_bitrate_limits);
    jint min_bitrate_bps =
        Java_ResolutionBitrateLimits_getMinBitrateBps(jni, j_bitrate_limits);
    jint max_bitrate_bps =
        Java_ResolutionBitrateLimits_getMaxBitrateBps(jni, j_bitrate_limits);

    resolution_bitrate_limits.push_back(VideoEncoder::ResolutionBitrateLimits(
        frame_size_pixels, min_start_bitrate_bps, min_bitrate_bps,
        max_bitrate_bps));
  }

  return resolution_bitrate_limits;
}

}  // namespace jni
}  // namespace webrtc